diff --git a/.config/nextest.toml b/.config/nextest.toml index c063bb861..cc1a18dbe 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -1,4 +1,4 @@ [profile.default] # Mark tests that take longer than 10s as slow. -# Terminate after 90s as a stop-gap measure to terminate on deadlock. -slow-timeout = { period = "10s", terminate-after = 9 } +# Terminate after 120s as a stop-gap measure to terminate on deadlock. +slow-timeout = { period = "10s", terminate-after = 12 } diff --git a/.github/workflows/build-binaries.yml b/.github/workflows/build-binaries.yml index 5f42bdf9c..ccd3ef3ee 100644 --- a/.github/workflows/build-binaries.yml +++ b/.github/workflows/build-binaries.yml @@ -54,7 +54,7 @@ jobs: - name: "Prep README.md" run: python scripts/transform_readme.py --target pypi - name: "Build sdist" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: command: sdist args: --out dist @@ -74,7 +74,7 @@ jobs: # uv-build - name: "Build sdist uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: command: sdist args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -103,7 +103,7 @@ jobs: # uv - name: "Build wheels - x86_64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: x86_64 args: --release --locked --out dist --features self-update @@ -133,7 +133,7 @@ jobs: # uv-build - name: "Build wheels uv-build - x86_64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: x86_64 args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -157,7 +157,7 @@ jobs: # uv - name: "Build wheels - aarch64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: aarch64 args: --release --locked --out dist --features self-update @@ -193,7 +193,7 @@ jobs: # uv-build - name: "Build wheels uv-build - aarch64" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: aarch64 args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -231,7 +231,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} args: --release --locked --out dist --features self-update,windows-gui-bin @@ -267,7 +267,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml @@ -303,7 +303,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: 
${{ matrix.target }} # Generally, we try to build in a target docker container. In this case however, a @@ -368,7 +368,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} manylinux: auto @@ -412,7 +412,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`. @@ -461,7 +461,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`. @@ -509,7 +509,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -561,7 +561,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -614,7 +614,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -671,7 +671,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -703,8 +703,8 @@ jobs: arch: riscv64 steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Prep README.md" @@ -712,7 +712,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -761,7 +761,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: auto @@ -807,7 +807,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} manylinux: musllinux_1_1 @@ -854,7 +854,7 @@ jobs: # uv-build - name: "Build wheels uv-build" - uses: 
PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.target }} manylinux: musllinux_1_1 @@ -901,7 +901,7 @@ jobs: # uv - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: musllinux_1_1 @@ -966,7 +966,7 @@ jobs: # uv-build - name: "Build wheels" - uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1 + uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3 with: target: ${{ matrix.platform.target }} manylinux: musllinux_1_1 diff --git a/.github/workflows/build-docker.yml b/.github/workflows/build-docker.yml index 520800991..843ee8dfb 100644 --- a/.github/workflows/build-docker.yml +++ b/.github/workflows/build-docker.yml @@ -1,11 +1,19 @@ -# Build and publish a Docker image. +# Build and publish Docker images. # -# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local -# artifacts job within `cargo-dist`. +# Uses Depot for multi-platform builds. Includes both a `uv` base image, which +# is just the binary in a scratch image, and a set of extra, common images with +# the uv binary installed. # -# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but -# sharing the built image as an artifact between jobs is challenging. -name: "Build Docker image" +# Images are built on all runs. +# +# On release, assumed to run as a subworkflow of .github/workflows/release.yml; +# specifically, as a local artifacts job within `cargo-dist`. In this case, +# images are published based on the `plan`. +# +# TODO(charlie): Ideally, the publish step would happen as a publish job within +# `cargo-dist`, but sharing the built image as an artifact between jobs is +# challenging. 
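For context on the header comment above: the extra images are generated at build time by writing a small Dockerfile that copies the binaries out of the base image (see the `Generate Dynamic Dockerfile Tags` step later in this file). A minimal standalone sketch of that pattern, assuming the published `ghcr.io/astral-sh/uv:latest` base image; `python:3.12-slim-bookworm` is an illustrative base, not necessarily one of the workflow's mappings:

```bash
# Sketch of the derived-image pattern used by this workflow. The uv base
# image is a scratch image holding only the binaries, so it is consumed
# via COPY --from rather than used as a runtime base.
cat <<'EOF' > Dockerfile
FROM python:3.12-slim-bookworm
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /usr/local/bin/
EOF
docker build -t uv-example .
docker run --rm uv-example uv --version
```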
+name: "Docker images" on: workflow_call: @@ -29,35 +37,67 @@ on: - .github/workflows/build-docker.yml env: - UV_BASE_IMG: ghcr.io/${{ github.repository_owner }}/uv + UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv + UV_DOCKERHUB_IMAGE: docker.io/astral/uv jobs: - docker-build: - if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} - name: Build Docker image (ghcr.io/astral-sh/uv) for ${{ matrix.platform }} + docker-plan: + name: plan runs-on: ubuntu-latest + outputs: + login: ${{ steps.plan.outputs.login }} + push: ${{ steps.plan.outputs.push }} + tag: ${{ steps.plan.outputs.tag }} + action: ${{ steps.plan.outputs.action }} + steps: + - name: Set push variable + env: + DRY_RUN: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }} + TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag }} + IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }} + id: plan + run: | + if [ "${{ env.DRY_RUN }}" == "false" ]; then + echo "login=true" >> "$GITHUB_OUTPUT" + echo "push=true" >> "$GITHUB_OUTPUT" + echo "tag=${{ env.TAG }}" >> "$GITHUB_OUTPUT" + echo "action=build and publish" >> "$GITHUB_OUTPUT" + else + echo "login=${{ env.IS_LOCAL_PR }}" >> "$GITHUB_OUTPUT" + echo "push=false" >> "$GITHUB_OUTPUT" + echo "tag=dry-run" >> "$GITHUB_OUTPUT" + echo "action=build" >> "$GITHUB_OUTPUT" + fi + + docker-publish-base: + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }} + name: ${{ needs.docker-plan.outputs.action }} uv + needs: + - docker-plan + runs-on: ubuntu-latest + permissions: + contents: read + id-token: write # for Depot OIDC and GHCR signing + packages: write # for GHCR image pushes + attestations: write # for GHCR attestations environment: - name: release - strategy: - fail-fast: false - matrix: - platform: - - linux/amd64 - - linux/arm64 + name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }} + outputs: + image-tags: ${{ steps.meta.outputs.tags }} + image-annotations: ${{ steps.meta.outputs.annotations }} + image-digest: ${{ steps.build.outputs.digest }} + image-version: ${{ steps.meta.outputs.version }} steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: submodules: recursive - # Login to DockerHub first, to avoid rate-limiting + # Login to DockerHub (when not pushing, it's to avoid rate-limiting) - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 - # PRs from forks don't have access to secrets, disable this step in that case. 
- if: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }} + if: ${{ needs.docker-plan.outputs.login == 'true' }} with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 + username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }} + password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }} - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: @@ -65,13 +105,15 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 + - name: Check tag consistency - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + if: ${{ needs.docker-plan.outputs.push == 'true' }} run: | version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g') - if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then + if [ "${{ needs.docker-plan.outputs.tag }}" != "${version}" ]; then echo "The input tag does not match the version from pyproject.toml:" >&2 - echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2 + echo "${{ needs.docker-plan.outputs.tag }}" >&2 echo "${version}" >&2 exit 1 else @@ -81,107 +123,50 @@ jobs: - name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 + env: + DOCKER_METADATA_ANNOTATIONS_LEVELS: index with: - images: ${{ env.UV_BASE_IMG }} + images: | + ${{ env.UV_GHCR_IMAGE }} + ${{ env.UV_DOCKERHUB_IMAGE }} # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name tags: | - type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }} - type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + type=raw,value=dry-run,enable=${{ needs.docker-plan.outputs.push == 'false' }} + type=pep440,pattern={{ version }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }} + type=pep440,pattern={{ major }}.{{ minor }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }} - - name: Normalize Platform Pair (replace / with -) - run: | - platform=${{ matrix.platform }} - echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV - - # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/ - name: Build and push by digest id: build - uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0 + uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0 with: + project: 7hd4vdzmw5 # astral-sh/uv context: . 
- platforms: ${{ matrix.platform }} - cache-from: type=gha,scope=uv-${{ env.PLATFORM_TUPLE }} - cache-to: type=gha,mode=min,scope=uv-${{ env.PLATFORM_TUPLE }} + platforms: linux/amd64,linux/arm64 + push: ${{ needs.docker-plan.outputs.push }} + tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - outputs: type=image,name=${{ env.UV_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + # TODO(zanieb): Annotations are not supported by Depot yet and are ignored + annotations: ${{ steps.meta.outputs.annotations }} - - name: Export digests - run: | - mkdir -p /tmp/digests - digest="${{ steps.build.outputs.digest }}" - touch "/tmp/digests/${digest#sha256:}" - - - name: Upload digests - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + - name: Generate artifact attestation for base image + if: ${{ needs.docker-plan.outputs.push == 'true' }} + uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 with: - name: digests-${{ env.PLATFORM_TUPLE }} - path: /tmp/digests/* - if-no-files-found: error - retention-days: 1 - - docker-publish: - name: Publish Docker image (ghcr.io/astral-sh/uv) - runs-on: ubuntu-latest - environment: - name: release - needs: - - docker-build - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} - steps: - # Login to DockerHub first, to avoid rate-limiting - - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 - with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - name: Download digests - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - with: - path: /tmp/digests - pattern: digests-* - merge-multiple: true - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 - with: - images: ${{ env.UV_BASE_IMG }} - # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version - tags: | - type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }} - type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }} - - - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 - with: - registry: ghcr.io - username: ${{ github.repository_owner }} - password: ${{ secrets.GITHUB_TOKEN }} - - # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/ - - name: Create manifest list and push - working-directory: /tmp/digests - # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array - # The printf will expand the base image with the `@sha256: ...` for each sha256 in the directory - # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... @sha256: @sha256: ...` - run: | - docker buildx imagetools create \ - $(jq -cr '.tags | map("-t " + .) 
| join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *) + subject-name: ${{ env.UV_GHCR_IMAGE }} + subject-digest: ${{ steps.build.outputs.digest }} docker-publish-extra: - name: Publish additional Docker image based on ${{ matrix.image-mapping }} + name: ${{ needs.docker-plan.outputs.action }} ${{ matrix.image-mapping }} runs-on: ubuntu-latest environment: - name: release + name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }} needs: - - docker-publish - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} + - docker-plan + - docker-publish-base permissions: - packages: write - attestations: write # needed to push image attestations to the Github attestation store - id-token: write # needed for signing the images with GitHub OIDC Token + id-token: write # for Depot OIDC and GHCR signing + packages: write # for GHCR image pushes + attestations: write # for GHCR attestations strategy: fail-fast: false matrix: @@ -213,13 +198,12 @@ jobs: - python:3.9-slim-bookworm,python3.9-bookworm-slim - python:3.8-slim-bookworm,python3.8-bookworm-slim steps: - # Login to DockerHub first, to avoid rate-limiting + # Login to DockerHub (when not pushing, it's to avoid rate-limiting) - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + if: ${{ needs.docker-plan.outputs.login == 'true' }} with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 + username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }} + password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }} - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: @@ -227,6 +211,8 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 + - name: Generate Dynamic Dockerfile Tags shell: bash run: | @@ -238,7 +224,7 @@ jobs: # Generate Dockerfile content cat < Dockerfile FROM ${BASE_IMAGE} - COPY --from=${{ env.UV_BASE_IMG }}:latest /uv /uvx /usr/local/bin/ + COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/ ENTRYPOINT [] CMD ["/usr/local/bin/uv"] EOF @@ -249,17 +235,14 @@ jobs: # Loop through all base tags and append its docker metadata pattern to the list # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version IFS=','; for TAG in ${BASE_TAGS}; do - TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n" - TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n" + TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n" + TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n" TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n" done # Remove the trailing newline from the pattern list TAG_PATTERNS="${TAG_PATTERNS%\\n}" - # Export image cache name - echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV - # Export tag patterns using the multiline env var syntax { echo "TAG_PATTERNS<> "$GITHUB_OUTPUT" + + # See `docker-annotate-base` for 
details. + - name: Generate artifact attestation + if: ${{ needs.docker-plan.outputs.push == 'true' }} + uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 + with: + subject-name: ${{ env.UV_GHCR_IMAGE }} + subject-digest: ${{ steps.manifest-digest.outputs.digest }} + + # Annotate the base image + docker-annotate-base: + name: annotate uv runs-on: ubuntu-latest environment: - name: release + name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }} needs: + - docker-plan + - docker-publish-base - docker-publish-extra - if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }} - permissions: - packages: write - attestations: write # needed to push image attestations to the Github attestation store - id-token: write # needed for signing the images with GitHub OIDC Token + if: ${{ needs.docker-plan.outputs.push == 'true' }} steps: - # Login to DockerHub first, to avoid rate-limiting - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: - username: astralshbot - password: ${{ secrets.DOCKERHUB_TOKEN_RO }} - - - name: Download digests - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - with: - path: /tmp/digests - pattern: digests-* - merge-multiple: true - - - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 - env: - DOCKER_METADATA_ANNOTATIONS_LEVELS: index - with: - images: ${{ env.UV_BASE_IMG }} - # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version - tags: | - type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }} - type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }} + username: astral + password: ${{ secrets.DOCKERHUB_TOKEN_RW }} - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: @@ -350,22 +352,37 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/ - - name: Create manifest list and push - working-directory: /tmp/digests + # Depot doesn't support annotating images, so we need to do so manually + # afterwards. Mutating the manifest is desirable regardless, because we + # want to bump the base image to appear at the top of the list on GHCR. + # However, once annotation support is added to Depot, this step can be + # minimized to just touch the GHCR manifest. + - name: Add annotations to images + env: + IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}" + DIGEST: ${{ needs.docker-publish-base.outputs.image-digest }} + TAGS: ${{ needs.docker-publish-base.outputs.image-tags }} + ANNOTATIONS: ${{ needs.docker-publish-base.outputs.image-annotations }} # The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces) - # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array - # The printf will expand the base image with the `@sha256: ...` for each sha256 in the directory - # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... 
@sha256: @sha256: ...` + # The final command becomes `docker buildx imagetools create --annotation 'index:foo=1' --annotation 'index:bar=2' ... -t tag1 -t tag2 ... @sha256:` run: | - readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done - docker buildx imagetools create \ - "${annotations[@]}" \ - $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ - $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *) + set -x + readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done + for image in $IMAGES; do + readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done + docker buildx imagetools create \ + "${annotations[@]}" \ + "${tags[@]}" \ + "${image}@${DIGEST}" + done - - name: Share manifest digest + # Now that we've modified the manifest, we need to attest it again. + # Note we only generate an attestation for GHCR. + - name: Export manifest digest id: manifest-digest + env: + IMAGE: ${{ env.UV_GHCR_IMAGE }} + VERSION: ${{ needs.docker-publish-base.outputs.image-version }} # To sign the manifest, we need its digest. Unfortunately "docker # buildx imagetools create" does not (yet) have a clean way of sharing # the digest of the manifest it creates (see docker/buildx#2407), so @@ -377,15 +394,14 @@ jobs: run: | digest="$( docker buildx imagetools inspect \ - "${UV_BASE_IMG}:${DOCKER_METADATA_OUTPUT_VERSION}" \ + "${IMAGE}:${VERSION}" \ --format '{{json .Manifest}}' \ | jq -r '.digest' )" echo "digest=${digest}" >> "$GITHUB_OUTPUT" - name: Generate artifact attestation - uses: actions/attest-build-provenance@db473fddc028af60658334401dc6fa3ffd8669fd # v2.3.0 + uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0 with: - subject-name: ${{ env.UV_BASE_IMG }} + subject-name: ${{ env.UV_GHCR_IMAGE }} subject-digest: ${{ steps.manifest-digest.outputs.digest }} - # push-to-registry is explicitly not enabled to maintain full control over the top image diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e29d8743c..bc77abd93 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,8 +14,9 @@ env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 CARGO_TERM_COLOR: always - RUSTUP_MAX_RETRIES: 10 PYTHON_VERSION: "3.12" + RUSTUP_MAX_RETRIES: 10 + RUST_BACKTRACE: 1 jobs: determine_changes: @@ -81,7 +82,7 @@ jobs: run: rustup component add rustfmt - name: "Install uv" - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "rustfmt" run: cargo fmt --all --check @@ -125,11 +126,11 @@ jobs: name: "cargo clippy | ubuntu" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Check uv_build dependencies" - uses: EmbarkStudios/cargo-deny-action@34899fc7ba81ca6268d5947a7a16b4649013fea1 # v2.0.11 + uses: EmbarkStudios/cargo-deny-action@30f817c6f72275c6d54dc744fbca09ebc958599f # v2.0.12 with: command: check bans manifest-path: crates/uv-build/Cargo.toml @@ -155,7 +156,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{
env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -174,7 +175,7 @@ jobs: name: "cargo dev generate-all" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Generate all" @@ -187,7 +188,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: "Install cargo shear" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-shear - run: cargo shear @@ -207,17 +208,17 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "Install required Python versions" run: uv python install - name: "Install cargo nextest" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-nextest @@ -239,17 +240,17 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "Install required Python versions" run: uv python install - name: "Install cargo nextest" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-nextest @@ -265,7 +266,7 @@ jobs: timeout-minutes: 15 needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} - runs-on: github-windows-2025-x86_64-16 + runs-on: depot-windows-2022-16 name: "cargo test | windows" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -278,11 +279,11 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - name: "Install required Python versions" run: uv python install - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -291,27 +292,11 @@ jobs: run: rustup show - name: "Install cargo nextest" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: 
taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-nextest - # Get crash dumps to debug the `exit_code: -1073741819` failures - - name: Configure crash dumps - if: runner.os == 'Windows' - shell: powershell - run: | - $dumps = "$env:GITHUB_WORKSPACE\dumps" - New-Item -Path $dumps -ItemType Directory -Force - - # https://github.com/microsoft/terminal/wiki/Troubleshooting-Tips#capture-automatically - $reg = "HKLM:\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps" - New-Item -Path $reg -Force | Out-Null - Set-ItemProperty -Path $reg -Name "DumpFolder" -Value $dumps - Set-ItemProperty -Path $reg -Name "DumpType" -Value 2 - - name: "Cargo test" - id: test - continue-on-error: true working-directory: ${{ env.UV_WORKSPACE }} env: # Avoid permission errors during concurrent tests @@ -325,42 +310,6 @@ jobs: --workspace \ --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow - # Get crash dumps to debug the `exit_code: -1073741819` failures (contd.) - - name: Analyze crashes - if: steps.test.outcome == 'failure' - shell: powershell - run: | - $dumps = Get-ChildItem "$env:GITHUB_WORKSPACE\dumps\*.dmp" -ErrorAction SilentlyContinue - if (!$dumps) { exit 0 } - - Write-Host "Found $($dumps.Count) crash dump(s)" - - # Download cdb if needed - $cdb = "C:\Program Files (x86)\Windows Kits\10\Debuggers\x64\cdb.exe" - if (!(Test-Path $cdb)) { - # https://github.com/microsoft/react-native-windows/blob/f1570a5ef1c4fc1e78d0a0ad5af848ab91a4061c/vnext/Scripts/Analyze-Crash.ps1#L44-L56 - Invoke-WebRequest "https://go.microsoft.com/fwlink/?linkid=2173743" -OutFile "$env:TEMP\sdk.exe" - Start-Process "$env:TEMP\sdk.exe" -ArgumentList "/features OptionId.WindowsDesktopDebuggers /quiet" -Wait - } - - # Analyze each dump - foreach ($dump in $dumps) { - Write-Host "`n=== $($dump.Name) ===" - & $cdb -z $dump -c "!analyze -v; .ecxr; k; q" 2>&1 | Select-String -Pattern "(ExceptionCode:|SYMBOL_NAME:|IMAGE_NAME:|STACK_TEXT:)" -Context 0,2 - } - - - name: Upload crash dumps - if: steps.test.outcome == 'failure' - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: crash-dumps-${{ github.run_number }} - path: dumps/*.dmp - if-no-files-found: ignore - - - name: Fail if tests failed - if: steps.test.outcome == 'failure' - run: exit 1 - # Separate jobs for the nightly crate windows-trampoline-check: timeout-minutes: 15 @@ -383,7 +332,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline @@ -394,7 +343,7 @@ jobs: rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc - name: "Install cargo-bloat" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-bloat @@ -439,7 +388,7 @@ jobs: - name: Copy Git Repo to Dev Drive run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline - name: "Install Rust toolchain" @@ 
-481,7 +430,7 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 - - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} @@ -494,7 +443,7 @@ jobs: - name: "Build docs (insiders)" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} - run: uvx --with-requirements docs/requirements.txt mkdocs build --strict -f mkdocs.insiders.yml + run: uvx --with-requirements docs/requirements-insiders.txt mkdocs build --strict -f mkdocs.insiders.yml build-binary-linux-libc: timeout-minutes: 10 @@ -507,7 +456,7 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build @@ -521,6 +470,31 @@ jobs: ./target/debug/uvx retention-days: 1 + build-binary-linux-aarch64: + timeout-minutes: 10 + needs: determine_changes + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} + runs-on: github-ubuntu-24.04-aarch64-4 + name: "build binary | linux aarch64" + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - uses: rui314/setup-mold@v1 + + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 + + - name: "Build" + run: cargo build + + - name: "Upload binary" + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: uv-linux-aarch64-${{ github.sha }} + path: | + ./target/debug/uv + ./target/debug/uvx + retention-days: 1 + build-binary-linux-musl: timeout-minutes: 10 needs: determine_changes @@ -537,7 +511,7 @@ jobs: sudo apt-get install musl-tools rustup target add x86_64-unknown-linux-musl - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build --target x86_64-unknown-linux-musl --bin uv --bin uvx @@ -562,7 +536,7 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build --bin uv --bin uvx @@ -586,7 +560,7 @@ jobs: - uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Build" run: cargo build --bin uv --bin uvx @@ -616,7 +590,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -651,7 +625,7 @@ jobs: run: | Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 with: workspaces: ${{ env.UV_WORKSPACE }} @@ -688,7 +662,7 @@ jobs: run: rustup default 
${{ steps.msrv.outputs.value }} - name: "Install mold" uses: rui314/setup-mold@v1 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - run: cargo +${{ steps.msrv.outputs.value }} build - run: ./target/debug/uv --version @@ -701,7 +675,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Cross build" run: | # Install cross from `freebsd-firecracker` @@ -712,7 +686,7 @@ jobs: cross build --target x86_64-unknown-freebsd - name: Test in Firecracker VM - uses: acj/freebsd-firecracker-action@d548632daa4f81a142a054c9829408e659350eb0 # v0.4.2 + uses: acj/freebsd-firecracker-action@136ca0bce2adade21e526ceb07db643ad23dd2dd # v0.5.1 with: verbose: false checkout: false @@ -821,6 +795,33 @@ jobs: eval "$(./uv generate-shell-completion bash)" eval "$(./uvx --generate-shell-completion bash)" + smoke-test-linux-aarch64: + timeout-minutes: 10 + needs: build-binary-linux-aarch64 + name: "smoke test | linux aarch64" + runs-on: github-ubuntu-24.04-aarch64-2 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-linux-aarch64-${{ github.sha }} + + - name: "Prepare binary" + run: | + chmod +x ./uv + chmod +x ./uvx + + - name: "Smoke test" + run: | + ./uv run scripts/smoke-test + + - name: "Test shell completions" + run: | + eval "$(./uv generate-shell-completion bash)" + eval "$(./uvx --generate-shell-completion bash)" + smoke-test-linux-musl: timeout-minutes: 10 needs: build-binary-linux-musl @@ -903,7 +904,7 @@ jobs: timeout-minutes: 10 needs: build-binary-windows-aarch64 name: "smoke test | windows aarch64" - runs-on: github-windows-11-aarch64-4 + runs-on: windows-11-arm steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -934,7 +935,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + - uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0 with: miniconda-version: latest activate-environment: uv @@ -1051,6 +1052,96 @@ jobs: ./uv run python -c "" ./uv run -p 3.13t python -c "" + integration-test-windows-aarch64-implicit: + timeout-minutes: 10 + needs: build-binary-windows-aarch64 + name: "integration test | aarch64 windows implicit" + runs-on: windows-11-arm + + steps: + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-windows-aarch64-${{ github.sha }} + + - name: "Install Python via uv (implicitly select x64)" + run: | + ./uv python install -v 3.13 + + - name: "Create a virtual environment (stdlib)" + run: | + & (./uv python find 3.13) -m venv .venv + + - name: "Check version (stdlib)" + run: | + .venv/Scripts/python --version + + - name: "Create a virtual environment (uv)" + run: | + ./uv venv -p 3.13 --managed-python + + - name: "Check version (uv)" + run: | + .venv/Scripts/python --version + + - name: "Check is x64" + run: | + .venv/Scripts/python -c "import sys; exit(1) if 'AMD64' not in sys.version else exit(0)" + + - name: "Check 
install" + run: | + ./uv pip install -v anyio + + - name: "Check uv run" + run: | + ./uv run python -c "" + ./uv run -p 3.13 python -c "" + + integration-test-windows-aarch64-explicit: + timeout-minutes: 10 + needs: build-binary-windows-aarch64 + name: "integration test | aarch64 windows explicit" + runs-on: windows-11-arm + + steps: + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-windows-aarch64-${{ github.sha }} + + - name: "Install Python via uv (explicitly select aarch64)" + run: | + ./uv python install -v cpython-3.13-windows-aarch64-none + + - name: "Create a virtual environment (stdlib)" + run: | + & (./uv python find 3.13) -m venv .venv + + - name: "Check version (stdlib)" + run: | + .venv/Scripts/python --version + + - name: "Create a virtual environment (uv)" + run: | + ./uv venv -p 3.13 --managed-python + + - name: "Check version (uv)" + run: | + .venv/Scripts/python --version + + - name: "Check is NOT x64" + run: | + .venv/Scripts/python -c "import sys; exit(1) if 'AMD64' in sys.version else exit(0)" + + - name: "Check install" + run: | + ./uv pip install -v anyio + + - name: "Check uv run" + run: | + ./uv run python -c "" + ./uv run -p 3.13 python -c "" + integration-test-pypy-linux: timeout-minutes: 10 needs: build-binary-linux-libc @@ -1467,6 +1558,90 @@ jobs: done <<< "${CHANGED_FILES}" echo "code_any_changed=${CODE_CHANGED}" >> "${GITHUB_OUTPUT}" + integration-test-registries: + timeout-minutes: 10 + needs: build-binary-linux-libc + name: "integration test | registries" + runs-on: ubuntu-latest + if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event.pull_request.head.repo.fork != true }} + environment: uv-test-registries + env: + PYTHON_VERSION: 3.12 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-linux-libc-${{ github.sha }} + + - name: "Prepare binary" + run: chmod +x ./uv + + - name: "Configure AWS credentials" + uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + + - name: "Get AWS CodeArtifact token" + run: | + UV_TEST_AWS_TOKEN=$(aws codeartifact get-authorization-token \ + --domain tests \ + --domain-owner ${{ secrets.AWS_ACCOUNT_ID }} \ + --region us-east-1 \ + --query authorizationToken \ + --output text) + echo "::add-mask::$UV_TEST_AWS_TOKEN" + echo "UV_TEST_AWS_TOKEN=$UV_TEST_AWS_TOKEN" >> $GITHUB_ENV + + - name: "Authenticate with GCP" + id: "auth" + uses: "google-github-actions/auth@0920706a19e9d22c3d0da43d1db5939c6ad837a8" + with: + credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" + + - name: "Set up GCP SDK" + uses: "google-github-actions/setup-gcloud@a8b58010a5b2a061afd605f50e88629c9ec7536b" + + - name: "Get GCP Artifact Registry token" + id: get_token + run: | + UV_TEST_GCP_TOKEN=$(gcloud auth print-access-token) + echo "::add-mask::$UV_TEST_GCP_TOKEN" + echo "UV_TEST_GCP_TOKEN=$UV_TEST_GCP_TOKEN" >> $GITHUB_ENV + + - name: "Run registry tests" + run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv 
--color always --all + env: + RUST_LOG: uv=debug + UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }} + UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }} + UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }} + UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }} + UV_TEST_AWS_USERNAME: aws + UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }} + UV_TEST_AZURE_URL: ${{ secrets.UV_TEST_AZURE_URL }} + UV_TEST_AZURE_USERNAME: dummy + UV_TEST_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_CLOUDSMITH_TOKEN }} + UV_TEST_CLOUDSMITH_URL: ${{ secrets.UV_TEST_CLOUDSMITH_URL }} + UV_TEST_CLOUDSMITH_USERNAME: ${{ secrets.UV_TEST_CLOUDSMITH_USERNAME }} + UV_TEST_GCP_URL: ${{ secrets.UV_TEST_GCP_URL }} + UV_TEST_GCP_USERNAME: oauth2accesstoken + UV_TEST_GEMFURY_TOKEN: ${{ secrets.UV_TEST_GEMFURY_TOKEN }} + UV_TEST_GEMFURY_URL: ${{ secrets.UV_TEST_GEMFURY_URL }} + UV_TEST_GEMFURY_USERNAME: ${{ secrets.UV_TEST_GEMFURY_USERNAME }} + UV_TEST_GITLAB_TOKEN: ${{ secrets.UV_TEST_GITLAB_TOKEN }} + UV_TEST_GITLAB_URL: ${{ secrets.UV_TEST_GITLAB_URL }} + UV_TEST_GITLAB_USERNAME: token + integration-test-publish: timeout-minutes: 20 needs: integration-test-publish-changed @@ -2039,7 +2214,7 @@ jobs: timeout-minutes: 10 needs: build-binary-windows-aarch64 name: "check system | x86-64 python3.13 on windows aarch64" - runs-on: github-windows-11-aarch64-4 + runs-on: windows-11-arm steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -2057,6 +2232,28 @@ jobs: - name: "Validate global Python install" run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe + system-test-windows-aarch64-aarch64-python-313: + timeout-minutes: 10 + needs: build-binary-windows-aarch64 + name: "check system | aarch64 python3.13 on windows aarch64" + runs-on: windows-11-arm + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + with: + python-version: "3.13" + architecture: "arm64" + allow-prereleases: true + + - name: "Download binary" + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + with: + name: uv-windows-aarch64-${{ github.sha }} + + - name: "Validate global Python install" + run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe + # Test our PEP 514 integration that installs Python into the Windows registry. 
system-test-windows-registry: timeout-minutes: 10 @@ -2202,7 +2399,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + - uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0 with: miniconda-version: "latest" activate-environment: uv @@ -2304,13 +2501,13 @@ jobs: - name: "Checkout Branch" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-codspeed @@ -2341,13 +2538,13 @@ jobs: - name: "Checkout Branch" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0 - name: "Install Rust toolchain" run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@735e5933943122c5ac182670a935f54a949265c1 # v2.52.4 + uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8 with: tool: cargo-codspeed diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index de3ca4d30..e4435ff17 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -22,12 +22,14 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: pattern: wheels_uv-* path: wheels_uv merge-multiple: true + - name: Remove wheels unsupported by PyPI + run: rm wheels_uv/*riscv* - name: Publish to PyPI run: uv publish -v wheels_uv/* @@ -41,11 +43,13 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 with: pattern: wheels_uv_build-* path: wheels_uv_build merge-multiple: true + - name: Remove wheels unsupported by PyPI + run: rm wheels_uv_build/*riscv* - name: Publish to PyPI run: uv publish -v wheels_uv_build/* diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b1c77c316..2688c3fc8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -69,7 +69,7 @@ jobs: # we specify bash to get pipefail; it guards against the `curl` command # failing. 
otherwise `sh` won't catch that `curl` returned non-0 shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4/cargo-dist-installer.sh | sh" + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.1/cargo-dist-installer.sh | sh" - name: Cache dist uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47 with: diff --git a/.github/workflows/setup-dev-drive.ps1 b/.github/workflows/setup-dev-drive.ps1 index e0e2a765b..474b082dc 100644 --- a/.github/workflows/setup-dev-drive.ps1 +++ b/.github/workflows/setup-dev-drive.ps1 @@ -1,13 +1,43 @@ # Configures a drive for testing in CI. +# +# When using standard GitHub Actions runners, a `D:` drive is present and has +# similar or better performance characteristics than a ReFS dev drive. Sometimes +# using a larger runner is still more performant (e.g., when running the test +# suite) and we need to create a dev drive. This script automatically configures +# the appropriate drive. +# +# When using GitHub Actions' "larger runners", the `D:` drive is not present and +# we create a DevDrive mount on `C:`. This is purported to be more performant +# than a ReFS drive, though we did not see a change when we switched over. +# +# When using Depot runners, the underlying infrastructure is EC2, which does not +# support Hyper-V. The `New-VHD` cmdlet only works with Hyper-V, but we can +# create a ReFS drive using `diskpart` and `format` directly. We cannot use a +# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already, +# so we must check if it's a Depot runner first, and we use `V:` as the target +# instead. -# When not using a GitHub Actions "larger runner", the `D:` drive is present and -# has similar or better performance characteristics than a ReFS dev drive. -# Sometimes using a larger runner is still more performant (e.g., when running -# the test suite) and we need to create a dev drive. This script automatically -# configures the appropriate drive. -# Note we use `Get-PSDrive` is not sufficient because the drive letter is assigned. -if (Test-Path "D:\") { +if ($env:DEPOT_RUNNER -eq "1") { + Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..." + + # Create VHD and configure drive using diskpart + $vhdPath = "C:\uv_dev_drive.vhdx" + @" +create vdisk file="$vhdPath" maximum=20480 type=expandable +attach vdisk +create partition primary +active +assign letter=V +"@ | diskpart + + # Format the drive as ReFS + format V: /fs:ReFS /q /y + $Drive = "V:" + + Write-Output "Custom dev drive created at $Drive" +} elseif (Test-Path "D:\") { + # Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
Write-Output "Using existing drive at D:" $Drive = "D:" } else { @@ -55,10 +85,8 @@ Write-Output ` "DEV_DRIVE=$($Drive)" ` "TMP=$($Tmp)" ` "TEMP=$($Tmp)" ` - "UV_INTERNAL__TEST_DIR=$($Tmp)" ` "RUSTUP_HOME=$($Drive)/.rustup" ` "CARGO_HOME=$($Drive)/.cargo" ` "UV_WORKSPACE=$($Drive)/uv" ` "PATH=$($Drive)/.cargo/bin;$env:PATH" ` >> $env:GITHUB_ENV - diff --git a/.github/workflows/sync-python-releases.yml b/.github/workflows/sync-python-releases.yml index 14b572e08..166458507 100644 --- a/.github/workflows/sync-python-releases.yml +++ b/.github/workflows/sync-python-releases.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0 + - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1 with: version: "latest" enable-cache: true diff --git a/.gitignore b/.gitignore index 07247a33c..8ccf60790 100644 --- a/.gitignore +++ b/.gitignore @@ -3,9 +3,10 @@ # Generated by Cargo # will have compiled files and executables +/vendor/ debug/ -target/ target-alpine/ +target/ # Bootstrapped Python versions /bin/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 982d8f296..1c8965c0f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: - id: validate-pyproject - repo: https://github.com/crate-ci/typos - rev: v1.33.1 + rev: v1.34.0 hooks: - id: typos @@ -42,7 +42,7 @@ repos: types_or: [yaml, json5] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.13 + rev: v0.12.2 hooks: - id: ruff-format - id: ruff diff --git a/CHANGELOG.md b/CHANGELOG.md index f62cedf62..c1c163331 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,8 +3,183 @@ -## 0.7.13 +## 0.7.19 +The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and considered ready for production use. + +The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with the goal of requiring zero configuration for most users, but provides flexible configuration to accommodate most Python project structures. It integrates tightly with uv, to improve messaging and user experience. It validates project metadata and structures, preventing common mistakes. And, finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with other build backends. + +To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section in your `pyproject.toml`: + +```toml +[build-system] +requires = ["uv_build>=0.7.19,<0.8.0"] +build-backend = "uv_build" +``` + +In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will remain compatible with all standards-compliant build backends. + +### Python + +- Add PGO distributions of Python for aarch64 Linux, which are more optimized for better performance + +See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702) for more details. 
+ +### Enhancements + +- Ignore Python patch version for `--universal` pip compile ([#14405](https://github.com/astral-sh/uv/pull/14405)) +- Update the tilde version specifier warning to include more context ([#14335](https://github.com/astral-sh/uv/pull/14335)) +- Clarify behavior and hint on tool install when no executables are available ([#14423](https://github.com/astral-sh/uv/pull/14423)) + +### Bug fixes + +- Make project and interpreter lock acquisition non-fatal ([#14404](https://github.com/astral-sh/uv/pull/14404)) +- Include `sys.prefix` in cached environment keys to avoid `--with` collisions across projects ([#14403](https://github.com/astral-sh/uv/pull/14403)) + +### Documentation + +- Add a migration guide from pip to uv projects ([#12382](https://github.com/astral-sh/uv/pull/12382)) + +## 0.7.18 + +### Python + +- Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14 + + These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows. + However, they can be requested with `cpython-<version>-windows-aarch64`. + +See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630) for more details. + +### Enhancements + +- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378)) +- Reuse build (virtual) environments across resolution and installation ([#14338](https://github.com/astral-sh/uv/pull/14338)) +- Improve trace message for cached Python interpreter query ([#14328](https://github.com/astral-sh/uv/pull/14328)) +- Use parsed URLs for conflicting URL error message ([#14380](https://github.com/astral-sh/uv/pull/14380)) + +### Preview features + +- Ignore invalid build backend settings when not building ([#14372](https://github.com/astral-sh/uv/pull/14372)) + +### Bug fixes + +- Fix equals-star and tilde-equals with `python_version` and `python_full_version` ([#14271](https://github.com/astral-sh/uv/pull/14271)) +- Include the canonical path in the interpreter query cache key ([#14331](https://github.com/astral-sh/uv/pull/14331)) +- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304)) +- Error instead of panic on conflict between global and subcommand flags ([#14368](https://github.com/astral-sh/uv/pull/14368)) +- Consistently normalize trailing slashes on URLs with no path segments ([#14349](https://github.com/astral-sh/uv/pull/14349)) + +### Documentation + +- Add instructions for publishing to JFrog's Artifactory ([#14253](https://github.com/astral-sh/uv/pull/14253)) +- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376)) + +## 0.7.17 + +### Bug fixes + +- Apply build constraints when resolving `--with` dependencies ([#14340](https://github.com/astral-sh/uv/pull/14340)) +- Drop trailing slashes when converting index URL from URL ([#14346](https://github.com/astral-sh/uv/pull/14346)) +- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336)) +- Fix error message ordering for `pyvenv.cfg` version conflict ([#14329](https://github.com/astral-sh/uv/pull/14329)) + +## 0.7.16 + +### Python + +- Add Python 3.14.0b3 + +See the +[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626) +for more details.
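The `~=` warning added in the 0.7.16 enhancements below (and the related equals-star/tilde-equals fix in 0.7.18 above) exists because tilde-equals without a patch version is much looser than users often expect. A sketch of the difference, assuming the standalone `pep440_rs` crate (the published ancestor of `uv-pep440`):

```rust
use std::str::FromStr;

use pep440_rs::{Version, VersionSpecifiers};

fn main() {
    // `~=3.10` desugars to `>=3.10, ==3.*`: any Python 3.x from 3.10 upward.
    let no_patch = VersionSpecifiers::from_str("~=3.10").unwrap();
    // `~=3.10.0` desugars to `>=3.10.0, ==3.10.*`: 3.10 patch releases only.
    let with_patch = VersionSpecifiers::from_str("~=3.10.0").unwrap();

    let python = Version::from_str("3.12.1").unwrap();
    assert!(no_patch.contains(&python)); // 3.12.1 satisfies `~=3.10`...
    assert!(!with_patch.contains(&python)); // ...but not `~=3.10.0`.
}
```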
+ +### Enhancements + +- Include path or URL when failing to convert in lockfile ([#14292](https://github.com/astral-sh/uv/pull/14292)) +- Warn when `~=` is used as a Python version specifier without a patch version ([#14008](https://github.com/astral-sh/uv/pull/14008)) + +### Preview features + +- Ensure preview default Python installs are upgradeable ([#14261](https://github.com/astral-sh/uv/pull/14261)) + +### Performance + +- Share workspace cache between lock and sync operations ([#14321](https://github.com/astral-sh/uv/pull/14321)) + +### Bug fixes + +- Allow local indexes to reference remote files ([#14294](https://github.com/astral-sh/uv/pull/14294)) +- Avoid rendering desugared prefix matches in error messages ([#14195](https://github.com/astral-sh/uv/pull/14195)) +- Avoid using path URL for workspace Git dependencies in `requirements.txt` ([#14288](https://github.com/astral-sh/uv/pull/14288)) +- Normalize index URLs to remove trailing slash ([#14245](https://github.com/astral-sh/uv/pull/14245)) +- Respect URL-encoded credentials in redirect location ([#14315](https://github.com/astral-sh/uv/pull/14315)) +- Lock the source tree when running setuptools, to protect concurrent builds ([#14174](https://github.com/astral-sh/uv/pull/14174)) + +### Documentation + +- Note that GCP Artifact Registry download URLs must have a `/simple` component ([#14251](https://github.com/astral-sh/uv/pull/14251)) + +## 0.7.15 + +### Enhancements + +- Consistently use `Ordering::Relaxed` for standalone atomic use cases ([#14190](https://github.com/astral-sh/uv/pull/14190)) +- Warn on ambiguous relative paths for `--index` ([#14152](https://github.com/astral-sh/uv/pull/14152)) +- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033)) +- Preserve newlines in `schema.json` descriptions ([#13693](https://github.com/astral-sh/uv/pull/13693)) + +### Bug fixes + +- Add check for using minor version link when creating a venv on Windows ([#14252](https://github.com/astral-sh/uv/pull/14252)) +- Strip query parameters when parsing source URL ([#14224](https://github.com/astral-sh/uv/pull/14224)) + +### Documentation + +- Add a link to PyPI FAQ to clarify what a per-project token is ([#14242](https://github.com/astral-sh/uv/pull/14242)) + +### Preview features + +- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212)) + +## 0.7.14 + +### Enhancements + +- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172)) +- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120)) +- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119)) +- Add `[tool.uv.dependency-groups].mygroup.requires-python` ([#13735](https://github.com/astral-sh/uv/pull/13735)) +- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176)) +- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897)) +- Support transparent Python patch version upgrades ([#13954](https://github.com/astral-sh/uv/pull/13954)) +- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940)) +- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088)) + +### Performance + +- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035)) + +### Bug fixes + +- Don't use walrus operator in interpreter query script ([#14108](https://github.com/astral-sh/uv/pull/14108))
+- Fix handling of changes to `requires-python` ([#14076](https://github.com/astral-sh/uv/pull/14076)) +- Fix implied `platform_machine` marker for `win_amd64` platform tag ([#14041](https://github.com/astral-sh/uv/pull/14041)) +- Only update existing symlink directories on preview uninstall ([#14179](https://github.com/astral-sh/uv/pull/14179)) +- Serialize Python requests for tools as canonicalized strings ([#14109](https://github.com/astral-sh/uv/pull/14109)) +- Support netrc and same-origin credential propagation on index redirects ([#14126](https://github.com/astral-sh/uv/pull/14126)) +- Support reading `dependency-groups` from pyproject.tomls with no `[project]` ([#13742](https://github.com/astral-sh/uv/pull/13742)) +- Handle an existing shebang in `uv init --script` ([#14141](https://github.com/astral-sh/uv/pull/14141)) +- Prevent concurrent updates of the environment in `uv run` ([#14153](https://github.com/astral-sh/uv/pull/14153)) +- Filter managed Python distributions by platform before querying when included in request ([#13936](https://github.com/astral-sh/uv/pull/13936)) + +### Documentation + +- Replace cuda124 with cuda128 ([#14168](https://github.com/astral-sh/uv/pull/14168)) +- Document the way member sources shadow workspace sources ([#14136](https://github.com/astral-sh/uv/pull/14136)) +- Sync documented PyTorch integration index for CUDA and ROCm versions from PyTorch website ([#14100](https://github.com/astral-sh/uv/pull/14100)) + +## 0.7.13 ### Python diff --git a/Cargo.lock b/Cargo.lock index 4dd29fe69..ef7511af5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -94,6 +94,15 @@ version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "arbitrary" version = "1.4.1" @@ -180,9 +189,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -364,6 +373,15 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bisection" version = "0.1.0" @@ -402,9 +420,9 @@ dependencies = [ [[package]] name = "boxcar" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf" +checksum = "26c4925bc979b677330a8c7fe7a8c94af2dbb4a2d37b4a20a80d884400f46baa" [[package]] name = "bstr" @@ -512,9 +530,9 @@ dependencies = [ [[package]] name = "cargo-util" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767bc85f367f6483a6072430b56f5c0d6ee7636751a21a800526d0711753d76" +checksum = 
"c95ec8b2485b20aed818bd7460f8eecc6c87c35c84191b353a3aba9aa1736c36" dependencies = [ "anyhow", "core-foundation", @@ -601,9 +619,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.39" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f" +checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" dependencies = [ "clap_builder", "clap_derive", @@ -611,9 +629,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.39" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51" +checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" dependencies = [ "anstream", "anstyle", @@ -654,9 +672,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" dependencies = [ "heck", "proc-macro2", @@ -672,22 +690,27 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "codspeed" -version = "2.10.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f4cce9c27c49c4f101fffeebb1826f41a9df2e7498b7cd4d95c0658b796c6c" +checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf" dependencies = [ + "anyhow", + "bincode", "colored", + "glob", "libc", + "nix 0.29.0", "serde", "serde_json", + "statrs", "uuid", ] [[package]] name = "codspeed-criterion-compat" -version = "2.10.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c23d880a28a2aab52d38ca8481dd7a3187157d0a952196b6db1db3c8499725" +checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a" dependencies = [ "codspeed", "codspeed-criterion-compat-walltime", @@ -696,9 +719,9 @@ dependencies = [ [[package]] name = "codspeed-criterion-compat-walltime" -version = "2.10.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0a2f7365e347f4f22a67e9ea689bf7bc89900a354e22e26cf8a531a42c8fbb" +checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64" dependencies = [ "anes", "cast", @@ -738,7 +761,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -765,7 +788,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "windows-sys 0.59.0", ] @@ -933,7 +956,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core 0.9.10", + "parking_lot_core", ] [[package]] @@ -1115,7 +1138,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1142,9 +1165,9 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ "event-listener", "pin-project-lite", @@ -1675,22 +1698,26 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "webpki-roots", + "webpki-roots 0.26.8", ] [[package]] name = "hyper-util" -version = "0.1.12" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9f1e950e0d9d1d3c47184416723cf29c0d1f93bd8cccf37e4beb6b44f31710" +checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http", "http-body", "hyper", + "ipnet", "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -1872,9 +1899,9 @@ checksum = "b72ad49b554c1728b1e83254a1b1565aea4161e28dabbfa171fc15fe62299caf" [[package]] name = "indexmap" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", "hashbrown 0.15.4", @@ -1890,7 +1917,7 @@ dependencies = [ "console", "number_prefix", "portable-atomic", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "web-time", ] @@ -1915,24 +1942,22 @@ dependencies = [ "similar", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "ipnet" version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" version = "0.4.15" @@ -1941,7 +1966,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ "hermit-abi 0.4.0", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1991,9 +2016,9 @@ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jiff" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a194df1107f33c79f4f93d02c80798520551949d59dfad22b6157048a88cca93" +checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" dependencies = [ "jiff-static", "jiff-tzdb-platform", @@ -2001,14 +2026,14 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "jiff-static" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c6e1db7ed32c6c71b759497fae34bf7933636f75a251b9e736555da426f6442" +checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", @@ -2113,7 +2138,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.9.1", "libc", - "redox_syscall 0.5.8", + 
"redox_syscall", ] [[package]] @@ -2227,9 +2252,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memmap2" @@ -2485,9 +2510,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "owo-colors" -version = "4.2.1" +version = "4.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26995317201fa17f3656c36716aed4a7c81743a9634ac4c99c0eeda495db0cec" +checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" [[package]] name = "parking" @@ -2495,17 +2520,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.3" @@ -2513,21 +2527,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -2538,7 +2538,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.8", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ -2885,7 +2885,7 @@ dependencies = [ "once_cell", "socket2", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2968,15 +2968,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b42e27ef78c35d3998403c1d26f3efd9e135d3e5121b0a4845cc5cc27547f4f" -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.5.8" @@ -3084,9 +3075,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" dependencies = [ "async-compression", "base64 0.22.1", @@ -3101,18 +3092,14 @@ dependencies = [ "hyper", "hyper-rustls", "hyper-util", - "ipnet", "js-sys", "log", - "mime", "mime_guess", - "once_cell", "percent-encoding", "pin-project-lite", "quinn", "rustls", "rustls-native-certs", - "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", @@ -3120,24 +3107,22 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-rustls", - "tokio-socks", "tokio-util", "tower", + "tower-http", 
"tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", - "windows-registry 0.4.0", + "webpki-roots 1.0.1", ] [[package]] name = "reqwest-middleware" version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e" +source = "git+https://github.com/astral-sh/reqwest-middleware?rev=ad8b9d332d1773fde8b4cd008486de5973e0a3f8#ad8b9d332d1773fde8b4cd008486de5973e0a3f8" dependencies = [ "anyhow", "async-trait", @@ -3151,8 +3136,7 @@ dependencies = [ [[package]] name = "reqwest-retry" version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c73e4195a6bfbcb174b790d9b3407ab90646976c55de58a6515da25d851178" +source = "git+https://github.com/astral-sh/reqwest-middleware?rev=ad8b9d332d1773fde8b4cd008486de5973e0a3f8#ad8b9d332d1773fde8b4cd008486de5973e0a3f8" dependencies = [ "anyhow", "async-trait", @@ -3160,14 +3144,13 @@ dependencies = [ "getrandom 0.2.15", "http", "hyper", - "parking_lot 0.11.2", "reqwest", "reqwest-middleware", "retry-policies", "thiserror 1.0.69", "tokio", "tracing", - "wasm-timer", + "wasmtimer", ] [[package]] @@ -3334,7 +3317,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3347,7 +3330,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.9.2", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3376,15 +3359,6 @@ dependencies = [ "security-framework", ] -[[package]] -name = "rustls-pemfile" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" -dependencies = [ - "rustls-pki-types", -] - [[package]] name = "rustls-pki-types" version = "1.11.0" @@ -3453,11 +3427,12 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.22" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ "dyn-clone", + "ref-cast", "schemars_derive", "serde", "serde_json", @@ -3466,9 +3441,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.22" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" dependencies = [ "proc-macro2", "quote", @@ -3604,9 +3579,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" dependencies = [ "serde", ] @@ -3732,9 +3707,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.15.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "smawk" @@ -3767,6 +3742,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "statrs" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a3fe7c28c6512e766b0874335db33c94ad7b8f9054228ae1c2abd47ce7d335e" +dependencies = [ + "approx", + "num-traits", +] + [[package]] name = "strict-num" version = "0.1.1" @@ -3847,9 +3832,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.101" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -3915,7 +3900,7 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96374855068f47402c3121c6eed88d29cb1de8f3ab27090e273e420bdabcf050" dependencies = [ - "parking_lot 0.12.3", + "parking_lot", ] [[package]] @@ -3928,7 +3913,7 @@ dependencies = [ "getrandom 0.3.1", "once_cell", "rustix 1.0.7", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3982,9 +3967,9 @@ dependencies = [ [[package]] name = "test-log" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f" +checksum = "1e33b98a582ea0be1168eba097538ee8dd4bbe0f2b01b22ac92ea30054e5be7b" dependencies = [ "test-log-macros", "tracing-subscriber", @@ -3992,9 +3977,9 @@ dependencies = [ [[package]] name = "test-log-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f" +checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36" dependencies = [ "proc-macro2", "quote", @@ -4009,7 +3994,7 @@ checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" dependencies = [ "smawk", "unicode-linebreak", - "unicode-width 0.2.0", + "unicode-width 0.2.1", ] [[package]] @@ -4157,7 +4142,7 @@ dependencies = [ "bytes", "libc", "mio", - "parking_lot 0.12.3", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", @@ -4186,18 +4171,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-socks" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d4770b8024672c1101b3f6733eab95b18007dbe0847a8afe341fcf79e06043f" -dependencies = [ - "either", - "futures-util", - "thiserror 1.0.69", - "tokio", -] - [[package]] name = "tokio-stream" version = "0.1.17" @@ -4226,9 +4199,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.22" +version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", "serde_spanned", @@ -4238,18 +4211,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.9" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.26" +version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap", "serde", @@ -4261,9 +4234,9 @@ dependencies = [ [[package]] name = "toml_write" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tower" @@ -4280,6 +4253,24 @@ dependencies = [ "tower-service", ] +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -4515,9 +4506,9 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" [[package]] name = "unsafe-libyaml" @@ -4617,7 +4608,7 @@ dependencies = [ [[package]] name = "uv" -version = "0.7.13" +version = "0.7.19" dependencies = [ "anstream", "anyhow", @@ -4630,13 +4621,14 @@ dependencies = [ "console", "ctrlc", "dotenvy", - "etcetera", + "dunce", "filetime", "flate2", "fs-err 3.1.1", "futures", "http", "ignore", + "indexmap", "indicatif", "indoc", "insta", @@ -4666,7 +4658,7 @@ dependencies = [ "tracing-durations-export", "tracing-subscriber", "tracing-tree", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "url", "uv-auth", "uv-build-backend", @@ -4765,7 +4757,6 @@ dependencies = [ "uv-configuration", "uv-dispatch", "uv-distribution", - "uv-distribution-filename", "uv-distribution-types", "uv-extract", "uv-install-wheel", @@ -4781,7 +4772,7 @@ dependencies = [ [[package]] name = "uv-build" -version = "0.7.13" +version = "0.7.19" dependencies = [ "anyhow", "uv-build-backend", @@ -4844,6 +4835,7 @@ dependencies = [ "tokio", "toml_edit", "tracing", + "uv-cache-key", "uv-configuration", "uv-distribution", "uv-distribution-types", @@ -4994,6 +4986,7 @@ dependencies = [ "uv-torch", "uv-version", "uv-warnings", + "wiremock", ] [[package]] @@ -5180,7 +5173,6 @@ dependencies = [ "serde", "smallvec", "thiserror 2.0.12", - "url", "uv-cache-key", "uv-normalize", "uv-pep440", @@ -5223,6 +5215,7 @@ dependencies = [ "uv-pypi-types", "uv-redacted", "uv-small-str", + "uv-warnings", "version-ranges", ] @@ -5465,7 +5458,7 @@ dependencies = [ "rkyv", "serde", "tracing", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "unscanny", "version-ranges", ] @@ -5488,7 +5481,7 @@ dependencies = [ "thiserror 2.0.12", "tracing", "tracing-test", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "url", "uv-fs", "uv-normalize", @@ -5595,15 +5588,18 @@ dependencies = [ "assert_fs", "clap", "configparser", + "dunce", "fs-err 3.1.1", "futures", "goblin", + "indexmap", "indoc", "insta", "itertools 0.14.0", "once_cell", "owo-colors", "procfs", + "ref-cast", "regex", "reqwest", "reqwest-middleware", @@ -5628,6 +5624,7 @@ dependencies = [ "uv-cache-info", "uv-cache-key", "uv-client", + 
"uv-configuration", "uv-dirs", "uv-distribution-filename", "uv-extract", @@ -5643,7 +5640,7 @@ dependencies = [ "uv-trampoline-builder", "uv-warnings", "which", - "windows-registry 0.5.2", + "windows-registry", "windows-result 0.3.4", "windows-sys 0.59.0", ] @@ -5790,6 +5787,7 @@ dependencies = [ "fs-err 3.1.1", "indoc", "memchr", + "regex", "serde", "thiserror 2.0.12", "toml", @@ -5799,6 +5797,7 @@ dependencies = [ "uv-pypi-types", "uv-redacted", "uv-settings", + "uv-warnings", "uv-workspace", ] @@ -5845,7 +5844,7 @@ dependencies = [ "tracing", "uv-fs", "uv-static", - "windows-registry 0.5.2", + "windows-registry", "windows-result 0.3.4", "windows-sys 0.59.0", ] @@ -5889,6 +5888,7 @@ dependencies = [ "toml_edit", "tracing", "uv-cache", + "uv-configuration", "uv-dirs", "uv-distribution-types", "uv-fs", @@ -5942,6 +5942,7 @@ name = "uv-types" version = "0.0.1" dependencies = [ "anyhow", + "dashmap", "rustc-hash", "thiserror 2.0.12", "uv-cache", @@ -5961,7 +5962,7 @@ dependencies = [ [[package]] name = "uv-version" -version = "0.7.13" +version = "0.7.19" [[package]] name = "uv-virtualenv" @@ -5973,6 +5974,7 @@ dependencies = [ "self-replace", "thiserror 2.0.12", "tracing", + "uv-configuration", "uv-fs", "uv-pypi-types", "uv-python", @@ -6013,6 +6015,7 @@ dependencies = [ "tracing", "uv-build-backend", "uv-cache-key", + "uv-configuration", "uv-distribution-types", "uv-fs", "uv-git-types", @@ -6181,18 +6184,17 @@ dependencies = [ ] [[package]] -name = "wasm-timer" -version = "0.2.5" +name = "wasmtimer" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23" dependencies = [ "futures", "js-sys", - "parking_lot 0.11.2", + "parking_lot", "pin-utils", + "slab", "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", ] [[package]] @@ -6224,6 +6226,15 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "webpki-roots" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8782dd5a41a24eed3a4f40b606249b3e236ca61adf1f25ea4d45c73de122b502" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "weezl" version = "0.1.8" @@ -6232,11 +6243,10 @@ checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" [[package]] name = "which" -version = "7.0.3" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" +checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d" dependencies = [ - "either", "env_home", "regex", "rustix 1.0.7", @@ -6249,7 +6259,7 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" dependencies = [ - "redox_syscall 0.5.8", + "redox_syscall", "wasite", "web-sys", ] @@ -6282,7 +6292,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -6368,7 +6378,7 @@ dependencies = [ "windows-interface 0.59.1", "windows-link", "windows-result 0.3.4", - "windows-strings 0.4.1", + "windows-strings 0.4.2", ] [[package]] @@ -6438,9 +6448,9 @@ dependencies = [ [[package]] name = "windows-link" -version = "0.1.1" +version = 
"0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-numerics" @@ -6454,24 +6464,13 @@ dependencies = [ [[package]] name = "windows-registry" -version = "0.4.0" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result 0.3.4", - "windows-strings 0.3.1", - "windows-targets 0.53.0", -] - -[[package]] -name = "windows-registry" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3bab093bdd303a1240bb99b8aba8ea8a69ee19d34c9e2ef9594e708a4878820" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ "windows-link", "windows-result 0.3.4", - "windows-strings 0.4.1", + "windows-strings 0.4.2", ] [[package]] @@ -6503,9 +6502,9 @@ dependencies = [ [[package]] name = "windows-strings" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a7ab927b2637c19b3dbe0965e75d8f2d30bdd697a1516191cad2ec4df8fb28a" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ "windows-link", ] @@ -6724,9 +6723,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.7" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cb8234a863ea0e8cd7284fcdd4f145233eb00fee02bbdd9861aec44e6477bc5" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" dependencies = [ "memchr", ] @@ -6739,9 +6738,9 @@ checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" [[package]] name = "wiremock" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "101681b74cd87b5899e87bcf5a64e83334dd313fcd3053ea72e6dba18928e301" +checksum = "a2b8b99d4cdbf36b239a9532e31fe4fb8acc38d1897c1761e161550a7dc78e6a" dependencies = [ "assert-json-diff", "async-trait", diff --git a/Cargo.toml b/Cargo.toml index 0f1d02b47..fc19dcc9a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ resolver = "2" [workspace.package] edition = "2024" -rust-version = "1.85" +rust-version = "1.86" homepage = "https://pypi.org/project/uv/" documentation = "https://pypi.org/project/uv/" repository = "https://github.com/astral-sh/uv" @@ -142,16 +142,16 @@ ref-cast = { version = "1.0.24" } reflink-copy = { version = "0.1.19" } regex = { version = "1.10.6" } regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] } -reqwest = { version = "=0.12.15", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] } -reqwest-middleware = { version = "0.4.0", features = ["multipart"] } -reqwest-retry = { version = "0.7.0" } +reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] } +reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = 
["multipart"] } +reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" } rkyv = { version = "0.8.8", features = ["bytecheck"] } rmp-serde = { version = "1.3.0" } rust-netrc = { version = "0.1.2" } rustc-hash = { version = "2.0.0" } rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] } same-file = { version = "1.0.6" } -schemars = { version = "0.8.21", features = ["url"] } +schemars = { version = "1.0.0", features = ["url2"] } seahash = { version = "4.1.0" } self-replace = { version = "1.5.0" } serde = { version = "1.0.210", features = ["derive", "rc"] } @@ -183,13 +183,13 @@ unscanny = { version = "0.1.0" } url = { version = "2.5.2", features = ["serde"] } version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" } walkdir = { version = "2.5.0" } -which = { version = "7.0.0", features = ["regex"] } +which = { version = "8.0.0", features = ["regex"] } windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] } windows-core = { version = "0.59.0" } windows-registry = { version = "0.5.0" } windows-result = { version = "0.3.0" } windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Ioctl", "Win32_System_IO", "Win32_System_Registry"] } -wiremock = { version = "0.6.2" } +wiremock = { version = "0.6.4" } xz2 = { version = "0.1.7" } zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] } @@ -214,6 +214,7 @@ missing_panics_doc = "allow" module_name_repetitions = "allow" must_use_candidate = "allow" similar_names = "allow" +struct_excessive_bools = "allow" too_many_arguments = "allow" too_many_lines = "allow" used_underscore_binding = "allow" @@ -296,85 +297,6 @@ codegen-units = 1 [profile.dist] inherits = "release" -# Config for 'dist' -[workspace.metadata.dist] -# The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.28.4" -# make a package being included in our releases opt-in instead of opt-out -dist = false -# CI backends to support -ci = "github" -# The installers to generate for each app -installers = ["shell", "powershell"] -# The archive format to use for windows builds (defaults .zip) -windows-archive = ".zip" -# The archive format to use for non-windows builds (defaults .tar.xz) -unix-archive = ".tar.gz" -# Target platforms to build apps for (Rust target-triple syntax) -targets = [ - "aarch64-apple-darwin", - "aarch64-pc-windows-msvc", - "aarch64-unknown-linux-gnu", - "aarch64-unknown-linux-musl", - "arm-unknown-linux-musleabihf", - "armv7-unknown-linux-gnueabihf", - "armv7-unknown-linux-musleabihf", - "i686-pc-windows-msvc", - "i686-unknown-linux-gnu", - "i686-unknown-linux-musl", - "powerpc64-unknown-linux-gnu", - "powerpc64le-unknown-linux-gnu", - "riscv64gc-unknown-linux-gnu", - "s390x-unknown-linux-gnu", - "x86_64-apple-darwin", - "x86_64-pc-windows-msvc", - "x86_64-unknown-linux-gnu", - "x86_64-unknown-linux-musl", -] -# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true) -auto-includes = false -# Whether dist should create a Github Release or use an existing draft -create-release = true -# Which actions to run on pull requests -pr-run-mode = "plan" -# Whether CI should trigger releases with dispatches instead of tag pushes -dispatch-releases = true -# Which phase dist should use to create the GitHub release -github-release 
= "announce" -# Whether CI should include auto-generated code to build local artifacts -build-local-artifacts = false -# Local artifacts jobs to run in CI -local-artifacts-jobs = ["./build-binaries", "./build-docker"] -# Publish jobs to run in CI -publish-jobs = ["./publish-pypi"] -# Post-announce jobs to run in CI -post-announce-jobs = ["./publish-docs"] -# Custom permissions for GitHub Jobs -github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read", id-token = "write", attestations = "write" } } -# Whether to install an updater program -install-updater = false -# Path that installers should place binaries in -install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"] - -[workspace.metadata.dist.github-custom-runners] -global = "depot-ubuntu-latest-4" - -[workspace.metadata.dist.min-glibc-version] -# Override glibc version for specific target triplets. -aarch64-unknown-linux-gnu = "2.28" -riscv64gc-unknown-linux-gnu = "2.31" -# Override all remaining glibc versions. -"*" = "2.17" - -[workspace.metadata.dist.github-action-commits] -"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4 -"actions/upload-artifact" = "6027e3dd177782cd8ab9af838c04fd81a07f1d47" # v4.6.2 -"actions/download-artifact" = "d3f86a106a0bac45b974a628896c90dbdf5c8093" # v4.3.0 -"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" #v2.2.3 - -[workspace.metadata.dist.binaries] -"*" = ["uv", "uvx"] -# Add "uvw" binary for Windows targets -aarch64-pc-windows-msvc = ["uv", "uvx", "uvw"] -i686-pc-windows-msvc = ["uv", "uvx", "uvw"] -x86_64-pc-windows-msvc = ["uv", "uvx", "uvw"] +[patch.crates-io] +reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" } +reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" } diff --git a/clippy.toml b/clippy.toml index 191195e33..1151d773d 100644 --- a/clippy.toml +++ b/clippy.toml @@ -6,6 +6,8 @@ doc-valid-idents = [ "GraalPy", "ReFS", "PyTorch", + "ROCm", + "XPU", ".." # Include the defaults ] @@ -35,7 +37,7 @@ disallowed-methods = [ "std::fs::soft_link", "std::fs::symlink_metadata", "std::fs::write", - "std::os::unix::fs::symlink", - "std::os::windows::fs::symlink_dir", - "std::os::windows::fs::symlink_file", + { path = "std::os::unix::fs::symlink", allow-invalid = true }, + { path = "std::os::windows::fs::symlink_dir", allow-invalid = true }, + { path = "std::os::windows::fs::symlink_file", allow-invalid = true }, ] diff --git a/crates/uv-auth/src/index.rs b/crates/uv-auth/src/index.rs index e17bbd8fe..b71bc9a62 100644 --- a/crates/uv-auth/src/index.rs +++ b/crates/uv-auth/src/index.rs @@ -86,7 +86,7 @@ impl Indexes { Self(FxHashSet::default()) } - /// Create a new [`AuthIndexUrls`] from an iterator of [`AuthIndexUrl`]s. + /// Create a new [`Indexes`] instance from an iterator of [`Index`]s. 
pub fn from_indexes(urls: impl IntoIterator<Item = Index>) -> Self { let mut index_urls = Self::new(); for url in urls { diff --git a/crates/uv-bench/Cargo.toml b/crates/uv-bench/Cargo.toml index 65ce78731..8c08d4dd2 100644 --- a/crates/uv-bench/Cargo.toml +++ b/crates/uv-bench/Cargo.toml @@ -18,11 +18,6 @@ workspace = true doctest = false bench = false -[[bench]] -name = "distribution-filename" -path = "benches/distribution_filename.rs" -harness = false - [[bench]] name = "uv" path = "benches/uv.rs" @@ -34,7 +29,6 @@ uv-client = { workspace = true } uv-configuration = { workspace = true } uv-dispatch = { workspace = true } uv-distribution = { workspace = true } -uv-distribution-filename = { workspace = true } uv-distribution-types = { workspace = true } uv-extract = { workspace = true, optional = true } uv-install-wheel = { workspace = true } @@ -48,8 +42,10 @@ uv-types = { workspace = true } uv-workspace = { workspace = true } anyhow = { workspace = true } -codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true } -criterion = { version = "0.6.0", default-features = false, features = ["async_tokio"] } +codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true } +criterion = { version = "0.6.0", default-features = false, features = [ + "async_tokio", +] } jiff = { workspace = true } tokio = { workspace = true } diff --git a/crates/uv-bench/benches/distribution_filename.rs b/crates/uv-bench/benches/distribution_filename.rs deleted file mode 100644 index 99d72cf05..000000000 --- a/crates/uv-bench/benches/distribution_filename.rs +++ /dev/null @@ -1,168 +0,0 @@ -use std::str::FromStr; - -use uv_bench::criterion::{ - BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime, -}; -use uv_distribution_filename::WheelFilename; -use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag, Tags}; - -/// A set of platform tags extracted from burntsushi's Archlinux workstation. -/// We could just re-create these via `Tags::from_env`, but those might differ -/// depending on the platform. This way, we always use the same data. It also -/// lets us assert tag compatibility regardless of where the benchmarks run. -const PLATFORM_TAGS: &[(&str, &str, &str)] = include!("../inputs/platform_tags.rs"); - -/// A set of wheel names used in the benchmarks below. We pick short and long -/// names, as well as compatible and not-compatibles (with `PLATFORM_TAGS`) -/// names. -/// -/// The tuple is (name, filename, compatible) where `name` is a descriptive -/// name for humans used in the benchmark definition. And `filename` is the -/// actual wheel filename we want to benchmark operation on. And `compatible` -/// indicates whether the tags in the wheel filename are expected to be -/// compatible with the tags in `PLATFORM_TAGS`. -const WHEEL_NAMES: &[(&str, &str, bool)] = &[ - // This tests a case with a very short name that *is* compatible with - // PLATFORM_TAGS. It only uses one tag for each component (one Python - // version, one ABI and one platform). - ( - "flyte-short-compatible", - "ipython-2.1.0-py3-none-any.whl", - true, - ), - // This tests a case with a long name that is *not* compatible. That - // is, all platform tags need to be checked against the tags in the - // wheel filename. This is essentially the worst possible practical - // case.
- ( - "flyte-long-incompatible", - "protobuf-3.5.2.post1-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", - false, - ), - // This tests a case with a long name that *is* compatible. We - // expect this to be (on average) quicker because the compatibility - // check stops as soon as a positive match is found. (Where as the - // incompatible case needs to check all tags.) - ( - "flyte-long-compatible", - "coverage-6.6.0b1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", - true, - ), -]; - -/// A list of names that are candidates for wheel filenames but will ultimately -/// fail to parse. -const INVALID_WHEEL_NAMES: &[(&str, &str)] = &[ - ("flyte-short-extension", "mock-5.1.0.tar.gz"), - ( - "flyte-long-extension", - "Pillow-5.4.0.dev0-py3.7-macosx-10.13-x86_64.egg", - ), -]; - -/// Benchmarks the construction of platform tags. -/// -/// This only happens ~once per program startup. Originally, construction was -/// trivial. But to speed up `WheelFilename::is_compatible`, we added some -/// extra processing. We thus expect construction to become slower, but we -/// write a benchmark to ensure it is still "reasonable." -fn benchmark_build_platform_tags(c: &mut Criterion) { - let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS - .iter() - .map(|&(py, abi, plat)| { - ( - LanguageTag::from_str(py).unwrap(), - AbiTag::from_str(abi).unwrap(), - PlatformTag::from_str(plat).unwrap(), - ) - }) - .collect(); - - let mut group = c.benchmark_group("build_platform_tags"); - group.bench_function(BenchmarkId::from_parameter("burntsushi-archlinux"), |b| { - b.iter(|| std::hint::black_box(Tags::new(tags.clone()))); - }); - group.finish(); -} - -/// Benchmarks `WheelFilename::from_str`. This has been observed to take some -/// non-trivial time in profiling (although, at time of writing, not as much -/// as tag compatibility). In the process of optimizing tag compatibility, -/// we tweaked wheel filename parsing. This benchmark was therefore added to -/// ensure we didn't regress here. -fn benchmark_wheelname_parsing(c: &mut Criterion) { - let mut group = c.benchmark_group("wheelname_parsing"); - for (name, filename, _) in WHEEL_NAMES.iter().copied() { - let len = u64::try_from(filename.len()).expect("length fits in u64"); - group.throughput(Throughput::Bytes(len)); - group.bench_function(BenchmarkId::from_parameter(name), |b| { - b.iter(|| { - filename - .parse::() - .expect("valid wheel filename"); - }); - }); - } - group.finish(); -} - -/// Benchmarks `WheelFilename::from_str` when it fails. This routine is called -/// on every filename in a package's metadata. A non-trivial portion of which -/// are not wheel filenames. Ensuring that the error path is fast is thus -/// probably a good idea. -fn benchmark_wheelname_parsing_failure(c: &mut Criterion) { - let mut group = c.benchmark_group("wheelname_parsing_failure"); - for (name, filename) in INVALID_WHEEL_NAMES.iter().copied() { - let len = u64::try_from(filename.len()).expect("length fits in u64"); - group.throughput(Throughput::Bytes(len)); - group.bench_function(BenchmarkId::from_parameter(name), |b| { - b.iter(|| { - filename - .parse::() - .expect_err("invalid wheel filename"); - }); - }); - } - group.finish(); -} - -/// Benchmarks the `WheelFilename::is_compatible` routine. This was revealed -/// to be the #1 bottleneck in the resolver. 
The main issue was that the -/// set of platform tags (generated once) is quite large, and the original -/// implementation did an exhaustive search over each of them for each tag in -/// the wheel filename. -fn benchmark_wheelname_tag_compatibility(c: &mut Criterion) { - let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS - .iter() - .map(|&(py, abi, plat)| { - ( - LanguageTag::from_str(py).unwrap(), - AbiTag::from_str(abi).unwrap(), - PlatformTag::from_str(plat).unwrap(), - ) - }) - .collect(); - let tags = Tags::new(tags); - - let mut group = c.benchmark_group("wheelname_tag_compatibility"); - for (name, filename, expected) in WHEEL_NAMES.iter().copied() { - let wheelname: WheelFilename = filename.parse().expect("valid wheel filename"); - let len = u64::try_from(filename.len()).expect("length fits in u64"); - group.throughput(Throughput::Bytes(len)); - group.bench_function(BenchmarkId::from_parameter(name), |b| { - b.iter(|| { - assert_eq!(expected, wheelname.is_compatible(&tags)); - }); - }); - } - group.finish(); -} - -criterion_group!( - uv_distribution_filename, - benchmark_build_platform_tags, - benchmark_wheelname_parsing, - benchmark_wheelname_parsing_failure, - benchmark_wheelname_tag_compatibility, -); -criterion_main!(uv_distribution_filename); diff --git a/crates/uv-bench/benches/uv.rs b/crates/uv-bench/benches/uv.rs index 95106a52b..9bdd7adb9 100644 --- a/crates/uv-bench/benches/uv.rs +++ b/crates/uv-bench/benches/uv.rs @@ -91,7 +91,7 @@ mod resolver { }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution::DistributionDatabase; - use uv_distribution_types::{DependencyMetadata, IndexLocations}; + use uv_distribution_types::{DependencyMetadata, IndexLocations, RequiresPython}; use uv_install_wheel::LinkMode; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder}; @@ -99,8 +99,8 @@ mod resolver { use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment}; use uv_python::Interpreter; use uv_resolver::{ - FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, RequiresPython, - Resolver, ResolverEnvironment, ResolverOutput, + FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver, + ResolverEnvironment, ResolverOutput, }; use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy}; use uv_workspace::WorkspaceCache; diff --git a/crates/uv-build-backend/src/lib.rs b/crates/uv-build-backend/src/lib.rs index 15ff81a4b..548214c32 100644 --- a/crates/uv-build-backend/src/lib.rs +++ b/crates/uv-build-backend/src/lib.rs @@ -9,12 +9,12 @@ pub use settings::{BuildBackendSettings, WheelDataIncludes}; pub use source_dist::{build_source_dist, list_source_dist}; pub use wheel::{build_editable, build_wheel, list_wheel, metadata}; -use std::fs::FileType; use std::io; use std::path::{Path, PathBuf}; use std::str::FromStr; use thiserror::Error; use tracing::debug; +use walkdir::DirEntry; use uv_fs::Simplified; use uv_globfilter::PortableGlobError; @@ -54,8 +54,6 @@ pub enum Error { #[source] err: walkdir::Error, }, - #[error("Unsupported file type {:?}: `{}`", _1, _0.user_display())] - UnsupportedFileType(PathBuf, FileType), #[error("Failed to write wheel zip archive")] Zip(#[from] zip::result::ZipError), #[error("Failed to write RECORD file")] @@ -86,6 +84,16 @@ trait DirectoryWriter { /// Files added through the method are considered generated when listing included files. fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error>; + /// Add the file or directory to the path. 
+ fn write_dir_entry(&mut self, entry: &DirEntry, target_path: &str) -> Result<(), Error> { + if entry.file_type().is_dir() { + self.write_directory(target_path)?; + } else { + self.write_file(target_path, entry.path())?; + } + Ok(()) + } + /// Add a local file. fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error>; diff --git a/crates/uv-build-backend/src/settings.rs b/crates/uv-build-backend/src/settings.rs index fc495c268..3b413e8e3 100644 --- a/crates/uv-build-backend/src/settings.rs +++ b/crates/uv-build-backend/src/settings.rs @@ -4,10 +4,6 @@ use uv_macros::OptionsMetadata; /// Settings for the uv build backend (`uv_build`). /// -/// !!! note -/// -/// The uv build backend is currently in preview and may change in any future release. -/// /// Note that those settings only apply when using the `uv_build` backend, other build backends /// (such as hatchling) have their own configuration. /// diff --git a/crates/uv-build-backend/src/source_dist.rs b/crates/uv-build-backend/src/source_dist.rs index 6285ae7c0..0a302ccf2 100644 --- a/crates/uv-build-backend/src/source_dist.rs +++ b/crates/uv-build-backend/src/source_dist.rs @@ -250,32 +250,16 @@ fn write_source_dist( .expect("walkdir starts with root"); if !include_matcher.match_path(relative) || exclude_matcher.is_match(relative) { - trace!("Excluding: `{}`", relative.user_display()); + trace!("Excluding from sdist: `{}`", relative.user_display()); continue; } - debug!("Including {}", relative.user_display()); - if entry.file_type().is_dir() { - writer.write_directory( - &Path::new(&top_level) - .join(relative) - .portable_display() - .to_string(), - )?; - } else if entry.file_type().is_file() { - writer.write_file( - &Path::new(&top_level) - .join(relative) - .portable_display() - .to_string(), - entry.path(), - )?; - } else { - return Err(Error::UnsupportedFileType( - relative.to_path_buf(), - entry.file_type(), - )); - } + let entry_path = Path::new(&top_level) + .join(relative) + .portable_display() + .to_string(); + debug!("Adding to sdist: {}", relative.user_display()); + writer.write_dir_entry(&entry, &entry_path)?; } debug!("Visited {files_visited} files for source dist build"); diff --git a/crates/uv-build-backend/src/wheel.rs b/crates/uv-build-backend/src/wheel.rs index da376d078..7da232941 100644 --- a/crates/uv-build-backend/src/wheel.rs +++ b/crates/uv-build-backend/src/wheel.rs @@ -164,7 +164,7 @@ fn write_wheel( .path() .strip_prefix(source_tree) .expect("walkdir starts with root"); - let wheel_path = entry + let entry_path = entry .path() .strip_prefix(&src_root) .expect("walkdir starts with root"); @@ -172,21 +172,10 @@ fn write_wheel( trace!("Excluding from module: `{}`", match_path.user_display()); continue; } - let wheel_path = wheel_path.portable_display().to_string(); - debug!("Adding to wheel: `{wheel_path}`"); - - if entry.file_type().is_dir() { - wheel_writer.write_directory(&wheel_path)?; - } else if entry.file_type().is_file() { - wheel_writer.write_file(&wheel_path, entry.path())?; - } else { - // TODO(konsti): We may want to support symlinks, there is support for installing them. 
-            return Err(Error::UnsupportedFileType(
-                entry.path().to_path_buf(),
-                entry.file_type(),
-            ));
-        }
+        let entry_path = entry_path.portable_display().to_string();
+        debug!("Adding to wheel: {entry_path}");
+        wheel_writer.write_dir_entry(&entry, &entry_path)?;
     }
     debug!("Visited {files_visited} files for wheel build");
 
@@ -519,23 +508,12 @@ fn wheel_subdir_from_globs(
             continue;
         }
 
-        let relative_licenses = Path::new(target)
+        let license_path = Path::new(target)
             .join(relative)
             .portable_display()
             .to_string();
-
-        if entry.file_type().is_dir() {
-            wheel_writer.write_directory(&relative_licenses)?;
-        } else if entry.file_type().is_file() {
-            debug!("Adding {} file: `{}`", globs_field, relative.user_display());
-            wheel_writer.write_file(&relative_licenses, entry.path())?;
-        } else {
-            // TODO(konsti): We may want to support symlinks, there is support for installing them.
-            return Err(Error::UnsupportedFileType(
-                entry.path().to_path_buf(),
-                entry.file_type(),
-            ));
-        }
+        debug!("Adding for {}: `{}`", globs_field, relative.user_display());
+        wheel_writer.write_dir_entry(&entry, &license_path)?;
     }
     Ok(())
 }
diff --git a/crates/uv-build-frontend/Cargo.toml b/crates/uv-build-frontend/Cargo.toml
index 83f8008d9..748e7bb28 100644
--- a/crates/uv-build-frontend/Cargo.toml
+++ b/crates/uv-build-frontend/Cargo.toml
@@ -17,6 +17,7 @@ doctest = false
 workspace = true
 
 [dependencies]
+uv-cache-key = { workspace = true }
 uv-configuration = { workspace = true }
 uv-distribution = { workspace = true }
 uv-distribution-types = { workspace = true }
diff --git a/crates/uv-build-frontend/src/lib.rs b/crates/uv-build-frontend/src/lib.rs
index 1c29b2c31..5cbaece2e 100644
--- a/crates/uv-build-frontend/src/lib.rs
+++ b/crates/uv-build-frontend/src/lib.rs
@@ -25,11 +25,14 @@ use tempfile::TempDir;
 use tokio::io::AsyncBufReadExt;
 use tokio::process::Command;
 use tokio::sync::{Mutex, Semaphore};
-use tracing::{Instrument, debug, info_span, instrument};
+use tracing::{Instrument, debug, info_span, instrument, warn};
 
+use uv_cache_key::cache_digest;
+use uv_configuration::PreviewMode;
 use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution::BuildRequires;
 use uv_distribution_types::{IndexLocations, Requirement, Resolution};
+use uv_fs::LockedFile;
 use uv_fs::{PythonExt, Simplified};
 use uv_pep440::Version;
 use uv_pep508::PackageName;
@@ -200,6 +203,11 @@ impl Pep517Backend {
             {import}
         "#, backend_path = backend_path_encoded}
     }
+
+    fn is_setuptools(&self) -> bool {
+        // Either `setuptools.build_meta` or `setuptools.build_meta:__legacy__`.
+        self.backend.split(':').next() == Some("setuptools.build_meta")
+    }
 }
 
 /// Uses an [`Rc`] internally, clone freely.
@@ -278,6 +286,7 @@ impl SourceBuild {
         mut environment_variables: FxHashMap<OsString, OsString>,
         level: BuildOutput,
         concurrent_builds: usize,
+        preview: PreviewMode,
     ) -> Result<SourceBuild, Error> {
         let temp_dir = build_context.cache().venv_dir()?;
@@ -325,6 +334,8 @@
                 false,
                 false,
                 false,
+                false,
+                preview,
             )?
         };
@@ -430,6 +441,31 @@
         })
     }
 
+    /// Acquire a lock on the source tree, if necessary.
+    async fn acquire_lock(&self) -> Result<Option<LockedFile>, Error> {
+        // Depending on the command, setuptools puts `*.egg-info`, `build/`, and `dist/` in the
+        // source tree, and concurrent invocations of setuptools using the same source dir can
+        // stomp on each other. We need to lock something to fix that, but we don't want to dump a
+        // `.lock` file into the source tree that the user will need to .gitignore. Take a global
+        // proxy lock instead.
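
As an aside on the proxy-lock scheme described in the comment above: the key idea is to key a lock file in the system temp directory on a digest of the canonicalized source path, so concurrent builds of the same tree serialize without polluting the tree itself. A minimal standalone sketch of that idea (std-only; `lock_path_for` and the hash choice are illustrative stand-ins, not uv's `cache_digest`):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};

/// Illustrative stand-in for a digest-keyed lock path: two invocations on the
/// same source tree (however it was spelled on the command line) resolve to
/// the same lock file in the system temp directory.
fn lock_path_for(source_tree: &Path) -> std::io::Result<PathBuf> {
    // Canonicalize so `./pkg` and `/abs/path/pkg` map to the same lock.
    let canonical = source_tree.canonicalize()?;
    let mut hasher = DefaultHasher::new();
    canonical.hash(&mut hasher);
    Ok(std::env::temp_dir().join(format!("uv-setuptools-{:016x}.lock", hasher.finish())))
}
```
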
+        let mut source_tree_lock = None;
+        if self.pep517_backend.is_setuptools() {
+            debug!("Locking the source tree for setuptools");
+            let canonical_source_path = self.source_tree.canonicalize()?;
+            let lock_path = env::temp_dir().join(format!(
+                "uv-setuptools-{}.lock",
+                cache_digest(&canonical_source_path)
+            ));
+            source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
+                .await
+                .inspect_err(|err| {
+                    warn!("Failed to acquire build lock: {err}");
+                })
+                .ok();
+        }
+        Ok(source_tree_lock)
+    }
+
     async fn get_resolved_requirements(
         build_context: &impl BuildContext,
         source_build_context: SourceBuildContext,
@@ -600,6 +636,9 @@ impl SourceBuild {
             return Ok(Some(metadata_dir.clone()));
         }
 
+        // Lock the source tree, if necessary.
+        let _lock = self.acquire_lock().await?;
+
         // Hatch allows for highly dynamic customization of metadata via hooks. In such cases, Hatch
         // can't uphold the PEP 517 contract, in that the metadata Hatch would return by
         // `prepare_metadata_for_build_wheel` isn't guaranteed to match that of the built wheel.
@@ -712,16 +751,15 @@ impl SourceBuild {
     pub async fn build(&self, wheel_dir: &Path) -> Result<String, Error> {
         // The build scripts run with the extracted root as cwd, so they need the absolute path.
         let wheel_dir = std::path::absolute(wheel_dir)?;
-        let filename = self.pep517_build(&wheel_dir, &self.pep517_backend).await?;
+        let filename = self.pep517_build(&wheel_dir).await?;
         Ok(filename)
     }
 
     /// Perform a PEP 517 build for a wheel or source distribution (sdist).
-    async fn pep517_build(
-        &self,
-        output_dir: &Path,
-        pep517_backend: &Pep517Backend,
-    ) -> Result<String, Error> {
+    async fn pep517_build(&self, output_dir: &Path) -> Result<String, Error> {
+        // Lock the source tree, if necessary.
+        let _lock = self.acquire_lock().await?;
+
        // Write the hook output to a file so that we can read it back reliably.
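
Before moving on from the locking changes above: `acquire_lock` keys its decision on the `is_setuptools` helper, which matches both the modern and legacy setuptools backends because they share the `setuptools.build_meta` module path before the optional `:object` suffix. A quick runnable restatement of that check (a free-function sketch, not uv's method):

```rust
// Free-function restatement of the `is_setuptools` helper, for clarity.
fn is_setuptools(backend: &str) -> bool {
    backend.split(':').next() == Some("setuptools.build_meta")
}

fn main() {
    assert!(is_setuptools("setuptools.build_meta"));
    assert!(is_setuptools("setuptools.build_meta:__legacy__"));
    assert!(!is_setuptools("hatchling.build"));
    // The module path must match exactly; similar prefixes don't count.
    assert!(!is_setuptools("setuptools.build_meta2"));
}
```
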
        let outfile = self
            .temp_dir
@@ -733,7 +771,7 @@ impl SourceBuild {
             BuildKind::Sdist => {
                 debug!(
                     r#"Calling `{}.build_{}("{}", {})`"#,
-                    pep517_backend.backend,
+                    self.pep517_backend.backend,
                     self.build_kind,
                     output_dir.escape_for_python(),
                     self.config_settings.escape_for_python(),
@@ -746,7 +784,7 @@ impl SourceBuild {
                     with open("{}", "w") as fp:
                         fp.write(sdist_filename)
                     "#,
-                    pep517_backend.backend_import(),
+                    self.pep517_backend.backend_import(),
                     self.build_kind,
                     output_dir.escape_for_python(),
                     self.config_settings.escape_for_python(),
@@ -762,7 +800,7 @@ impl SourceBuild {
                 });
                 debug!(
                     r#"Calling `{}.build_{}("{}", {}, {})`"#,
-                    pep517_backend.backend,
+                    self.pep517_backend.backend,
                     self.build_kind,
                     output_dir.escape_for_python(),
                     self.config_settings.escape_for_python(),
@@ -776,7 +814,7 @@ impl SourceBuild {
                     with open("{}", "w") as fp:
                         fp.write(wheel_filename)
                     "#,
-                    pep517_backend.backend_import(),
+                    self.pep517_backend.backend_import(),
                     self.build_kind,
                     output_dir.escape_for_python(),
                     self.config_settings.escape_for_python(),
@@ -806,7 +844,7 @@ impl SourceBuild {
             return Err(Error::from_command_output(
                 format!(
                     "Call to `{}.build_{}` failed",
-                    pep517_backend.backend, self.build_kind
+                    self.pep517_backend.backend, self.build_kind
                 ),
                 &output,
                 self.level,
@@ -821,7 +859,7 @@ impl SourceBuild {
             return Err(Error::from_command_output(
                 format!(
                     "Call to `{}.build_{}` failed",
-                    pep517_backend.backend, self.build_kind
+                    self.pep517_backend.backend, self.build_kind
                 ),
                 &output,
                 self.level,
diff --git a/crates/uv-build/Cargo.toml b/crates/uv-build/Cargo.toml
index 01f6f7cbe..34dfa996a 100644
--- a/crates/uv-build/Cargo.toml
+++ b/crates/uv-build/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "uv-build"
-version = "0.7.13"
+version = "0.7.19"
 edition.workspace = true
 rust-version.workspace = true
 homepage.workspace = true
diff --git a/crates/uv-build/pyproject.toml b/crates/uv-build/pyproject.toml
index 7e556e6bf..660e95c95 100644
--- a/crates/uv-build/pyproject.toml
+++ b/crates/uv-build/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "uv-build"
-version = "0.7.13"
+version = "0.7.19"
 description = "The uv build backend"
 authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
 requires-python = ">=3.8"
diff --git a/crates/uv-build/ruff.toml b/crates/uv-build/ruff.toml
new file mode 100644
index 000000000..e480507a2
--- /dev/null
+++ b/crates/uv-build/ruff.toml
@@ -0,0 +1,2 @@
+# It is important to retain compatibility with old versions in the build backend
+target-version = "py37"
diff --git a/crates/uv-cli/src/compat.rs b/crates/uv-cli/src/compat.rs
index 50f4c173d..d29afa760 100644
--- a/crates/uv-cli/src/compat.rs
+++ b/crates/uv-cli/src/compat.rs
@@ -13,7 +13,6 @@ pub trait CompatArgs {
 /// For example, users often pass `--allow-unsafe`, which is unnecessary with uv. But it's a
 /// nice user experience to warn, rather than fail, when users pass `--allow-unsafe`.
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct PipCompileCompatArgs {
     #[clap(long, hide = true)]
     allow_unsafe: bool,
@@ -159,7 +158,6 @@ impl CompatArgs for PipCompileCompatArgs {
 ///
 /// These represent a subset of the `pip list` interface that uv supports by default.
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct PipListCompatArgs {
     #[clap(long, hide = true)]
     disable_pip_version_check: bool,
@@ -184,7 +182,6 @@ impl CompatArgs for PipListCompatArgs {
 ///
 /// These represent a subset of the `pip-sync` interface that uv supports by default.
#[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipSyncCompatArgs { #[clap(short, long, hide = true)] ask: bool, @@ -268,7 +265,6 @@ enum Resolver { /// /// These represent a subset of the `virtualenv` interface that uv supports by default. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct VenvCompatArgs { #[clap(long, hide = true)] clear: bool, @@ -327,7 +323,6 @@ impl CompatArgs for VenvCompatArgs { /// /// These represent a subset of the `pip install` interface that uv supports by default. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipInstallCompatArgs { #[clap(long, hide = true)] disable_pip_version_check: bool, @@ -361,7 +356,6 @@ impl CompatArgs for PipInstallCompatArgs { /// /// These represent a subset of the `pip` interface that exists on all commands. #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipGlobalCompatArgs { #[clap(long, hide = true)] disable_pip_version_check: bool, diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 0b96875e5..bf605198f 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -85,7 +85,6 @@ const STYLES: Styles = Styles::styled() disable_version_flag = true )] #[command(styles=STYLES)] -#[allow(clippy::struct_excessive_bools)] pub struct Cli { #[command(subcommand)] pub command: Box, @@ -133,7 +132,6 @@ pub struct TopLevelArgs { #[derive(Parser, Debug, Clone)] #[command(next_help_heading = "Global options", next_display_order = 1000)] -#[allow(clippy::struct_excessive_bools)] pub struct GlobalArgs { #[arg( global = true, @@ -526,7 +524,6 @@ pub struct HelpArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("operation"))] -#[allow(clippy::struct_excessive_bools)] pub struct VersionArgs { /// Set the project version to this value /// @@ -657,7 +654,6 @@ pub struct SelfUpdateArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct CacheNamespace { #[command(subcommand)] pub command: CacheCommand, @@ -687,14 +683,12 @@ pub enum CacheCommand { } #[derive(Args, Debug)] -#[allow(clippy::struct_excessive_bools)] pub struct CleanArgs { /// The packages to remove from the cache. pub package: Vec, } #[derive(Args, Debug)] -#[allow(clippy::struct_excessive_bools)] pub struct PruneArgs { /// Optimize the cache for persistence in a continuous integration environment, like GitHub /// Actions. @@ -714,7 +708,6 @@ pub struct PruneArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipNamespace { #[command(subcommand)] pub command: PipCommand, @@ -1095,7 +1088,6 @@ fn parse_maybe_string(input: &str) -> Result, String> { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] pub struct PipCompileArgs { /// Include all packages listed in the given `requirements.in` files. @@ -1443,7 +1435,6 @@ pub struct PipCompileArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipSyncArgs { /// Include all packages listed in the given `requirements.txt` files. /// @@ -1700,7 +1691,6 @@ pub struct PipSyncArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -#[allow(clippy::struct_excessive_bools)] pub struct PipInstallArgs { /// Install all listed packages. 
/// @@ -2015,7 +2005,6 @@ pub struct PipInstallArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -#[allow(clippy::struct_excessive_bools)] pub struct PipUninstallArgs { /// Uninstall all listed packages. #[arg(group = "sources")] @@ -2104,7 +2093,6 @@ pub struct PipUninstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipFreezeArgs { /// Exclude any editable packages from output. #[arg(long)] @@ -2159,7 +2147,6 @@ pub struct PipFreezeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipListArgs { /// Only include editable projects. #[arg(short, long)] @@ -2235,7 +2222,6 @@ pub struct PipListArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipCheckArgs { /// The Python interpreter for which packages should be checked. /// @@ -2271,7 +2257,6 @@ pub struct PipCheckArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipShowArgs { /// The package(s) to display. pub package: Vec, @@ -2325,7 +2310,6 @@ pub struct PipShowArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PipTreeArgs { /// Show the version constraint(s) imposed on each package. #[arg(long)] @@ -2382,7 +2366,6 @@ pub struct PipTreeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct BuildArgs { /// The directory from which distributions should be built, or a source /// distribution archive to build into a wheel. @@ -2529,7 +2512,6 @@ pub struct BuildArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct VenvArgs { /// The Python interpreter to use for the virtual environment. /// @@ -2725,7 +2707,6 @@ pub enum AuthorFrom { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct InitArgs { /// The path to use for the project/script. /// @@ -2883,7 +2864,6 @@ pub struct InitArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct RunArgs { /// Include optional dependencies from the specified extra name. /// @@ -3170,7 +3150,6 @@ pub struct RunArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct SyncArgs { /// Include optional dependencies from the specified extra name. /// @@ -3427,7 +3406,6 @@ pub struct SyncArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct LockArgs { /// Check if the lockfile is up-to-date. /// @@ -3489,7 +3467,6 @@ pub struct LockArgs { #[derive(Args)] #[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))] -#[allow(clippy::struct_excessive_bools)] pub struct AddArgs { /// The packages to add, as PEP 508 requirements (e.g., `ruff==0.5.0`). #[arg(group = "sources")] @@ -3674,7 +3651,6 @@ pub struct AddArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct RemoveArgs { /// The names of the dependencies to remove (e.g., `ruff`). #[arg(required = true)] @@ -3769,7 +3745,6 @@ pub struct RemoveArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct TreeArgs { /// Show a platform-independent dependency tree. /// @@ -3909,7 +3884,6 @@ pub struct TreeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ExportArgs { /// The format to which `uv.lock` should be exported. 
/// @@ -4124,7 +4098,6 @@ pub struct ExportArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolNamespace { #[command(subcommand)] pub command: ToolCommand, @@ -4217,7 +4190,6 @@ pub enum ToolCommand { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolRunArgs { /// The command to run. /// @@ -4336,7 +4308,6 @@ pub struct UvxArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolInstallArgs { /// The package to install commands from. pub package: String, @@ -4425,7 +4396,6 @@ pub struct ToolInstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolListArgs { /// Whether to display the path to each tool environment and installed executable. #[arg(long)] @@ -4452,7 +4422,6 @@ pub struct ToolListArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolDirArgs { /// Show the directory into which `uv tool` will install executables. /// @@ -4471,7 +4440,6 @@ pub struct ToolDirArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolUninstallArgs { /// The name of the tool to uninstall. #[arg(required = true)] @@ -4483,7 +4451,6 @@ pub struct ToolUninstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct ToolUpgradeArgs { /// The name of the tool to upgrade, along with an optional version specifier. #[arg(required = true)] @@ -4713,7 +4680,6 @@ pub struct ToolUpgradeArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonNamespace { #[command(subcommand)] pub command: PythonCommand, @@ -4756,6 +4722,24 @@ pub enum PythonCommand { /// See `uv help python` to view supported request formats. Install(PythonInstallArgs), + /// Upgrade installed Python versions to the latest supported patch release (requires the + /// `--preview` flag). + /// + /// A target Python minor version to upgrade may be provided, e.g., `3.13`. Multiple versions + /// may be provided to perform more than one upgrade. + /// + /// If no target version is provided, then uv will upgrade all managed CPython versions. + /// + /// During an upgrade, uv will not uninstall outdated patch versions. + /// + /// When an upgrade is performed, virtual environments created by uv will automatically + /// use the new version. However, if the virtual environment was created before the + /// upgrade functionality was added, it will continue to use the old Python version; to enable + /// upgrades, the environment must be recreated. + /// + /// Upgrades are not yet supported for alternative implementations, like PyPy. + Upgrade(PythonUpgradeArgs), + /// Search for a Python installation. /// /// Displays the path to the Python executable. @@ -4793,7 +4777,6 @@ pub enum PythonCommand { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonListArgs { /// A Python request to filter by. /// @@ -4848,7 +4831,6 @@ pub struct PythonListArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonDirArgs { /// Show the directory into which `uv python` will install Python executables. /// @@ -4866,7 +4848,6 @@ pub struct PythonDirArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] pub struct PythonInstallArgs { /// The directory to store the Python installation in. /// @@ -4945,7 +4926,50 @@ pub struct PythonInstallArgs { } #[derive(Args)] -#[allow(clippy::struct_excessive_bools)] +pub struct PythonUpgradeArgs { + /// The directory Python installations are stored in. 
+    ///
+    /// If provided, `UV_PYTHON_INSTALL_DIR` will need to be set in subsequent operations for uv to
+    /// discover the Python installation.
+    ///
+    /// See `uv python dir` to view the current Python installation directory. Defaults to
+    /// `~/.local/share/uv/python`.
+    #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
+    pub install_dir: Option<PathBuf>,
+
+    /// The Python minor version(s) to upgrade.
+    ///
+    /// If no target version is provided, then uv will upgrade all managed CPython versions.
+    #[arg(env = EnvVars::UV_PYTHON)]
+    pub targets: Vec<String>,
+
+    /// Set the URL to use as the source for downloading Python installations.
+    ///
+    /// The provided URL will replace
+    /// `https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g.,
+    /// `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.
+    ///
+    /// Distributions can be read from a local directory by using the `file://` URL scheme.
+    #[arg(long, env = EnvVars::UV_PYTHON_INSTALL_MIRROR)]
+    pub mirror: Option<String>,
+
+    /// Set the URL to use as the source for downloading PyPy installations.
+    ///
+    /// The provided URL will replace `https://downloads.python.org/pypy` in, e.g.,
+    /// `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`.
+    ///
+    /// Distributions can be read from a local directory by using the `file://` URL scheme.
+    #[arg(long, env = EnvVars::UV_PYPY_INSTALL_MIRROR)]
+    pub pypy_mirror: Option<String>,
+
+    /// URL pointing to JSON of custom Python installations.
+    ///
+    /// Note that currently, only local paths are supported.
+    #[arg(long, env = EnvVars::UV_PYTHON_DOWNLOADS_JSON_URL)]
+    pub python_downloads_json_url: Option<String>,
+}
+
+#[derive(Args)]
 pub struct PythonUninstallArgs {
     /// The directory where the Python was installed.
     #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
     pub install_dir: Option<PathBuf>,
@@ -4963,7 +4987,6 @@ pub struct PythonUninstallArgs {
 }
 
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct PythonFindArgs {
     /// The Python request.
     ///
@@ -5012,7 +5035,6 @@ pub struct PythonFindArgs {
 }
 
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct PythonPinArgs {
     /// The Python version request.
     ///
@@ -5061,7 +5083,6 @@ pub struct PythonPinArgs {
 }
 
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct GenerateShellCompletionArgs {
     /// The shell to generate the completion script for
     pub shell: clap_complete_command::Shell,
@@ -5100,7 +5121,6 @@ pub struct GenerateShellCompletionArgs {
 }
 
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct IndexArgs {
     /// The URLs to use when resolving dependencies, in addition to the default index.
     ///
@@ -5110,6 +5130,9 @@ pub struct IndexArgs {
     /// All indexes provided via this flag take priority over the index specified by
     /// `--default-index` (which defaults to PyPI). When multiple `--index` flags are provided,
     /// earlier values take priority.
+    ///
+    /// Index names are not supported as values. Relative paths must be disambiguated from index
+    /// names with `./` or `../` on Unix or `.\\`, `..\\`, `./` or `../` on Windows.
     //
     // The nested Vec structure (`Vec<Vec<Maybe<Index>>>`) is required for clap's
     // value parsing mechanism, which processes one value at a time, in order to handle
@@ -5175,7 +5198,6 @@ pub struct IndexArgs {
 }
 
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct RefreshArgs {
     /// Refresh all cached data.
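
An aside on the mirror options documented above: per the help text, a mirror URL simply replaces the default download prefix in each release URL. A hedged sketch of that substitution (the `DEFAULT_PREFIX` constant and `apply_mirror` helper are illustrative, not uv's internals):

```rust
const DEFAULT_PREFIX: &str =
    "https://github.com/astral-sh/python-build-standalone/releases/download";

/// Rewrite a python-build-standalone download URL against a mirror prefix.
fn apply_mirror(url: &str, mirror: &str) -> String {
    url.replacen(DEFAULT_PREFIX, mirror, 1)
}

fn main() {
    let url = "https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz";
    let mirrored = apply_mirror(url, "file:///opt/python-mirror");
    assert!(mirrored.starts_with("file:///opt/python-mirror/20240713/"));
}
```
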
    #[arg(
@@ -5201,7 +5223,6 @@ pub struct RefreshArgs {
 }
 
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct BuildOptionsArgs {
     /// Don't build source distributions.
     ///
@@ -5257,7 +5278,6 @@ pub struct BuildOptionsArgs {
 
 /// Arguments that are used by commands that need to install (but not resolve) packages.
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct InstallerArgs {
     #[command(flatten)]
     pub index_args: IndexArgs,
@@ -5399,7 +5419,6 @@ pub struct InstallerArgs {
 
 /// Arguments that are used by commands that need to resolve (but not install) packages.
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct ResolverArgs {
     #[command(flatten)]
     pub index_args: IndexArgs,
@@ -5566,7 +5585,6 @@ pub struct ResolverArgs {
 
 /// Arguments that are used by commands that need to resolve and install packages.
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct ResolverInstallerArgs {
     #[command(flatten)]
     pub index_args: IndexArgs,
@@ -5783,7 +5801,6 @@ pub struct ResolverInstallerArgs {
 
 /// Arguments that are used by commands that need to fetch from the Simple API.
 #[derive(Args)]
-#[allow(clippy::struct_excessive_bools)]
 pub struct FetchArgs {
     #[command(flatten)]
     pub index_args: IndexArgs,
diff --git a/crates/uv-cli/src/options.rs b/crates/uv-cli/src/options.rs
index 656edd43c..f522022a1 100644
--- a/crates/uv-cli/src/options.rs
+++ b/crates/uv-cli/src/options.rs
@@ -1,7 +1,10 @@
+use anstream::eprintln;
+
 use uv_cache::Refresh;
 use uv_configuration::ConfigSettings;
 use uv_resolver::PrereleaseMode;
 use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
+use uv_warnings::owo_colors::OwoColorize;
 
 use crate::{
     BuildOptionsArgs, FetchArgs, IndexArgs, InstallerArgs, Maybe, RefreshArgs, ResolverArgs,
@@ -9,12 +12,27 @@ use crate::{
 };
 
 /// Given a boolean flag pair (like `--upgrade` and `--no-upgrade`), resolve the value of the flag.
-pub fn flag(yes: bool, no: bool) -> Option<bool> {
+pub fn flag(yes: bool, no: bool, name: &str) -> Option<bool> {
     match (yes, no) {
         (true, false) => Some(true),
         (false, true) => Some(false),
         (false, false) => None,
-        (..) => unreachable!("Clap should make this impossible"),
+        (..) => {
+            eprintln!(
+                "{}{} `{}` and `{}` cannot be used together. \
+                Boolean flags on different levels are currently not supported \
+                (https://github.com/clap-rs/clap/issues/6049)",
+                "error".bold().red(),
+                ":".bold(),
+                format!("--{name}").green(),
+                format!("--no-{name}").green(),
+            );
+            // No error forwarding, since this should eventually be solved on the clap side.
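
To make the resolution table of the `flag` helper above concrete, here is a simplified, runnable sketch (the real helper additionally prints the conflict error shown above and exits; this version just encodes the three meaningful states):

```rust
fn flag(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        (true, true) => unreachable!("the real helper reports the conflict and exits"),
    }
}

fn main() {
    assert_eq!(flag(true, false), Some(true));  // e.g. `--upgrade`
    assert_eq!(flag(false, true), Some(false)); // e.g. `--no-upgrade`
    assert_eq!(flag(false, false), None);       // neither: fall back to config/defaults
}
```
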
+            #[allow(clippy::exit)]
+            {
+                std::process::exit(2);
+            }
+        }
     }
 }
 
@@ -26,7 +44,7 @@ impl From<RefreshArgs> for Refresh {
             refresh_package,
         } = value;
 
-        Self::from_args(flag(refresh, no_refresh), refresh_package)
+        Self::from_args(flag(refresh, no_refresh, "refresh"), refresh_package)
     }
 }
 
@@ -53,7 +71,7 @@ impl From<PipCompileArgs> for PipOptions {
         } = args;
 
         Self {
-            upgrade: flag(upgrade, no_upgrade),
+            upgrade: flag(upgrade, no_upgrade, "upgrade"),
             upgrade_package: Some(upgrade_package),
             index_strategy,
             keyring_provider,
@@ -66,7 +84,7 @@ impl From<PipCompileArgs> for PipOptions {
             },
             config_settings: config_setting
                 .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
-            no_build_isolation: flag(no_build_isolation, build_isolation),
+            no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
             no_build_isolation_package: Some(no_build_isolation_package),
             exclude_newer,
             link_mode,
@@ -96,16 +114,16 @@ impl From<PipSyncArgs> for PipOptions {
         } = args;
 
         Self {
-            reinstall: flag(reinstall, no_reinstall),
+            reinstall: flag(reinstall, no_reinstall, "reinstall"),
             reinstall_package: Some(reinstall_package),
             index_strategy,
             keyring_provider,
             config_settings: config_setting
                 .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
-            no_build_isolation: flag(no_build_isolation, build_isolation),
+            no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
             exclude_newer,
             link_mode,
-            compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
+            compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
             no_sources: if no_sources { Some(true) } else { None },
             ..PipOptions::from(index_args)
         }
@@ -140,9 +158,9 @@ impl From<PipInstallArgs> for PipOptions {
         } = args;
 
         Self {
-            upgrade: flag(upgrade, no_upgrade),
+            upgrade: flag(upgrade, no_upgrade, "upgrade"),
             upgrade_package: Some(upgrade_package),
-            reinstall: flag(reinstall, no_reinstall),
+            reinstall: flag(reinstall, no_reinstall, "reinstall"),
             reinstall_package: Some(reinstall_package),
             index_strategy,
             keyring_provider,
@@ -155,11 +173,11 @@ impl From<PipInstallArgs> for PipOptions {
             fork_strategy,
             config_settings: config_setting
                 .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
-            no_build_isolation: flag(no_build_isolation, build_isolation),
+            no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
             no_build_isolation_package: Some(no_build_isolation_package),
             exclude_newer,
             link_mode,
-            compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
+            compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
             no_sources: if no_sources { Some(true) } else { None },
             ..PipOptions::from(index_args)
         }
@@ -289,7 +307,7 @@ pub fn resolver_options(
             .filter_map(Maybe::into_option)
             .collect()
         }),
-        upgrade: flag(upgrade, no_upgrade),
+        upgrade: flag(upgrade, no_upgrade, "upgrade"),
         upgrade_package: Some(upgrade_package),
         index_strategy,
         keyring_provider,
@@ -303,13 +321,13 @@ pub fn resolver_options(
         dependency_metadata: None,
         config_settings: config_setting
            .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
-        no_build_isolation: flag(no_build_isolation, build_isolation),
+        no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
        no_build_isolation_package: Some(no_build_isolation_package),
         exclude_newer,
         link_mode,
-        no_build: flag(no_build, build),
+        no_build: flag(no_build, build, "build"),
         no_build_package: Some(no_build_package),
-        no_binary: flag(no_binary, binary),
+        no_binary: flag(no_binary, binary, "binary"),
         no_binary_package:
Some(no_binary_package), no_sources: if no_sources { Some(true) } else { None }, } @@ -386,13 +404,13 @@ pub fn resolver_installer_options( .filter_map(Maybe::into_option) .collect() }), - upgrade: flag(upgrade, no_upgrade), + upgrade: flag(upgrade, no_upgrade, "upgrade"), upgrade_package: if upgrade_package.is_empty() { None } else { Some(upgrade_package) }, - reinstall: flag(reinstall, no_reinstall), + reinstall: flag(reinstall, no_reinstall, "reinstall"), reinstall_package: if reinstall_package.is_empty() { None } else { @@ -410,7 +428,7 @@ pub fn resolver_installer_options( dependency_metadata: None, config_settings: config_setting .map(|config_settings| config_settings.into_iter().collect::()), - no_build_isolation: flag(no_build_isolation, build_isolation), + no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"), no_build_isolation_package: if no_build_isolation_package.is_empty() { None } else { @@ -418,14 +436,14 @@ pub fn resolver_installer_options( }, exclude_newer, link_mode, - compile_bytecode: flag(compile_bytecode, no_compile_bytecode), - no_build: flag(no_build, build), + compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"), + no_build: flag(no_build, build, "build"), no_build_package: if no_build_package.is_empty() { None } else { Some(no_build_package) }, - no_binary: flag(no_binary, binary), + no_binary: flag(no_binary, binary, "binary"), no_binary_package: if no_binary_package.is_empty() { None } else { diff --git a/crates/uv-client/Cargo.toml b/crates/uv-client/Cargo.toml index 81d1909fe..bc7fc611f 100644 --- a/crates/uv-client/Cargo.toml +++ b/crates/uv-client/Cargo.toml @@ -65,3 +65,4 @@ hyper = { version = "1.4.1", features = ["server", "http1"] } hyper-util = { version = "0.1.8", features = ["tokio"] } insta = { version = "1.40.0", features = ["filters", "json", "redactions"] } tokio = { workspace = true } +wiremock = { workspace = true } diff --git a/crates/uv-client/src/base_client.rs b/crates/uv-client/src/base_client.rs index f5fda246d..e11845adb 100644 --- a/crates/uv-client/src/base_client.rs +++ b/crates/uv-client/src/base_client.rs @@ -6,16 +6,26 @@ use std::sync::Arc; use std::time::Duration; use std::{env, io, iter}; +use anyhow::anyhow; +use http::{ + HeaderMap, HeaderName, HeaderValue, Method, StatusCode, + header::{ + AUTHORIZATION, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, COOKIE, LOCATION, + PROXY_AUTHORIZATION, REFERER, TRANSFER_ENCODING, WWW_AUTHENTICATE, + }, +}; use itertools::Itertools; -use reqwest::{Client, ClientBuilder, Proxy, Response}; +use reqwest::{Client, ClientBuilder, IntoUrl, Proxy, Request, Response, multipart}; use reqwest_middleware::{ClientWithMiddleware, Middleware}; use reqwest_retry::policies::ExponentialBackoff; use reqwest_retry::{ DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy, }; use tracing::{debug, trace}; +use url::ParseError; use url::Url; +use uv_auth::Credentials; use uv_auth::{AuthMiddleware, Indexes}; use uv_configuration::{KeyringProviderType, TrustedHost}; use uv_fs::Simplified; @@ -32,6 +42,10 @@ use crate::middleware::OfflineMiddleware; use crate::tls::read_identity; pub const DEFAULT_RETRIES: u32 = 3; +/// Maximum number of redirects to follow before giving up. +/// +/// This is the default used by [`reqwest`]. +const DEFAULT_MAX_REDIRECTS: u32 = 10; /// Selectively skip parts or the entire auth middleware. 
#[derive(Debug, Clone, Copy, Default)] @@ -61,6 +75,31 @@ pub struct BaseClientBuilder<'a> { default_timeout: Duration, extra_middleware: Option, proxies: Vec, + redirect_policy: RedirectPolicy, + /// Whether credentials should be propagated during cross-origin redirects. + /// + /// A policy allowing propagation is insecure and should only be available for test code. + cross_origin_credential_policy: CrossOriginCredentialsPolicy, +} + +/// The policy for handling HTTP redirects. +#[derive(Debug, Default, Clone, Copy)] +pub enum RedirectPolicy { + /// Use reqwest's built-in redirect handling. This bypasses our custom middleware + /// on redirect. + #[default] + BypassMiddleware, + /// Handle redirects manually, re-triggering our custom middleware for each request. + RetriggerMiddleware, +} + +impl RedirectPolicy { + pub fn reqwest_policy(self) -> reqwest::redirect::Policy { + match self { + RedirectPolicy::BypassMiddleware => reqwest::redirect::Policy::default(), + RedirectPolicy::RetriggerMiddleware => reqwest::redirect::Policy::none(), + } + } } /// A list of user-defined middlewares to be applied to the client. @@ -96,6 +135,8 @@ impl BaseClientBuilder<'_> { default_timeout: Duration::from_secs(30), extra_middleware: None, proxies: vec![], + redirect_policy: RedirectPolicy::default(), + cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure, } } } @@ -173,6 +214,24 @@ impl<'a> BaseClientBuilder<'a> { self } + #[must_use] + pub fn redirect(mut self, policy: RedirectPolicy) -> Self { + self.redirect_policy = policy; + self + } + + /// Allows credentials to be propagated on cross-origin redirects. + /// + /// WARNING: This should only be available for tests. In production code, propagating credentials + /// during cross-origin redirects can lead to security vulnerabilities including credential + /// leakage to untrusted domains. + #[cfg(test)] + #[must_use] + pub fn allow_cross_origin_credentials(mut self) -> Self { + self.cross_origin_credential_policy = CrossOriginCredentialsPolicy::Insecure; + self + } + pub fn is_offline(&self) -> bool { matches!(self.connectivity, Connectivity::Offline) } @@ -229,6 +288,7 @@ impl<'a> BaseClientBuilder<'a> { timeout, ssl_cert_file_exists, Security::Secure, + self.redirect_policy, ); // Create an insecure client that accepts invalid certificates. @@ -237,11 +297,20 @@ impl<'a> BaseClientBuilder<'a> { timeout, ssl_cert_file_exists, Security::Insecure, + self.redirect_policy, ); // Wrap in any relevant middleware and handle connectivity. - let client = self.apply_middleware(raw_client.clone()); - let dangerous_client = self.apply_middleware(raw_dangerous_client.clone()); + let client = RedirectClientWithMiddleware { + client: self.apply_middleware(raw_client.clone()), + redirect_policy: self.redirect_policy, + cross_origin_credentials_policy: self.cross_origin_credential_policy, + }; + let dangerous_client = RedirectClientWithMiddleware { + client: self.apply_middleware(raw_dangerous_client.clone()), + redirect_policy: self.redirect_policy, + cross_origin_credentials_policy: self.cross_origin_credential_policy, + }; BaseClient { connectivity: self.connectivity, @@ -258,8 +327,16 @@ impl<'a> BaseClientBuilder<'a> { /// Share the underlying client between two different middleware configurations. pub fn wrap_existing(&self, existing: &BaseClient) -> BaseClient { // Wrap in any relevant middleware and handle connectivity. 
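
An aside before the remaining plumbing: a hypothetical sketch of how a caller opts into the new redirect behavior. This diff shows the `redirect()` and `build()` methods but not a `BaseClientBuilder::new()` constructor, so that part is an assumption:

```rust
// Hypothetical wiring; `BaseClientBuilder::new()` is assumed, not shown in
// this diff. RetriggerMiddleware makes every redirect hop re-enter the
// middleware stack (auth, retries) instead of being followed inside reqwest.
fn build_redirect_aware_client() -> BaseClient {
    BaseClientBuilder::new()
        .redirect(RedirectPolicy::RetriggerMiddleware)
        .build()
}
```
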
-        let client = self.apply_middleware(existing.raw_client.clone());
-        let dangerous_client = self.apply_middleware(existing.raw_dangerous_client.clone());
+        let client = RedirectClientWithMiddleware {
+            client: self.apply_middleware(existing.raw_client.clone()),
+            redirect_policy: self.redirect_policy,
+            cross_origin_credentials_policy: self.cross_origin_credential_policy,
+        };
+        let dangerous_client = RedirectClientWithMiddleware {
+            client: self.apply_middleware(existing.raw_dangerous_client.clone()),
+            redirect_policy: self.redirect_policy,
+            cross_origin_credentials_policy: self.cross_origin_credential_policy,
+        };
 
         BaseClient {
             connectivity: self.connectivity,
@@ -279,6 +356,7 @@ impl<'a> BaseClientBuilder<'a> {
         timeout: Duration,
         ssl_cert_file_exists: bool,
         security: Security,
+        redirect_policy: RedirectPolicy,
     ) -> Client {
         // Configure the builder.
         let client_builder = ClientBuilder::new()
@@ -286,7 +364,8 @@ impl<'a> BaseClientBuilder<'a> {
             .user_agent(user_agent)
             .pool_max_idle_per_host(20)
             .read_timeout(timeout)
-            .tls_built_in_root_certs(false);
+            .tls_built_in_root_certs(false)
+            .redirect(redirect_policy.reqwest_policy());
 
         // If necessary, accept invalid certificates.
         let client_builder = match security {
@@ -381,9 +460,9 @@ impl<'a> BaseClientBuilder<'a> {
 #[derive(Debug, Clone)]
 pub struct BaseClient {
     /// The underlying HTTP client that enforces valid certificates.
-    client: ClientWithMiddleware,
+    client: RedirectClientWithMiddleware,
     /// The underlying HTTP client that accepts invalid certificates.
-    dangerous_client: ClientWithMiddleware,
+    dangerous_client: RedirectClientWithMiddleware,
     /// The HTTP client without middleware.
     raw_client: Client,
     /// The HTTP client that accepts invalid certificates without middleware.
@@ -408,7 +487,7 @@ enum Security {
 
 impl BaseClient {
     /// Selects the appropriate client based on the host's trustworthiness.
-    pub fn for_host(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
+    pub fn for_host(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
         if self.disable_ssl(url) {
             &self.dangerous_client
         } else {
@@ -416,6 +495,12 @@ impl BaseClient {
         }
     }
 
+    /// Executes a request, applying the redirect policy.
+    pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
+        let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
+        client.execute(req).await
+    }
+
     /// Returns `true` if the host is trusted to use the insecure client.
     pub fn disable_ssl(&self, url: &DisplaySafeUrl) -> bool {
         self.allow_insecure_host
@@ -439,6 +524,326 @@ impl BaseClient {
     }
 }
 
+/// Wrapper around [`ClientWithMiddleware`] that manages redirects.
+#[derive(Debug, Clone)]
+pub struct RedirectClientWithMiddleware {
+    client: ClientWithMiddleware,
+    redirect_policy: RedirectPolicy,
+    /// Whether credentials should be preserved during cross-origin redirects.
+    ///
+    /// WARNING: This should only be available for tests. In production code, preserving credentials
+    /// during cross-origin redirects can lead to security vulnerabilities including credential
+    /// leakage to untrusted domains.
+    cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
+}
+
+impl RedirectClientWithMiddleware {
+    /// Convenience method to make a `GET` request to a URL.
+    pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+        RequestBuilder::new(self.client.get(url), self)
+    }
+
+    /// Convenience method to make a `POST` request to a URL.
+    pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+        RequestBuilder::new(self.client.post(url), self)
+    }
+
+    /// Convenience method to make a `HEAD` request to a URL.
+    pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+        RequestBuilder::new(self.client.head(url), self)
+    }
+
+    /// Executes a request, applying the redirect policy.
+    pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
+        match self.redirect_policy {
+            RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
+            RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
+        }
+    }
+
+    /// Executes a request. If the response is a redirect (one of HTTP 301, 302, 303, 307, or 308), the
+    /// request is executed again with the redirect location URL (up to a maximum number of
+    /// redirects).
+    ///
+    /// Unlike the built-in reqwest redirect policies, this sends the redirect request through the
+    /// entire middleware pipeline again.
+    ///
+    /// See RFC 7231 7.1.2 for details on
+    /// redirect semantics.
+    async fn execute_with_redirect_handling(
+        &self,
+        req: Request,
+    ) -> reqwest_middleware::Result<Response> {
+        let mut request = req;
+        let mut redirects = 0;
+        let max_redirects = DEFAULT_MAX_REDIRECTS;
+
+        loop {
+            let result = self
+                .client
+                .execute(request.try_clone().expect("HTTP request must be cloneable"))
+                .await;
+            let Ok(response) = result else {
+                return result;
+            };
+
+            if redirects >= max_redirects {
+                return Ok(response);
+            }
+
+            let Some(redirect_request) =
+                request_into_redirect(request, &response, self.cross_origin_credentials_policy)?
+            else {
+                return Ok(response);
+            };
+
+            redirects += 1;
+            request = redirect_request;
+        }
+    }
+
+    pub fn raw_client(&self) -> &ClientWithMiddleware {
+        &self.client
+    }
+}
+
+impl From<RedirectClientWithMiddleware> for ClientWithMiddleware {
+    fn from(item: RedirectClientWithMiddleware) -> ClientWithMiddleware {
+        item.client
+    }
+}
+
+/// Check whether this response is a redirect and, if so, return a new redirect request.
+///
+/// This implementation is based on the [`reqwest`] crate redirect implementation.
+/// It takes ownership of the original [`Request`] and mutates it to create the new
+/// redirect [`Request`].
+fn request_into_redirect(
+    mut req: Request,
+    res: &Response,
+    cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
+) -> reqwest_middleware::Result<Option<Request>> {
+    let original_req_url = DisplaySafeUrl::from(req.url().clone());
+    let status = res.status();
+    let should_redirect = match status {
+        StatusCode::MOVED_PERMANENTLY
+        | StatusCode::FOUND
+        | StatusCode::TEMPORARY_REDIRECT
+        | StatusCode::PERMANENT_REDIRECT => true,
+        StatusCode::SEE_OTHER => {
+            // Per RFC 7231, HTTP 303 is intended for the user agent
+            // to perform a GET or HEAD request to the redirect target.
+            // Historically, some browsers also changed method from POST
+            // to GET on 301 or 302, but this is not required by RFC 7231
+            // and was not intended by the HTTP spec.
+            *req.body_mut() = None;
+            for header in &[
+                TRANSFER_ENCODING,
+                CONTENT_ENCODING,
+                CONTENT_TYPE,
+                CONTENT_LENGTH,
+            ] {
+                req.headers_mut().remove(header);
+            }
+
+            match *req.method() {
+                Method::GET | Method::HEAD => {}
+                _ => {
+                    *req.method_mut() = Method::GET;
+                }
+            }
+            true
+        }
+        _ => false,
+    };
+    if !should_redirect {
+        return Ok(None);
+    }
+
+    let location = res
+        .headers()
+        .get(LOCATION)
+        .ok_or(reqwest_middleware::Error::Middleware(anyhow!(
+            "Server returned a redirect (HTTP {status}) without a destination URL. This may indicate a server configuration issue"
+        )))?
+ .to_str() + .map_err(|_| { + reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value: must only contain visible ascii characters" + )) + })?; + + let mut redirect_url = match DisplaySafeUrl::parse(location) { + Ok(url) => url, + // Per RFC 7231, URLs should be resolved against the request URL. + Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| { + reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}" + )) + })?, + Err(err) => { + return Err(reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value `{location}`: {err}" + ))); + } + }; + // Per RFC 7231, fragments must be propagated + if let Some(fragment) = original_req_url.fragment() { + redirect_url.set_fragment(Some(fragment)); + } + + // Ensure the URL is a valid HTTP URI. + if let Err(err) = redirect_url.as_str().parse::() { + return Err(reqwest_middleware::Error::Middleware(anyhow!( + "HTTP {status} 'Location' value `{redirect_url}` is not a valid HTTP URI: {err}" + ))); + } + + if redirect_url.scheme() != "http" && redirect_url.scheme() != "https" { + return Err(reqwest_middleware::Error::Middleware(anyhow!( + "Invalid HTTP {status} 'Location' value `{redirect_url}`: scheme needs to be https or http" + ))); + } + + let mut headers = HeaderMap::new(); + std::mem::swap(req.headers_mut(), &mut headers); + + let cross_host = redirect_url.host_str() != original_req_url.host_str() + || redirect_url.port_or_known_default() != original_req_url.port_or_known_default(); + if cross_host { + if cross_origin_credentials_policy == CrossOriginCredentialsPolicy::Secure { + debug!("Received a cross-origin redirect. Removing sensitive headers."); + headers.remove(AUTHORIZATION); + headers.remove(COOKIE); + headers.remove(PROXY_AUTHORIZATION); + headers.remove(WWW_AUTHENTICATE); + } + // If the redirect request is not a cross-origin request and the original request already + // had a Referer header, attempt to set the Referer header for the redirect request. + } else if headers.contains_key(REFERER) { + if let Some(referer) = make_referer(&redirect_url, &original_req_url) { + headers.insert(REFERER, referer); + } + } + + // Check if there are credentials on the redirect location itself. + // If so, move them to Authorization header. + if !redirect_url.username().is_empty() { + if let Some(credentials) = Credentials::from_url(&redirect_url) { + let _ = redirect_url.set_username(""); + let _ = redirect_url.set_password(None); + headers.insert(AUTHORIZATION, credentials.to_header_value()); + } + } + + std::mem::swap(req.headers_mut(), &mut headers); + *req.url_mut() = Url::from(redirect_url); + debug!( + "Received HTTP {status}. Redirecting to {}", + DisplaySafeUrl::ref_cast(req.url()) + ); + Ok(Some(req)) +} + +/// Return a Referer [`HeaderValue`] according to RFC 7231. +/// +/// Return [`None`] if https has been downgraded in the redirect location. +fn make_referer( + redirect_url: &DisplaySafeUrl, + original_url: &DisplaySafeUrl, +) -> Option { + if redirect_url.scheme() == "http" && original_url.scheme() == "https" { + return None; + } + + let mut referer = original_url.clone(); + referer.remove_credentials(); + referer.set_fragment(None); + referer.as_str().parse().ok() +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)] +pub(crate) enum CrossOriginCredentialsPolicy { + /// Do not propagate credentials on cross-origin requests. 
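
A quick runnable illustration of the relative-`Location` resolution implemented above, using the `url` crate that `DisplaySafeUrl` converts to and from in this code:

```rust
use url::Url; // the same crate the conversions above build on

fn main() {
    let original = Url::parse("https://example.com/simple/pkg/").unwrap();
    // A relative `Location` header is resolved against the request URL.
    let redirect = original.join("../other-pkg/").unwrap();
    assert_eq!(redirect.as_str(), "https://example.com/simple/other-pkg/");
}
```
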
+ #[default] + Secure, + + /// Propagate credentials on cross-origin requests. + /// + /// WARNING: This should only be available for tests. In production code, preserving credentials + /// during cross-origin redirects can lead to security vulnerabilities including credential + /// leakage to untrusted domains. + #[cfg(test)] + Insecure, +} + +/// A builder to construct the properties of a `Request`. +/// +/// This wraps [`reqwest_middleware::RequestBuilder`] to ensure that the [`BaseClient`] +/// redirect policy is respected if `send()` is called. +#[derive(Debug)] +#[must_use] +pub struct RequestBuilder<'a> { + builder: reqwest_middleware::RequestBuilder, + client: &'a RedirectClientWithMiddleware, +} + +impl<'a> RequestBuilder<'a> { + pub fn new( + builder: reqwest_middleware::RequestBuilder, + client: &'a RedirectClientWithMiddleware, + ) -> Self { + Self { builder, client } + } + + /// Add a `Header` to this Request. + pub fn header(mut self, key: K, value: V) -> Self + where + HeaderName: TryFrom, + >::Error: Into, + HeaderValue: TryFrom, + >::Error: Into, + { + self.builder = self.builder.header(key, value); + self + } + + /// Add a set of Headers to the existing ones on this Request. + /// + /// The headers will be merged in to any already set. + pub fn headers(mut self, headers: HeaderMap) -> Self { + self.builder = self.builder.headers(headers); + self + } + + #[cfg(not(target_arch = "wasm32"))] + pub fn version(mut self, version: reqwest::Version) -> Self { + self.builder = self.builder.version(version); + self + } + + #[cfg_attr(docsrs, doc(cfg(feature = "multipart")))] + pub fn multipart(mut self, multipart: multipart::Form) -> Self { + self.builder = self.builder.multipart(multipart); + self + } + + /// Build a `Request`. + pub fn build(self) -> reqwest::Result { + self.builder.build() + } + + /// Constructs the Request and sends it to the target URL, returning a + /// future Response. + pub async fn send(self) -> reqwest_middleware::Result { + self.client.execute(self.build()?).await + } + + pub fn raw_builder(&self) -> &reqwest_middleware::RequestBuilder { + &self.builder + } +} + /// Extends [`DefaultRetryableStrategy`], to log transient request failures and additional retry cases. pub struct UvRetryableStrategy; @@ -528,3 +933,204 @@ fn find_source(orig: &dyn Error) -> Option<&E> { fn find_sources(orig: &dyn Error) -> impl Iterator { iter::successors(find_source::(orig), |&err| find_source(err)) } + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::Result; + + use reqwest::{Client, Method}; + use wiremock::matchers::method; + use wiremock::{Mock, MockServer, ResponseTemplate}; + + use crate::base_client::request_into_redirect; + + #[tokio::test] + async fn test_redirect_preserves_authorization_header_on_same_origin() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::new() + .get(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert!(request.headers().contains_key(AUTHORIZATION)); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? 
+ .unwrap(); + assert!(redirect_request.headers().contains_key(AUTHORIZATION)); + } + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_preserves_fragment() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::new() + .get(format!("{}#fragment", server.uri())) + .build() + .unwrap(); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? + .unwrap(); + assert!( + redirect_request + .url() + .fragment() + .is_some_and(|fragment| fragment == "fragment") + ); + } + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", "https://cross-origin.com/simple"), + ) + .mount(&server) + .await; + + let request = Client::new() + .get(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert!(request.headers().contains_key(AUTHORIZATION)); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? + .unwrap(); + assert!(!redirect_request.headers().contains_key(AUTHORIZATION)); + } + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_303_changes_post_to_get() -> Result<()> { + let server = MockServer::start().await; + Mock::given(method("POST")) + .respond_with( + ResponseTemplate::new(303) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::new() + .post(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert_eq!(request.method(), Method::POST); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? 
+ .unwrap(); + assert_eq!(redirect_request.method(), Method::GET); + + Ok(()) + } + + #[tokio::test] + async fn test_redirect_no_referer_if_disabled() -> Result<()> { + for status in &[301, 302, 303, 307, 308] { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with( + ResponseTemplate::new(*status) + .insert_header("location", format!("{}/redirect", server.uri())), + ) + .mount(&server) + .await; + + let request = Client::builder() + .referer(false) + .build() + .unwrap() + .get(server.uri()) + .basic_auth("username", Some("password")) + .build() + .unwrap(); + + assert!(!request.headers().contains_key(REFERER)); + + let response = Client::builder() + .redirect(reqwest::redirect::Policy::none()) + .build() + .unwrap() + .execute(request.try_clone().unwrap()) + .await + .unwrap(); + + let redirect_request = + request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)? + .unwrap(); + + assert!(!redirect_request.headers().contains_key(REFERER)); + } + + Ok(()) + } +} diff --git a/crates/uv-client/src/cached_client.rs b/crates/uv-client/src/cached_client.rs index 5821fe9f3..ee3314d1c 100644 --- a/crates/uv-client/src/cached_client.rs +++ b/crates/uv-client/src/cached_client.rs @@ -1,4 +1,3 @@ -use std::fmt::{Debug, Display, Formatter}; use std::time::{Duration, SystemTime}; use std::{borrow::Cow, path::Path}; @@ -100,44 +99,62 @@ where } } -/// Either a cached client error or a (user specified) error from the callback +/// Dispatch type: Either a cached client error or a (user specified) error from the callback pub enum CachedClientError { - Client(Error), - Callback(CallbackError), + Client { + retries: Option, + err: Error, + }, + Callback { + retries: Option, + err: CallbackError, + }, } -impl Display for CachedClientError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { +impl CachedClientError { + /// Attach the number of retries to the error context. + /// + /// Adds to existing errors if any, in case different layers retried. + fn with_retries(self, retries: u32) -> Self { match self { - CachedClientError::Client(err) => write!(f, "{err}"), - CachedClientError::Callback(err) => write!(f, "{err}"), + CachedClientError::Client { + retries: existing_retries, + err, + } => CachedClientError::Client { + retries: Some(existing_retries.unwrap_or_default() + retries), + err, + }, + CachedClientError::Callback { + retries: existing_retries, + err, + } => CachedClientError::Callback { + retries: Some(existing_retries.unwrap_or_default() + retries), + err, + }, } } -} -impl Debug for CachedClientError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn retries(&self) -> Option { match self { - CachedClientError::Client(err) => write!(f, "{err:?}"), - CachedClientError::Callback(err) => write!(f, "{err:?}"), + CachedClientError::Client { retries, .. } => *retries, + CachedClientError::Callback { retries, .. } => *retries, } } -} -impl std::error::Error - for CachedClientError -{ - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + fn error(&self) -> &dyn std::error::Error { match self { - CachedClientError::Client(err) => Some(err), - CachedClientError::Callback(err) => Some(err), + CachedClientError::Client { err, .. } => err, + CachedClientError::Callback { err, .. 
} => err, } } } impl From for CachedClientError { fn from(error: Error) -> Self { - Self::Client(error) + Self::Client { + retries: None, + err: error, + } } } @@ -145,15 +162,35 @@ impl From for CachedClientError { fn from(error: ErrorKind) -> Self { - Self::Client(error.into()) + Self::Client { + retries: None, + err: error.into(), + } } } impl + std::error::Error + 'static> From> for Error { + /// Attach retry error context, if there were retries. fn from(error: CachedClientError) -> Self { match error { - CachedClientError::Client(error) => error, - CachedClientError::Callback(error) => error.into(), + CachedClientError::Client { + retries: Some(retries), + err, + } => ErrorKind::RequestWithRetries { + source: Box::new(err.into_kind()), + retries, + } + .into(), + CachedClientError::Client { retries: None, err } => err, + CachedClientError::Callback { + retries: Some(retries), + err, + } => ErrorKind::RequestWithRetries { + source: Box::new(err.into().into_kind()), + retries, + } + .into(), + CachedClientError::Callback { retries: None, err } => err.into(), } } } @@ -385,7 +422,7 @@ impl CachedClient { let data = response_callback(response) .boxed_local() .await - .map_err(|err| CachedClientError::Callback(err))?; + .map_err(|err| CachedClientError::Callback { retries: None, err })?; let Some(cache_policy) = cache_policy else { return Ok(data.into_target()); }; @@ -486,7 +523,6 @@ impl CachedClient { debug!("Sending revalidation request for: {url}"); let response = self .0 - .for_host(&url) .execute(req) .instrument(info_span!("revalidation_request", url = url.as_str())) .await @@ -527,12 +563,23 @@ impl CachedClient { let cache_policy_builder = CachePolicyBuilder::new(&req); let response = self .0 - .for_host(&url) .execute(req) .await - .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))? 
@@ -385,7 +422,7 @@ impl CachedClient {
         let data = response_callback(response)
             .boxed_local()
             .await
-            .map_err(|err| CachedClientError::Callback(err))?;
+            .map_err(|err| CachedClientError::Callback { retries: None, err })?;
         let Some(cache_policy) = cache_policy else {
             return Ok(data.into_target());
         };
@@ -486,7 +523,6 @@ impl CachedClient {
         debug!("Sending revalidation request for: {url}");
         let response = self
             .0
-            .for_host(&url)
             .execute(req)
             .instrument(info_span!("revalidation_request", url = url.as_str()))
             .await
@@ -527,12 +563,23 @@ impl CachedClient {
         let cache_policy_builder = CachePolicyBuilder::new(&req);
         let response = self
             .0
-            .for_host(&url)
             .execute(req)
             .await
-            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
-            .error_for_status()
-            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
+            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;
+
+        let retry_count = response
+            .extensions()
+            .get::<reqwest_retry::RetryCount>()
+            .map(|retries| retries.value());
+
+        if let Err(status_error) = response.error_for_status_ref() {
+            return Err(CachedClientError::<CallbackError>::Client {
+                retries: retry_count,
+                err: ErrorKind::from_reqwest(url, status_error).into(),
+            }
+            .into());
+        }
+
         let cache_policy = cache_policy_builder.build(&response);
         let cache_policy = if cache_policy.to_archived().is_storable() {
             Some(Box::new(cache_policy))
@@ -579,7 +626,7 @@ impl CachedClient {
         cache_control: CacheControl,
         response_callback: Callback,
     ) -> Result<Payload::Target, CachedClientError<CallbackError>> {
-        let mut n_past_retries = 0;
+        let mut past_retries = 0;
         let start_time = SystemTime::now();
         let retry_policy = self.uncached().retry_policy();
         loop {
@@ -587,11 +634,20 @@ impl CachedClient {
             let result = self
                 .get_cacheable(fresh_req, cache_entry, cache_control, &response_callback)
                 .await;
+
+            // Check if the middleware already performed retries
+            let middleware_retries = match &result {
+                Err(err) => err.retries().unwrap_or_default(),
+                Ok(_) => 0,
+            };
+
             if result
                 .as_ref()
-                .is_err_and(|err| is_extended_transient_error(err))
+                .is_err_and(|err| is_extended_transient_error(err.error()))
             {
-                let retry_decision = retry_policy.should_retry(start_time, n_past_retries);
+                // If middleware already retried, consider that in our retry budget
+                let total_retries = past_retries + middleware_retries;
+                let retry_decision = retry_policy.should_retry(start_time, total_retries);
                 if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
                     debug!(
                         "Transient failure while handling response from {}; retrying...",
@@ -601,10 +657,15 @@ impl CachedClient {
                         .duration_since(SystemTime::now())
                         .unwrap_or_else(|_| Duration::default());
                     tokio::time::sleep(duration).await;
-                    n_past_retries += 1;
+                    past_retries += 1;
                     continue;
                 }
             }
+
+            if past_retries > 0 {
+                return result.map_err(|err| err.with_retries(past_retries));
+            }
+
             return result;
         }
     }
@@ -622,7 +683,7 @@ impl CachedClient {
         cache_entry: &CacheEntry,
         response_callback: Callback,
     ) -> Result<Payload::Target, CachedClientError<CallbackError>> {
-        let mut n_past_retries = 0;
+        let mut past_retries = 0;
         let start_time = SystemTime::now();
         let retry_policy = self.uncached().retry_policy();
         loop {
@@ -630,12 +691,20 @@ impl CachedClient {
             let result = self
                 .skip_cache(fresh_req, cache_entry, &response_callback)
                 .await;
+
+            // Check if the middleware already performed retries
+            let middleware_retries = match &result {
+                Err(err) => err.retries().unwrap_or_default(),
+                _ => 0,
+            };
+
             if result
                 .as_ref()
                 .err()
-                .is_some_and(|err| is_extended_transient_error(err))
+                .is_some_and(|err| is_extended_transient_error(err.error()))
             {
-                let retry_decision = retry_policy.should_retry(start_time, n_past_retries);
+                let total_retries = past_retries + middleware_retries;
+                let retry_decision = retry_policy.should_retry(start_time, total_retries);
                 if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
                     debug!(
                         "Transient failure while handling response from {}; retrying...",
@@ -645,10 +714,15 @@ impl CachedClient {
                         .duration_since(SystemTime::now())
                         .unwrap_or_else(|_| Duration::default());
                     tokio::time::sleep(duration).await;
-                    n_past_retries += 1;
+                    past_retries += 1;
                     continue;
                 }
             }
+
+            if past_retries > 0 {
+                return result.map_err(|err| err.with_retries(past_retries));
+            }
+
             return result;
         }
    }
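Both retry loops above share one idea: retries already performed by the middleware are counted against the same budget as the outer loop's own retries, so stacked layers cannot multiply each other. A self-contained sketch of that pattern, with a toy closure-based API rather than uv's:

    use std::time::Duration;

    // On failure, `op` reports how many retries a lower layer already performed.
    fn run_with_budget<F>(mut op: F, budget: u32) -> Result<(), String>
    where
        F: FnMut() -> Result<(), (String, u32)>,
    {
        let mut past_retries = 0;
        loop {
            match op() {
                Ok(()) => return Ok(()),
                Err((err, lower_retries)) => {
                    // Fold the lower layer's retries into the shared budget.
                    let total = past_retries + lower_retries;
                    if total >= budget {
                        return Err(format!("{err} (after {total} retries)"));
                    }
                    std::thread::sleep(Duration::from_millis(10));
                    past_retries += 1;
                }
            }
        }
    }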
diff --git a/crates/uv-client/src/error.rs b/crates/uv-client/src/error.rs
index 6629171e9..368e1ad33 100644
--- a/crates/uv-client/src/error.rs
+++ b/crates/uv-client/src/error.rs
@@ -197,6 +197,13 @@ pub enum ErrorKind {
     #[error("Failed to fetch: `{0}`")]
     WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError),
 
+    /// Add the number of failed retries to the error.
+    #[error("Request failed after {retries} retries")]
+    RequestWithRetries {
+        source: Box<ErrorKind>,
+        retries: u32,
+    },
+
     #[error("Received some unexpected JSON from {}", url)]
     BadJson {
         source: serde_json::Error,
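The new `RequestWithRetries` variant boxes the prior `ErrorKind` as its `source`, so the retry count decorates the error chain without discarding the root cause. A minimal sketch of the same layering with `thiserror` (the `FetchError` type is illustrative only):

    use thiserror::Error;

    #[derive(Debug, Error)]
    enum FetchError {
        #[error("Failed to fetch: `{0}`")]
        Http(String),
        // thiserror treats a field named `source` as the error's cause.
        #[error("Request failed after {retries} retries")]
        WithRetries { source: Box<FetchError>, retries: u32 },
    }

    fn main() {
        let err = FetchError::WithRetries {
            source: Box::new(FetchError::Http("https://example.com".into())),
            retries: 3,
        };
        println!("{err}"); // "Request failed after 3 retries"
    }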
diff --git a/crates/uv-client/src/flat_index.rs b/crates/uv-client/src/flat_index.rs
index 0670fbe36..91668c5c4 100644
--- a/crates/uv-client/src/flat_index.rs
+++ b/crates/uv-client/src/flat_index.rs
@@ -246,7 +246,7 @@ impl<'a> FlatIndexClient<'a> {
                     .collect();
                 Ok(FlatIndexEntries::from_entries(files))
             }
-            Err(CachedClientError::Client(err)) if err.is_offline() => {
+            Err(CachedClientError::Client { err, .. }) if err.is_offline() => {
                 Ok(FlatIndexEntries::offline())
             }
             Err(err) => Err(err.into()),
diff --git a/crates/uv-client/src/httpcache/control.rs b/crates/uv-client/src/httpcache/control.rs
index 724683188..ddac9d1bc 100644
--- a/crates/uv-client/src/httpcache/control.rs
+++ b/crates/uv-client/src/httpcache/control.rs
@@ -21,7 +21,6 @@ use crate::rkyvutil::OwnedArchive;
     rkyv::Serialize,
 )]
 #[rkyv(derive(Debug))]
-#[allow(clippy::struct_excessive_bools)]
 pub struct CacheControl {
     // directives for requests and responses
     /// *
diff --git a/crates/uv-client/src/lib.rs b/crates/uv-client/src/lib.rs
index 3ea33204c..e42c86620 100644
--- a/crates/uv-client/src/lib.rs
+++ b/crates/uv-client/src/lib.rs
@@ -1,6 +1,6 @@
 pub use base_client::{
     AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
-    UvRetryableStrategy, is_extended_transient_error,
+    RedirectClientWithMiddleware, RequestBuilder, UvRetryableStrategy, is_extended_transient_error,
 };
 pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
 pub use error::{Error, ErrorKind, WrappedReqwestError};
diff --git a/crates/uv-client/src/registry_client.rs b/crates/uv-client/src/registry_client.rs
index 7dbdf7e49..5788ea56c 100644
--- a/crates/uv-client/src/registry_client.rs
+++ b/crates/uv-client/src/registry_client.rs
@@ -10,7 +10,6 @@ use futures::{FutureExt, StreamExt, TryStreamExt};
 use http::{HeaderMap, StatusCode};
 use itertools::Either;
 use reqwest::{Proxy, Response};
-use reqwest_middleware::ClientWithMiddleware;
 use rustc_hash::FxHashMap;
 use tokio::sync::{Mutex, Semaphore};
 use tracing::{Instrument, debug, info_span, instrument, trace, warn};
@@ -35,15 +34,15 @@ use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;
 use uv_torch::TorchStrategy;
 
-use crate::base_client::{BaseClientBuilder, ExtraMiddleware};
+use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy};
 use crate::cached_client::CacheControl;
 use crate::flat_index::FlatIndexEntry;
 use crate::html::SimpleHtml;
 use crate::remote_metadata::wheel_metadata_from_remote_zip;
 use crate::rkyvutil::OwnedArchive;
 use crate::{
-    BaseClient, CachedClient, CachedClientError, Error, ErrorKind, FlatIndexClient,
-    FlatIndexEntries,
+    BaseClient, CachedClient, Error, ErrorKind, FlatIndexClient, FlatIndexEntries,
+    RedirectClientWithMiddleware,
 };
 
 /// A builder for a [`RegistryClient`].
@@ -152,9 +151,23 @@ impl<'a> RegistryClientBuilder<'a> {
         self
     }
 
+    /// Allows credentials to be propagated on cross-origin redirects.
+    ///
+    /// WARNING: This should only be available for tests. In production code, propagating credentials
+    /// during cross-origin redirects can lead to security vulnerabilities including credential
+    /// leakage to untrusted domains.
+    #[cfg(test)]
+    #[must_use]
+    pub fn allow_cross_origin_credentials(mut self) -> Self {
+        self.base_client_builder = self.base_client_builder.allow_cross_origin_credentials();
+        self
+    }
+
     pub fn build(self) -> RegistryClient {
         // Build a base client
-        let builder = self.base_client_builder;
+        let builder = self
+            .base_client_builder
+            .redirect(RedirectPolicy::RetriggerMiddleware);
 
         let client = builder.build();
@@ -251,7 +264,7 @@ impl RegistryClient {
     }
 
     /// Return the [`BaseClient`] used by this client.
-    pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
+    pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
         self.client.uncached().for_host(url)
     }
@@ -607,18 +620,16 @@ impl RegistryClient {
                 .boxed_local()
                 .instrument(info_span!("parse_simple_api", package = %package_name))
         };
-        self.cached_client()
+        let simple = self
+            .cached_client()
             .get_cacheable_with_retry(
                 simple_request,
                 cache_entry,
                 cache_control,
                 parse_simple_response,
             )
-            .await
-            .map_err(|err| match err {
-                CachedClientError::Client(err) => err,
-                CachedClientError::Callback(err) => err,
-            })
+            .await?;
+        Ok(simple)
     }
 
     /// Fetch the [`SimpleMetadata`] from a local file, using a PEP 503-compatible directory
@@ -900,15 +911,13 @@ impl RegistryClient {
                 .map_err(|err| ErrorKind::AsyncHttpRangeReader(url.clone(), err))?;
             trace!("Getting metadata for {filename} by range request");
             let text = wheel_metadata_from_remote_zip(filename, url, &mut reader).await?;
-            let metadata =
-                ResolutionMetadata::parse_metadata(text.as_bytes()).map_err(|err| {
-                    Error::from(ErrorKind::MetadataParseError(
-                        filename.clone(),
-                        url.to_string(),
-                        Box::new(err),
-                    ))
-                })?;
-            Ok::<_, CachedClientError<Error>>(metadata)
+            ResolutionMetadata::parse_metadata(text.as_bytes()).map_err(|err| {
+                Error::from(ErrorKind::MetadataParseError(
+                    filename.clone(),
+                    url.to_string(),
+                    Box::new(err),
+                ))
+            })
         }
         .boxed_local()
         .instrument(info_span!("read_metadata_range_request", wheel = %filename))
@@ -1222,12 +1231,191 @@ mod tests {
     use std::str::FromStr;
 
+    use url::Url;
     use uv_normalize::PackageName;
     use uv_pypi_types::{JoinRelativeError, SimpleJson};
     use uv_redacted::DisplaySafeUrl;
 
     use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml};
+    use uv_cache::Cache;
+    use wiremock::matchers::{basic_auth, method, path_regex};
+    use wiremock::{Mock, MockServer, ResponseTemplate};
+
+    use crate::RegistryClientBuilder;
+
+    type Error = Box<dyn std::error::Error>;
+
+    async fn start_test_server(username: &'static str, password: &'static str) -> MockServer {
+        let server = MockServer::start().await;
+
+        Mock::given(method("GET"))
+            .and(basic_auth(username, password))
+            .respond_with(ResponseTemplate::new(200))
+            .mount(&server)
+            .await;
+
+        Mock::given(method("GET"))
+            .respond_with(ResponseTemplate::new(401))
+            .mount(&server)
+            .await;
+
+        server
+    }
+
+    #[tokio::test]
+    async fn test_redirect_to_server_with_credentials() -> Result<(), Error> {
+        let username = "user";
+        let password = "password";
+
+        let auth_server = start_test_server(username, password).await;
+        let auth_base_url = DisplaySafeUrl::parse(&auth_server.uri())?;
+
+        let redirect_server = MockServer::start().await;
+
+        // Configure the redirect server to respond with a 302 to the auth server
+        Mock::given(method("GET"))
+            .respond_with(
+                ResponseTemplate::new(302).insert_header("Location", format!("{auth_base_url}")),
+            )
+            .mount(&redirect_server)
+            .await;
+
+        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?;
+
+        let cache = Cache::temp()?;
+        let registry_client = RegistryClientBuilder::new(cache)
+            .allow_cross_origin_credentials()
+            .build();
+        let client = registry_client.cached_client().uncached();
+
+        assert_eq!(
+            client
+                .for_host(&redirect_server_url)
+                .get(redirect_server.uri())
+                .send()
+                .await?
+                .status(),
+            401,
+            "Requests should fail if credentials are missing"
+        );
+
+        let mut url = redirect_server_url.clone();
+        let _ = url.set_username(username);
+        let _ = url.set_password(Some(password));
+
+        assert_eq!(
+            client
+                .for_host(&redirect_server_url)
+                .get(Url::from(url))
+                .send()
+                .await?
+                .status(),
+            200,
+            "Requests should succeed if credentials are present"
+        );
+
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_redirect_root_relative_url() -> Result<(), Error> {
+        let username = "user";
+        let password = "password";
+
+        let redirect_server = MockServer::start().await;
+
+        // Configure the redirect server to respond with a 307 with a relative URL.
+        Mock::given(method("GET"))
+            .and(path_regex("/foo/"))
+            .respond_with(
+                ResponseTemplate::new(307).insert_header("Location", "/bar/baz/".to_string()),
+            )
+            .mount(&redirect_server)
+            .await;
+
+        Mock::given(method("GET"))
+            .and(path_regex("/bar/baz/"))
+            .and(basic_auth(username, password))
+            .respond_with(ResponseTemplate::new(200))
+            .mount(&redirect_server)
+            .await;
+
+        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;
+
+        let cache = Cache::temp()?;
+        let registry_client = RegistryClientBuilder::new(cache)
+            .allow_cross_origin_credentials()
+            .build();
+        let client = registry_client.cached_client().uncached();
+
+        let mut url = redirect_server_url.clone();
+        let _ = url.set_username(username);
+        let _ = url.set_password(Some(password));
+
+        assert_eq!(
+            client
+                .for_host(&url)
+                .get(Url::from(url))
+                .send()
+                .await?
+                .status(),
+            200,
+            "Requests should succeed for relative URL"
+        );
+
+        Ok(())
+    }
+
+    #[tokio::test]
+    async fn test_redirect_relative_url() -> Result<(), Error> {
+        let username = "user";
+        let password = "password";
+
+        let redirect_server = MockServer::start().await;
+
+        // Configure the redirect server to respond with a 307 with a relative URL.
+        Mock::given(method("GET"))
+            .and(path_regex("/foo/bar/baz/"))
+            .and(basic_auth(username, password))
+            .respond_with(ResponseTemplate::new(200))
+            .mount(&redirect_server)
+            .await;
+
+        Mock::given(method("GET"))
+            .and(path_regex("/foo/"))
+            .and(basic_auth(username, password))
+            .respond_with(
+                ResponseTemplate::new(307).insert_header("Location", "bar/baz/".to_string()),
+            )
+            .mount(&redirect_server)
+            .await;
+
+        let cache = Cache::temp()?;
+        let registry_client = RegistryClientBuilder::new(cache)
+            .allow_cross_origin_credentials()
+            .build();
+        let client = registry_client.cached_client().uncached();
+
+        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;
+        let mut url = redirect_server_url.clone();
+        let _ = url.set_username(username);
+        let _ = url.set_password(Some(password));
+
+        assert_eq!(
+            client
+                .for_host(&url)
+                .get(Url::from(url))
+                .send()
+                .await?
+                .status(),
+            200,
+            "Requests should succeed for relative URL"
+        );
+
+        Ok(())
+    }
+
     #[test]
     fn ignore_failing_files() {
         // 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
diff --git a/crates/uv-configuration/src/build_options.rs b/crates/uv-configuration/src/build_options.rs
index 1a62a1a12..8b493cbf0 100644
--- a/crates/uv-configuration/src/build_options.rs
+++ b/crates/uv-configuration/src/build_options.rs
@@ -4,7 +4,7 @@ use uv_pep508::PackageName;
 
 use crate::{PackageNameSpecifier, PackageNameSpecifiers};
 
-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)]
 pub enum BuildKind {
     /// A PEP 517 wheel build.
     #[default]
diff --git a/crates/uv-configuration/src/dependency_groups.rs b/crates/uv-configuration/src/dependency_groups.rs
index 345f4077c..a3b90ea5f 100644
--- a/crates/uv-configuration/src/dependency_groups.rs
+++ b/crates/uv-configuration/src/dependency_groups.rs
@@ -295,6 +295,15 @@ pub struct DependencyGroupsWithDefaults {
 }
 
 impl DependencyGroupsWithDefaults {
+    /// Do not enable any groups
+    ///
+    /// Many places in the code need to know what dependency-groups are active,
+    /// but various commands or subsystems never enable any dependency-groups,
+    /// in which case they want this.
+    pub fn none() -> Self {
+        DependencyGroups::default().with_defaults(DefaultGroups::default())
+    }
+
     /// Returns `true` if the specification was enabled, and *only* because it was a default
     pub fn contains_because_default(&self, group: &GroupName) -> bool {
         self.cur.contains(group) && !self.prev.contains(group)
diff --git a/crates/uv-configuration/src/extras.rs b/crates/uv-configuration/src/extras.rs
index 3bc9da21a..e39fc72ef 100644
--- a/crates/uv-configuration/src/extras.rs
+++ b/crates/uv-configuration/src/extras.rs
@@ -263,6 +263,14 @@ pub struct ExtrasSpecificationWithDefaults {
 }
 
 impl ExtrasSpecificationWithDefaults {
+    /// Do not enable any extras
+    ///
+    /// Many places in the code need to know what extras are active,
+    /// but various commands or subsystems never enable any extras,
+    /// in which case they want this.
+    pub fn none() -> Self {
+        ExtrasSpecification::default().with_defaults(DefaultExtras::default())
+    }
     /// Returns `true` if the specification was enabled, and *only* because it was a default
     pub fn contains_because_default(&self, extra: &ExtraName) -> bool {
         self.cur.contains(extra) && !self.prev.contains(extra)
diff --git a/crates/uv-configuration/src/name_specifiers.rs b/crates/uv-configuration/src/name_specifiers.rs
index 5ff209948..3efeee1f2 100644
--- a/crates/uv-configuration/src/name_specifiers.rs
+++ b/crates/uv-configuration/src/name_specifiers.rs
@@ -1,3 +1,5 @@
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::str::FromStr;
 
 use uv_pep508::PackageName;
@@ -63,28 +65,16 @@ impl<'de> serde::Deserialize<'de> for PackageNameSpecifier {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for PackageNameSpecifier {
-    fn schema_name() -> String {
-        "PackageNameSpecifier".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("PackageNameSpecifier")
     }
 
-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            string: Some(Box::new(schemars::schema::StringValidation {
-                // See: https://packaging.python.org/en/latest/specifications/name-normalization/#name-format
-                pattern: Some(
-                    r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$"
-                        .to_string(),
-                ),
-                ..schemars::schema::StringValidation::default()
-            })),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "pattern": r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$",
+            "description": "The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.",
+        })
     }
 }
diff --git a/crates/uv-configuration/src/required_version.rs b/crates/uv-configuration/src/required_version.rs
index a0138a46e..70c69eaf3 100644
--- a/crates/uv-configuration/src/required_version.rs
+++ b/crates/uv-configuration/src/required_version.rs
@@ -1,5 +1,6 @@
-use std::fmt::Formatter;
-use std::str::FromStr;
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
+use std::{fmt::Formatter, str::FromStr};
 
 use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError};
 
@@ -36,20 +37,15 @@ impl FromStr for RequiredVersion {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for RequiredVersion {
-    fn schema_name() -> String {
-        String::from("RequiredVersion")
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("RequiredVersion")
     }
 
-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("A version specifier, e.g. `>=0.5.0` or `==0.5.0`.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "A version specifier, e.g. `>=0.5.0` or `==0.5.0`."
+        })
     }
 }
diff --git a/crates/uv-configuration/src/sources.rs b/crates/uv-configuration/src/sources.rs
index c60d69ef4..f8d0c3367 100644
--- a/crates/uv-configuration/src/sources.rs
+++ b/crates/uv-configuration/src/sources.rs
@@ -1,4 +1,6 @@
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+#[derive(
+    Debug, Default, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize,
+)]
 #[serde(rename_all = "kebab-case", deny_unknown_fields)]
 pub enum SourceStrategy {
     /// Use `tool.uv.sources` when resolving dependencies.
diff --git a/crates/uv-configuration/src/threading.rs b/crates/uv-configuration/src/threading.rs
index 58b6190a6..2f70b5d81 100644
--- a/crates/uv-configuration/src/threading.rs
+++ b/crates/uv-configuration/src/threading.rs
@@ -62,7 +62,7 @@ pub static RAYON_PARALLELISM: AtomicUsize = AtomicUsize::new(0);
 /// `LazyLock::force(&RAYON_INITIALIZE)`.
 pub static RAYON_INITIALIZE: LazyLock<()> = LazyLock::new(|| {
     rayon::ThreadPoolBuilder::new()
-        .num_threads(RAYON_PARALLELISM.load(Ordering::SeqCst))
+        .num_threads(RAYON_PARALLELISM.load(Ordering::Relaxed))
         .stack_size(min_stack_size())
         .build_global()
         .expect("failed to initialize global rayon pool");
diff --git a/crates/uv-configuration/src/trusted_host.rs b/crates/uv-configuration/src/trusted_host.rs
index 64fb14169..07ff2998a 100644
--- a/crates/uv-configuration/src/trusted_host.rs
+++ b/crates/uv-configuration/src/trusted_host.rs
@@ -1,4 +1,6 @@
 use serde::{Deserialize, Deserializer};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::str::FromStr;
 use url::Url;
 
@@ -143,20 +145,15 @@ impl std::fmt::Display for TrustedHost {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for TrustedHost {
-    fn schema_name() -> String {
-        "TrustedHost".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("TrustedHost")
     }
 
-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("A host or host-port pair.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "A host or host-port pair."
+        })
     }
 }
diff --git a/crates/uv-dev/src/compile.rs b/crates/uv-dev/src/compile.rs
index 434b5e791..d2b685b23 100644
--- a/crates/uv-dev/src/compile.rs
+++ b/crates/uv-dev/src/compile.rs
@@ -4,7 +4,7 @@ use clap::Parser;
 use tracing::info;
 
 use uv_cache::{Cache, CacheArgs};
-use uv_configuration::Concurrency;
+use uv_configuration::{Concurrency, PreviewMode};
 use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest};
 
 #[derive(Parser)]
@@ -26,6 +26,7 @@ pub(crate) async fn compile(args: CompileArgs) -> anyhow::Result<()> {
         &PythonRequest::default(),
         EnvironmentPreference::OnlyVirtual,
         &cache,
+        PreviewMode::Disabled,
     )?
     .into_interpreter();
     interpreter.sys_executable().to_path_buf()
diff --git a/crates/uv-dev/src/generate_json_schema.rs b/crates/uv-dev/src/generate_json_schema.rs
index 75465f429..8a4ff47d5 100644
--- a/crates/uv-dev/src/generate_json_schema.rs
+++ b/crates/uv-dev/src/generate_json_schema.rs
@@ -3,7 +3,7 @@ use std::path::PathBuf;
 use anstream::println;
 use anyhow::{Result, bail};
 use pretty_assertions::StrComparison;
-use schemars::{JsonSchema, schema_for};
+use schemars::JsonSchema;
 use serde::Deserialize;
 
 use uv_settings::Options as SettingsOptions;
@@ -91,7 +91,10 @@ const REPLACEMENTS: &[(&str, &str)] = &[
 
 /// Generate the JSON schema for the combined options as a string.
 fn generate() -> String {
-    let schema = schema_for!(CombinedOptions);
+    let settings = schemars::generate::SchemaSettings::draft07();
+    let generator = schemars::SchemaGenerator::new(settings);
+    let schema = generator.into_root_schema_for::<CombinedOptions>();
+
     let mut output = serde_json::to_string_pretty(&schema).unwrap();
 
     for (value, replacement) in REPLACEMENTS {
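In schemars 1.x the default dialect is no longer Draft-07, so the generator is constructed explicitly instead of using `schema_for!`. A small sketch of the same pattern with a hypothetical `MyOptions` type (assumes the `schemars` and `serde_json` crates):

    use schemars::JsonSchema;

    #[derive(JsonSchema)]
    struct MyOptions {
        /// Number of retries.
        retries: u32,
    }

    fn main() {
        let settings = schemars::generate::SchemaSettings::draft07();
        let generator = schemars::SchemaGenerator::new(settings);
        let schema = generator.into_root_schema_for::<MyOptions>();
        println!("{}", serde_json::to_string_pretty(&schema).unwrap());
    }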
diff --git a/crates/uv-dev/src/generate_sysconfig_mappings.rs b/crates/uv-dev/src/generate_sysconfig_mappings.rs
index 632d8f6c1..f556922c6 100644
--- a/crates/uv-dev/src/generate_sysconfig_mappings.rs
+++ b/crates/uv-dev/src/generate_sysconfig_mappings.rs
@@ -11,7 +11,7 @@ use crate::ROOT_DIR;
 use crate::generate_all::Mode;
 
 /// Contains current supported targets
-const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250612/cpython-unix/targets.yml";
+const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250702/cpython-unix/targets.yml";
 
 #[derive(clap::Args)]
 pub(crate) struct Args {
@@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
     output.push_str("//! DO NOT EDIT\n");
     output.push_str("//!\n");
     output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
-    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250612/cpython-unix/targets.yml>\n");
+    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250702/cpython-unix/targets.yml>\n");
     output.push_str("//!\n");
 
     // Disable clippy/fmt
diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs
index 3b0ad5555..874e412e5 100644
--- a/crates/uv-dispatch/src/lib.rs
+++ b/crates/uv-dispatch/src/lib.rs
@@ -11,6 +11,7 @@ use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use thiserror::Error;
 use tracing::{debug, instrument, trace};
+
 use uv_build_backend::check_direct_build;
 use uv_build_frontend::{SourceBuild, SourceBuildContext};
 use uv_cache::Cache;
@@ -35,8 +36,8 @@ use uv_resolver::{
     PythonRequirement, Resolver, ResolverEnvironment,
 };
 use uv_types::{
-    AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages, HashStrategy,
-    InFlight,
+    AnyErrorBuild, BuildArena, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages,
+    HashStrategy, InFlight,
 };
 use uv_workspace::WorkspaceCache;
 
@@ -179,6 +180,10 @@ impl BuildContext for BuildDispatch<'_> {
         &self.shared_state.git
     }
 
+    fn build_arena(&self) -> &BuildArena<SourceBuild> {
+        &self.shared_state.build_arena
+    }
+
     fn capabilities(&self) -> &IndexCapabilities {
         &self.shared_state.capabilities
     }
@@ -433,6 +438,7 @@ impl BuildContext for BuildDispatch<'_> {
             self.build_extra_env_vars.clone(),
             build_output,
             self.concurrency.builds,
+            self.preview,
         )
         .boxed_local()
         .await?;
@@ -447,12 +453,6 @@ impl BuildContext for BuildDispatch<'_> {
         build_kind: BuildKind,
         version_id: Option<&'data str>,
     ) -> Result<Option<DistFilename>, BuildDispatchError> {
-        // Direct builds are a preview feature with the uv build backend.
-        if self.preview.is_disabled() {
-            trace!("Preview is disabled, not checking for direct build");
-            return Ok(None);
-        }
-
         let source_tree = if let Some(subdir) = subdirectory {
             source.join(subdir)
         } else {
@@ -520,6 +520,8 @@ pub struct SharedState {
     index: InMemoryIndex,
     /// The downloaded distributions.
     in_flight: InFlight,
+    /// Build directories for any PEP 517 builds executed during resolution or installation.
+    build_arena: BuildArena<SourceBuild>,
 }
 
 impl SharedState {
@@ -532,6 +534,7 @@ impl SharedState {
         Self {
             git: self.git.clone(),
             capabilities: self.capabilities.clone(),
+            build_arena: self.build_arena.clone(),
             ..Default::default()
         }
     }
@@ -555,4 +558,9 @@ impl SharedState {
     pub fn capabilities(&self) -> &IndexCapabilities {
         &self.capabilities
     }
+
+    /// Return the [`BuildArena`] used by the [`SharedState`].
+    pub fn build_arena(&self) -> &BuildArena<SourceBuild> {
+        &self.build_arena
+    }
 }
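The `BuildArena` is cloned into `SharedState` forks while still pointing at the same underlying storage. A toy sketch of that shape, using a mutex-guarded `Vec` (the real `BuildArena` may use a different concurrent structure internally):

    use std::sync::{Arc, Mutex};

    /// Append-only store whose clones all share the same backing storage.
    #[derive(Default, Clone)]
    struct Arena<T>(Arc<Mutex<Vec<T>>>);

    impl<T> Arena<T> {
        fn push(&self, value: T) {
            self.0.lock().unwrap().push(value);
        }

        fn len(&self) -> usize {
            self.0.lock().unwrap().len()
        }
    }

    fn main() {
        let arena = Arena::<String>::default();
        let fork = arena.clone();
        fork.push("pep517-build-dir".to_string());
        assert_eq!(arena.len(), 1); // The fork shares state with the original.
    }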
diff --git a/crates/uv-distribution-filename/Cargo.toml b/crates/uv-distribution-filename/Cargo.toml
index f30e79b3b..0dfdd623e 100644
--- a/crates/uv-distribution-filename/Cargo.toml
+++ b/crates/uv-distribution-filename/Cargo.toml
@@ -27,7 +27,6 @@ rkyv = { workspace = true, features = ["smallvec-1"] }
 serde = { workspace = true }
 smallvec = { workspace = true }
 thiserror = { workspace = true }
-url = { workspace = true }
 
 [dev-dependencies]
 insta = { version = "1.40.0" }
diff --git a/crates/uv-distribution-filename/src/wheel.rs b/crates/uv-distribution-filename/src/wheel.rs
index d7dc7dfca..2ac0ef7d9 100644
--- a/crates/uv-distribution-filename/src/wheel.rs
+++ b/crates/uv-distribution-filename/src/wheel.rs
@@ -5,7 +5,6 @@ use std::str::FromStr;
 use memchr::memchr;
 use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use thiserror::Error;
-use url::Url;
 
 use uv_cache_key::cache_digest;
 use uv_normalize::{InvalidNameError, PackageName};
@@ -300,29 +299,6 @@ impl WheelFilename {
     }
 }
 
-impl TryFrom<&Url> for WheelFilename {
-    type Error = WheelFilenameError;
-
-    fn try_from(url: &Url) -> Result<Self, Self::Error> {
-        let filename = url
-            .path_segments()
-            .ok_or_else(|| {
-                WheelFilenameError::InvalidWheelFileName(
-                    url.to_string(),
-                    "URL must have a path".to_string(),
-                )
-            })?
-            .next_back()
-            .ok_or_else(|| {
-                WheelFilenameError::InvalidWheelFileName(
-                    url.to_string(),
-                    "URL must contain a filename".to_string(),
-                )
-            })?;
-        Self::from_str(filename)
-    }
-}
-
 impl<'de> Deserialize<'de> for WheelFilename {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
diff --git a/crates/uv-distribution-types/Cargo.toml b/crates/uv-distribution-types/Cargo.toml
index dc5a70166..1ca28c5ed 100644
--- a/crates/uv-distribution-types/Cargo.toml
+++ b/crates/uv-distribution-types/Cargo.toml
@@ -29,6 +29,7 @@ uv-platform-tags = { workspace = true }
 uv-pypi-types = { workspace = true }
 uv-redacted = { workspace = true }
 uv-small-str = { workspace = true }
+uv-warnings = { workspace = true }
 
 arcstr = { workspace = true }
 bitflags = { workspace = true }
diff --git a/crates/uv-distribution-types/src/annotation.rs b/crates/uv-distribution-types/src/annotation.rs
index 673d23c17..398bcb6b4 100644
--- a/crates/uv-distribution-types/src/annotation.rs
+++ b/crates/uv-distribution-types/src/annotation.rs
@@ -26,7 +26,11 @@ impl std::fmt::Display for SourceAnnotation {
                 write!(f, "{project_name} ({})", path.portable_display())
             }
             RequirementOrigin::Group(path, project_name, group) => {
-                write!(f, "{project_name} ({}:{group})", path.portable_display())
+                if let Some(project_name) = project_name {
+                    write!(f, "{project_name} ({}:{group})", path.portable_display())
+                } else {
+                    write!(f, "({}:{group})", path.portable_display())
+                }
             }
             RequirementOrigin::Workspace => {
                 write!(f, "(workspace)")
@@ -45,11 +49,15 @@ impl std::fmt::Display for SourceAnnotation {
             }
             RequirementOrigin::Group(path, project_name, group) => {
                 // Group is not used for override
-                write!(
-                    f,
-                    "--override {project_name} ({}:{group})",
-                    path.portable_display()
-                )
+                if let Some(project_name) = project_name {
+                    write!(
+                        f,
+                        "--override {project_name} ({}:{group})",
+                        path.portable_display()
+                    )
+                } else {
+                    write!(f, "--override ({}:{group})", path.portable_display())
+                }
             }
             RequirementOrigin::Workspace => {
                 write!(f, "--override (workspace)")
diff --git a/crates/uv-distribution-types/src/file.rs b/crates/uv-distribution-types/src/file.rs
index e17901f80..a75af3977 100644
--- a/crates/uv-distribution-types/src/file.rs
+++ b/crates/uv-distribution-types/src/file.rs
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::fmt::{self, Display, Formatter};
 use std::str::FromStr;
 
@@ -160,16 +161,33 @@ impl UrlString {
             .unwrap_or(self.as_ref())
     }
 
-    /// Return the [`UrlString`] with any fragments removed.
+    /// Return the [`UrlString`] (as a [`Cow`]) with any fragments removed.
     #[must_use]
-    pub fn without_fragment(&self) -> Self {
-        Self(
-            self.as_ref()
-                .split_once('#')
-                .map(|(path, _)| path)
-                .map(SmallString::from)
-                .unwrap_or_else(|| self.0.clone()),
-        )
+    pub fn without_fragment(&self) -> Cow<'_, Self> {
+        self.as_ref()
+            .split_once('#')
+            .map(|(path, _)| Cow::Owned(UrlString(SmallString::from(path))))
+            .unwrap_or(Cow::Borrowed(self))
+    }
+
+    /// Return the [`UrlString`] (as a [`Cow`]) with any trailing slash removed.
+    ///
+    /// This matches the semantics of [`Url::pop_if_empty`], which will not trim a trailing slash if
+    /// it's the only path segment, e.g., `https://example.com/` would be unchanged.
+    #[must_use]
+    pub fn without_trailing_slash(&self) -> Cow<'_, Self> {
+        self.as_ref()
+            .strip_suffix('/')
+            .filter(|path| {
+                // Only strip the trailing slash if there's _another_ trailing slash that isn't a
+                // part of the scheme.
+                path.split_once("://")
+                    .map(|(_scheme, rest)| rest)
+                    .unwrap_or(path)
+                    .contains('/')
+            })
+            .map(|path| Cow::Owned(UrlString(SmallString::from(path))))
+            .unwrap_or(Cow::Borrowed(self))
     }
 }
 
@@ -252,16 +270,51 @@ mod tests {
 
     #[test]
     fn without_fragment() {
+        // Borrows a URL without a fragment
+        let url = UrlString("https://example.com/path".into());
+        assert_eq!(&*url.without_fragment(), &url);
+        assert!(matches!(url.without_fragment(), Cow::Borrowed(_)));
+
+        // Removes the fragment if present on the URL
         let url = UrlString("https://example.com/path?query#fragment".into());
         assert_eq!(
-            url.without_fragment(),
-            UrlString("https://example.com/path?query".into())
+            &*url.without_fragment(),
+            &UrlString("https://example.com/path?query".into())
         );
+        assert!(matches!(url.without_fragment(), Cow::Owned(_)));
+    }
 
-        let url = UrlString("https://example.com/path#fragment".into());
-        assert_eq!(url.base_str(), "https://example.com/path");
-
+    #[test]
+    fn without_trailing_slash() {
+        // Borrows a URL without a slash
         let url = UrlString("https://example.com/path".into());
-        assert_eq!(url.base_str(), "https://example.com/path");
+        assert_eq!(&*url.without_trailing_slash(), &url);
+        assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
+
+        // Removes the trailing slash if present on the URL
+        let url = UrlString("https://example.com/path/".into());
+        assert_eq!(
+            &*url.without_trailing_slash(),
+            &UrlString("https://example.com/path".into())
+        );
+        assert!(matches!(url.without_trailing_slash(), Cow::Owned(_)));
+
+        // Does not remove a trailing slash if it's the only path segment
+        let url = UrlString("https://example.com/".into());
+        assert_eq!(&*url.without_trailing_slash(), &url);
+        assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
+
+        // Does not remove a trailing slash if it's the only path segment with a missing scheme
+        let url = UrlString("example.com/".into());
+        assert_eq!(&*url.without_trailing_slash(), &url);
+        assert!(matches!(url.without_trailing_slash(), Cow::Borrowed(_)));
+
+        // Removes the trailing slash when the scheme is missing
+        let url = UrlString("example.com/path/".into());
+        assert_eq!(
+            &*url.without_trailing_slash(),
+            &UrlString("example.com/path".into())
+        );
+        assert!(matches!(url.without_trailing_slash(), Cow::Owned(_)));
     }
 }
diff --git a/crates/uv-distribution-types/src/index_url.rs b/crates/uv-distribution-types/src/index_url.rs
index 9604fbd30..0290018f1 100644
--- a/crates/uv-distribution-types/src/index_url.rs
+++ b/crates/uv-distribution-types/src/index_url.rs
@@ -12,6 +12,7 @@ use url::{ParseError, Url};
 
 use uv_pep508::{Scheme, VerbatimUrl, VerbatimUrlError, split_scheme};
 use uv_redacted::DisplaySafeUrl;
+use uv_warnings::warn_user;
 
 use crate::{Index, IndexStatusCodeStrategy, Verbatim};
 
@@ -37,6 +38,8 @@ impl IndexUrl {
     ///
     /// If no root directory is provided, relative paths are resolved against the current working
     /// directory.
+    ///
+    /// Normalizes non-file URLs by removing trailing slashes for consistency.
     pub fn parse(path: &str, root_dir: Option<&Path>) -> Result<Self, IndexUrlError> {
         let url = match split_scheme(path) {
             Some((scheme, ..)) => {
@@ -92,20 +95,15 @@ impl IndexUrl {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for IndexUrl {
-    fn schema_name() -> String {
-        "IndexUrl".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("IndexUrl")
     }
 
-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path."
+        })
     }
 }
 
@@ -140,6 +138,30 @@ impl IndexUrl {
             Cow::Owned(url)
         }
     }
+
+    /// Warn user if the given URL was provided as an ambiguous relative path.
+    ///
+    /// This is a temporary warning. Ambiguous values will not be
+    /// accepted in the future.
+    pub fn warn_on_disambiguated_relative_path(&self) {
+        let Self::Path(verbatim_url) = &self else {
+            return;
+        };
+
+        if let Some(path) = verbatim_url.given() {
+            if !is_disambiguated_path(path) {
+                if cfg!(windows) {
+                    warn_user!(
+                        "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `.\\{path}` or `./{path}`). Support for ambiguous values will be removed in the future"
+                    );
+                } else {
+                    warn_user!(
+                        "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `./{path}`). Support for ambiguous values will be removed in the future"
+                    );
+                }
+            }
+        }
+    }
 }
 
 impl Display for IndexUrl {
@@ -162,6 +184,28 @@ impl Verbatim for IndexUrl {
     }
 }
 
+/// Checks if a path is disambiguated.
+///
+/// Disambiguated paths are absolute paths, paths with valid schemes,
+/// and paths starting with "./" or "../" on Unix or ".\\", "..\\",
+/// "./", or "../" on Windows.
+fn is_disambiguated_path(path: &str) -> bool {
+    if cfg!(windows) {
+        if path.starts_with(".\\") || path.starts_with("..\\") || path.starts_with('/') {
+            return true;
+        }
+    }
+    if path.starts_with("./") || path.starts_with("../") || Path::new(path).is_absolute() {
+        return true;
+    }
+    // Check if the path has a scheme (like `file://`)
+    if let Some((scheme, _)) = split_scheme(path) {
+        return Scheme::parse(scheme).is_some();
+    }
+    // This is an ambiguous relative path
+    false
+}
 /// An error that can occur when parsing an [`IndexUrl`].
 #[derive(Error, Debug)]
 pub enum IndexUrlError {
@@ -214,13 +258,20 @@ impl<'de> serde::de::Deserialize<'de> for IndexUrl {
     }
 }
 
 impl From<VerbatimUrl> for IndexUrl {
-    fn from(url: VerbatimUrl) -> Self {
+    fn from(mut url: VerbatimUrl) -> Self {
         if url.scheme() == "file" {
             Self::Path(Arc::new(url))
-        } else if *url.raw() == *PYPI_URL {
-            Self::Pypi(Arc::new(url))
         } else {
-            Self::Url(Arc::new(url))
+            // Remove trailing slashes for consistency. They'll be re-added if necessary when
+            // querying the Simple API.
+            if let Ok(mut path_segments) = url.raw_mut().path_segments_mut() {
+                path_segments.pop_if_empty();
+            }
+            if *url.raw() == *PYPI_URL {
+                Self::Pypi(Arc::new(url))
+            } else {
+                Self::Url(Arc::new(url))
+            }
         }
     }
 }
@@ -411,6 +462,19 @@ impl<'a> IndexLocations {
             indexes
         }
     }
+
+    /// Add all authenticated sources to the cache.
+    pub fn cache_index_credentials(&self) {
+        for index in self.allowed_indexes() {
+            if let Some(credentials) = index.credentials() {
+                let credentials = Arc::new(credentials);
+                uv_auth::store_credentials(index.raw_url(), credentials.clone());
+                if let Some(root_url) = index.root_url() {
+                    uv_auth::store_credentials(&root_url, credentials.clone());
+                }
+            }
+        }
+    }
 }
 
 impl From<&IndexLocations> for uv_auth::Indexes {
@@ -511,30 +575,23 @@ impl<'a> IndexUrls {
     /// iterator.
     pub fn defined_indexes(&'a self) -> impl Iterator<Item = &'a Index> + 'a {
         if self.no_index {
-            Either::Left(std::iter::empty())
-        } else {
-            Either::Right(
-                {
-                    let mut seen = FxHashSet::default();
-                    self.indexes
-                        .iter()
-                        .filter(move |index| {
-                            index.name.as_ref().is_none_or(|name| seen.insert(name))
-                        })
-                        .filter(|index| !index.default)
-                }
-                .chain({
-                    let mut seen = FxHashSet::default();
-                    self.indexes
-                        .iter()
-                        .filter(move |index| {
-                            index.name.as_ref().is_none_or(|name| seen.insert(name))
-                        })
-                        .find(|index| index.default)
-                        .into_iter()
-                }),
-            )
+            return Either::Left(std::iter::empty());
         }
+
+        let mut seen = FxHashSet::default();
+        let (non_default, default) = self
+            .indexes
+            .iter()
+            .filter(move |index| {
+                if let Some(name) = &index.name {
+                    seen.insert(name)
+                } else {
+                    true
+                }
+            })
+            .partition::<Vec<_>, _>(|index| !index.default);
+
+        Either::Right(non_default.into_iter().chain(default))
     }
 
     /// Return the `--no-index` flag.
@@ -632,3 +689,41 @@ impl IndexCapabilities {
             .insert(Flags::FORBIDDEN);
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_index_url_parse_valid_paths() {
+        // Absolute path
+        assert!(is_disambiguated_path("/absolute/path"));
+        // Relative path
+        assert!(is_disambiguated_path("./relative/path"));
+        assert!(is_disambiguated_path("../../relative/path"));
+        if cfg!(windows) {
+            // Windows absolute path
+            assert!(is_disambiguated_path("C:/absolute/path"));
+            // Windows relative path
+            assert!(is_disambiguated_path(".\\relative\\path"));
+            assert!(is_disambiguated_path("..\\..\\relative\\path"));
+        }
+    }
+
+    #[test]
+    fn test_index_url_parse_ambiguous_paths() {
+        // Test single-segment ambiguous path
+        assert!(!is_disambiguated_path("index"));
+        // Test multi-segment ambiguous path
+        assert!(!is_disambiguated_path("relative/path"));
+    }
+
+    #[test]
+    fn test_index_url_parse_with_schemes() {
+        assert!(is_disambiguated_path("file:///absolute/path"));
+        assert!(is_disambiguated_path("https://registry.com/simple/"));
+        assert!(is_disambiguated_path(
+            "git+https://github.com/example/repo.git"
+        ));
+    }
+}
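The trailing-slash normalization in `From<VerbatimUrl>` leans on the `url` crate's `pop_if_empty`, which never removes the lone root segment. A short demonstration of that behavior (plain `url` crate only):

    use url::Url;

    fn main() {
        let mut url = Url::parse("https://example.com/simple/").unwrap();
        if let Ok(mut segments) = url.path_segments_mut() {
            segments.pop_if_empty();
        }
        assert_eq!(url.as_str(), "https://example.com/simple");

        // A bare root keeps its single, empty path segment.
        let mut root = Url::parse("https://example.com/").unwrap();
        if let Ok(mut segments) = root.path_segments_mut() {
            segments.pop_if_empty();
        }
        assert_eq!(root.as_str(), "https://example.com/");
    }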
diff --git a/crates/uv-distribution-types/src/lib.rs b/crates/uv-distribution-types/src/lib.rs
index 44030ffee..1e3ad7eba 100644
--- a/crates/uv-distribution-types/src/lib.rs
+++ b/crates/uv-distribution-types/src/lib.rs
@@ -73,6 +73,7 @@ pub use crate::pip_index::*;
 pub use crate::prioritized_distribution::*;
 pub use crate::requested::*;
 pub use crate::requirement::*;
+pub use crate::requires_python::*;
 pub use crate::resolution::*;
 pub use crate::resolved::*;
 pub use crate::specified_requirement::*;
@@ -100,6 +101,7 @@ mod pip_index;
 mod prioritized_distribution;
 mod requested;
 mod requirement;
+mod requires_python;
 mod resolution;
 mod resolved;
 mod specified_requirement;
diff --git a/crates/uv-distribution-types/src/pip_index.rs b/crates/uv-distribution-types/src/pip_index.rs
index 6ce22abd2..18671e42f 100644
--- a/crates/uv-distribution-types/src/pip_index.rs
+++ b/crates/uv-distribution-types/src/pip_index.rs
@@ -3,6 +3,8 @@
 //! flags set.
 
 use serde::{Deserialize, Deserializer, Serialize};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::path::Path;
 
 use crate::{Index, IndexUrl};
@@ -50,14 +52,14 @@ macro_rules! impl_index {
 
         #[cfg(feature = "schemars")]
         impl schemars::JsonSchema for $name {
-            fn schema_name() -> String {
+            fn schema_name() -> Cow<'static, str> {
                 IndexUrl::schema_name()
             }
 
             fn json_schema(
-                r#gen: &mut schemars::r#gen::SchemaGenerator,
-            ) -> schemars::schema::Schema {
-                IndexUrl::json_schema(r#gen)
+                generator: &mut schemars::generate::SchemaGenerator,
+            ) -> schemars::Schema {
+                IndexUrl::json_schema(generator)
             }
         }
     };
diff --git a/crates/uv-distribution-types/src/prioritized_distribution.rs b/crates/uv-distribution-types/src/prioritized_distribution.rs
index e3da41a67..52ac2fbd1 100644
--- a/crates/uv-distribution-types/src/prioritized_distribution.rs
+++ b/crates/uv-distribution-types/src/prioritized_distribution.rs
@@ -831,7 +831,7 @@ pub fn implied_markers(filename: &WheelFilename) -> MarkerTree {
                 tag_marker.and(MarkerTree::expression(MarkerExpression::String {
                     key: MarkerValueString::PlatformMachine,
                     operator: MarkerOperator::Equal,
-                    value: arcstr::literal!("x86_64"),
+                    value: arcstr::literal!("AMD64"),
                 }));
                 marker.or(tag_marker);
             }
@@ -925,7 +925,7 @@ mod tests {
         );
         assert_markers(
             "numpy-2.2.1-cp313-cp313t-win_amd64.whl",
-            "sys_platform == 'win32' and platform_machine == 'x86_64'",
+            "sys_platform == 'win32' and platform_machine == 'AMD64'",
         );
         assert_markers(
             "numpy-2.2.1-cp313-cp313t-win_arm64.whl",
diff --git a/crates/uv-resolver/src/requires_python.rs b/crates/uv-distribution-types/src/requires_python.rs
similarity index 96%
rename from crates/uv-resolver/src/requires_python.rs
rename to crates/uv-distribution-types/src/requires_python.rs
index 8e4d33213..49a4fd5c4 100644
--- a/crates/uv-resolver/src/requires_python.rs
+++ b/crates/uv-distribution-types/src/requires_python.rs
@@ -1,6 +1,6 @@
 use std::collections::Bound;
 
-use pubgrub::Range;
+use version_ranges::Ranges;
 
 use uv_distribution_filename::WheelFilename;
 use uv_pep440::{
@@ -66,15 +66,8 @@ impl RequiresPython {
     ) -> Option<Self> {
         // Convert to PubGrub range and perform an intersection.
         let range = specifiers
-            .into_iter()
-            .map(|specifier| release_specifiers_to_ranges(specifier.clone()))
-            .fold(None, |range: Option<Range<Version>>, requires_python| {
-                if let Some(range) = range {
-                    Some(range.intersection(&requires_python))
-                } else {
-                    Some(requires_python)
-                }
-            })?;
+            .map(|specs| release_specifiers_to_ranges(specs.clone()))
+            .reduce(|acc, r| acc.intersection(&r))?;
 
         // If the intersection is empty, return `None`.
         if range.is_empty() {
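The `fold` over an `Option` accumulator becomes a `reduce`, which already yields `None` for an empty iterator. The same shape on toy closed intervals:

    // Intersect a sequence of inclusive intervals; `None` means no constraint was given.
    fn intersect_all(ranges: impl Iterator<Item = (u32, u32)>) -> Option<(u32, u32)> {
        ranges.reduce(|(lo_a, hi_a), (lo_b, hi_b)| (lo_a.max(lo_b), hi_a.min(hi_b)))
    }

    fn main() {
        assert_eq!(intersect_all([(1, 10), (5, 8)].into_iter()), Some((5, 8)));
        assert_eq!(intersect_all(std::iter::empty()), None);
    }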
@@ -97,12 +90,12 @@ impl RequiresPython {
     pub fn split(&self, bound: Bound<Version>) -> Option<(Self, Self)> {
         let RequiresPythonRange(.., upper) = &self.range;
 
-        let upper = Range::from_range_bounds((bound, upper.clone().into()));
+        let upper = Ranges::from_range_bounds((bound, upper.clone().into()));
         let lower = upper.complement();
 
         // Intersect left and right with the existing range.
-        let lower = lower.intersection(&Range::from(self.range.clone()));
-        let upper = upper.intersection(&Range::from(self.range.clone()));
+        let lower = lower.intersection(&Ranges::from(self.range.clone()));
+        let upper = upper.intersection(&Ranges::from(self.range.clone()));
 
         if lower.is_empty() || upper.is_empty() {
             None
@@ -353,7 +346,7 @@ impl RequiresPython {
     /// a lock file are deserialized and turned into a `ResolutionGraph`, the
     /// markers are "complexified" to put the `requires-python` assumption back
     /// into the marker explicitly.
-    pub(crate) fn simplify_markers(&self, marker: MarkerTree) -> MarkerTree {
+    pub fn simplify_markers(&self, marker: MarkerTree) -> MarkerTree {
         let (lower, upper) = (self.range().lower(), self.range().upper());
         marker.simplify_python_versions(lower.as_ref(), upper.as_ref())
     }
@@ -373,7 +366,7 @@ impl RequiresPython {
     /// ```text
     /// python_full_version >= '3.8' and python_full_version < '3.12'
     /// ```
-    pub(crate) fn complexify_markers(&self, marker: MarkerTree) -> MarkerTree {
+    pub fn complexify_markers(&self, marker: MarkerTree) -> MarkerTree {
         let (lower, upper) = (self.range().lower(), self.range().upper());
         marker.complexify_python_versions(lower.as_ref(), upper.as_ref())
     }
@@ -537,7 +530,7 @@ pub struct RequiresPythonRange(LowerBound, UpperBound);
 
 impl RequiresPythonRange {
     /// Initialize a [`RequiresPythonRange`] from a [`Range`].
-    pub fn from_range(range: &Range<Version>) -> Self {
+    pub fn from_range(range: &Ranges<Version>) -> Self {
         let (lower, upper) = range
             .bounding_range()
             .map(|(lower_bound, upper_bound)| (lower_bound.cloned(), upper_bound.cloned()))
@@ -575,9 +568,9 @@ impl Default for RequiresPythonRange {
     }
 }
 
-impl From<RequiresPythonRange> for Range<Version> {
+impl From<RequiresPythonRange> for Ranges<Version> {
     fn from(value: RequiresPythonRange) -> Self {
-        Range::from_range_bounds::<(Bound<Version>, Bound<Version>), _>((
+        Ranges::from_range_bounds::<(Bound<Version>, Bound<Version>), _>((
             value.0.into(),
             value.1.into(),
         ))
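`split` builds both halves from a single bound: the upper half via `from_range_bounds`, the lower half as its complement, each intersected with the original range. The same construction over `u32` for brevity, assuming the `version-ranges` API used above:

    use version_ranges::Ranges;

    fn split_at(range: &Ranges<u32>, bound: u32) -> (Ranges<u32>, Ranges<u32>) {
        let upper = Ranges::from_range_bounds(bound..);
        let lower = upper.complement();
        (lower.intersection(range), upper.intersection(range))
    }

    fn main() {
        let (lower, upper) = split_at(&Ranges::full(), 5);
        assert!(lower.contains(&4) && !lower.contains(&5));
        assert!(upper.contains(&5) && !upper.contains(&4));
    }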
@@ -592,21 +585,18 @@ impl From<RequiresPythonRange> for Range<Version> {
 /// a simplified marker, one must re-contextualize it by adding the
 /// `requires-python` constraint back to the marker.
 #[derive(Clone, Copy, Debug, Default, Eq, PartialEq, PartialOrd, Ord, serde::Deserialize)]
-pub(crate) struct SimplifiedMarkerTree(MarkerTree);
+pub struct SimplifiedMarkerTree(MarkerTree);
 
 impl SimplifiedMarkerTree {
     /// Simplifies the given markers by assuming the given `requires-python`
     /// bound is true.
-    pub(crate) fn new(
-        requires_python: &RequiresPython,
-        marker: MarkerTree,
-    ) -> SimplifiedMarkerTree {
+    pub fn new(requires_python: &RequiresPython, marker: MarkerTree) -> SimplifiedMarkerTree {
         SimplifiedMarkerTree(requires_python.simplify_markers(marker))
     }
 
     /// Complexifies the given markers by adding the given `requires-python` as
     /// a constraint to these simplified markers.
-    pub(crate) fn into_marker(self, requires_python: &RequiresPython) -> MarkerTree {
+    pub fn into_marker(self, requires_python: &RequiresPython) -> MarkerTree {
         requires_python.complexify_markers(self.0)
     }
 
@@ -614,12 +604,12 @@ impl SimplifiedMarkerTree {
     ///
     /// This only returns `None` when the underlying marker is always true,
     /// i.e., it matches all possible marker environments.
-    pub(crate) fn try_to_string(self) -> Option<String> {
+    pub fn try_to_string(self) -> Option<String> {
         self.0.try_to_string()
     }
 
     /// Returns the underlying marker tree without re-complexifying them.
-    pub(crate) fn as_simplified_marker_tree(self) -> MarkerTree {
+    pub fn as_simplified_marker_tree(self) -> MarkerTree {
         self.0
     }
 }
diff --git a/crates/uv-distribution-types/src/status_code_strategy.rs b/crates/uv-distribution-types/src/status_code_strategy.rs
index a2940a23a..b019d0329 100644
--- a/crates/uv-distribution-types/src/status_code_strategy.rs
+++ b/crates/uv-distribution-types/src/status_code_strategy.rs
@@ -1,3 +1,5 @@
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::ops::Deref;
 
 use http::StatusCode;
@@ -136,17 +138,17 @@ impl<'de> Deserialize<'de> for SerializableStatusCode {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for SerializableStatusCode {
-    fn schema_name() -> String {
-        "StatusCode".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("StatusCode")
     }
 
-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        let mut schema = r#gen.subschema_for::<u16>().into_object();
-        schema.metadata().description = Some("HTTP status code (100-599)".to_string());
-        schema.number().minimum = Some(100.0);
-        schema.number().maximum = Some(599.0);
-
-        schema.into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "number",
+            "minimum": 100,
+            "maximum": 599,
+            "description": "HTTP status code (100-599)"
+        })
     }
 }
diff --git a/crates/uv-distribution/src/distribution_database.rs b/crates/uv-distribution/src/distribution_database.rs
index 0ecea36e6..dcb0a17e3 100644
--- a/crates/uv-distribution/src/distribution_database.rs
+++ b/crates/uv-distribution/src/distribution_database.rs
@@ -644,8 +644,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             })
             .await
             .map_err(|err| match err {
-                CachedClientError::Callback(err) => err,
-                CachedClientError::Client(err) => Error::Client(err),
+                CachedClientError::Callback { err, .. } => err,
+                CachedClientError::Client { err, .. } => Error::Client(err),
             })?;
 
         // If the archive is missing the required hashes, or has since been removed, force a refresh.
@@ -663,8 +663,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                     .skip_cache_with_retry(self.request(url)?, &http_entry, download)
                     .await
                     .map_err(|err| match err {
-                        CachedClientError::Callback(err) => err,
-                        CachedClientError::Client(err) => Error::Client(err),
+                        CachedClientError::Callback { err, .. } => err,
+                        CachedClientError::Client { err, .. } => Error::Client(err),
                     })
             })
             .await?
@@ -811,8 +811,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             })
             .await
             .map_err(|err| match err {
-                CachedClientError::Callback(err) => err,
-                CachedClientError::Client(err) => Error::Client(err),
+                CachedClientError::Callback { err, .. } => err,
+                CachedClientError::Client { err, .. } => Error::Client(err),
             })?;
 
         // If the archive is missing the required hashes, or has since been removed, force a refresh.
@@ -830,8 +830,8 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                     .skip_cache_with_retry(self.request(url)?, &http_entry, download)
                     .await
                     .map_err(|err| match err {
-                        CachedClientError::Callback(err) => err,
-                        CachedClientError::Client(err) => Error::Client(err),
+                        CachedClientError::Callback { err, .. } => err,
+                        CachedClientError::Client { err, .. } => Error::Client(err),
                     })
             })
             .await?
diff --git a/crates/uv-distribution/src/error.rs b/crates/uv-distribution/src/error.rs
index c19867e75..7c2a0f804 100644
--- a/crates/uv-distribution/src/error.rs
+++ b/crates/uv-distribution/src/error.rs
@@ -108,6 +108,8 @@ pub enum Error {
     CacheHeal(String, HashAlgorithm),
     #[error("The source distribution requires Python {0}, but {1} is installed")]
     RequiresPython(VersionSpecifiers, Version),
+    #[error("Failed to identify base Python interpreter")]
+    BaseInterpreter(#[source] std::io::Error),
 
     /// A generic request middleware error happened while making a request.
     /// Refer to the error message for more details.
diff --git a/crates/uv-distribution/src/lib.rs b/crates/uv-distribution/src/lib.rs
index d7679a5fb..07958f715 100644
--- a/crates/uv-distribution/src/lib.rs
+++ b/crates/uv-distribution/src/lib.rs
@@ -4,7 +4,7 @@ pub use error::Error;
 pub use index::{BuiltWheelIndex, RegistryWheelIndex};
 pub use metadata::{
     ArchiveMetadata, BuildRequires, FlatRequiresDist, LoweredRequirement, LoweringError, Metadata,
-    MetadataError, RequiresDist,
+    MetadataError, RequiresDist, SourcedDependencyGroups,
 };
 pub use reporter::Reporter;
 pub use source::prune;
diff --git a/crates/uv-distribution/src/metadata/dependency_groups.rs b/crates/uv-distribution/src/metadata/dependency_groups.rs
new file mode 100644
index 000000000..7fb69b516
--- /dev/null
+++ b/crates/uv-distribution/src/metadata/dependency_groups.rs
@@ -0,0 +1,208 @@
+use std::collections::BTreeMap;
+use std::path::{Path, PathBuf};
+
+use uv_configuration::SourceStrategy;
+use uv_distribution_types::{IndexLocations, Requirement};
+use uv_normalize::{GroupName, PackageName};
+use uv_workspace::dependency_groups::FlatDependencyGroups;
+use uv_workspace::pyproject::{Sources, ToolUvSources};
+use uv_workspace::{
+    DiscoveryOptions, MemberDiscovery, VirtualProject, WorkspaceCache, WorkspaceError,
+};
+
+use crate::metadata::{GitWorkspaceMember, LoweredRequirement, MetadataError};
+
+/// Like [`crate::RequiresDist`] but only supporting dependency-groups.
+///
+/// PEP 735 says:
+///
+/// > A pyproject.toml file with only `[dependency-groups]` and no other tables is valid.
+///
+/// This is a special carveout to enable users to adopt dependency-groups without having
+/// to learn about projects. It is supported by `pip install --group`, and thus interfaces
+/// like `uv pip install --group` must also support it for interop and conformance.
+///
+/// On paper this is trivial to support because dependency-groups are so self-contained
+/// that they're basically a `requirements.txt` embedded within a pyproject.toml, so it's
+/// fine to just grab that section and handle it independently.
+///
+/// However several uv extensions make this complicated, notably, as of this writing:
+///
+/// * tool.uv.sources
+/// * tool.uv.index
+///
+/// These fields may also be present in the pyproject.toml, and, critically,
+/// may be defined and inherited in a parent workspace pyproject.toml.
+///
+/// Therefore, we need to gracefully degrade from a full workspacey situation all
+/// the way down to one of these stub pyproject.tomls the PEP defines. This is why
+/// we avoid going through `RequiresDist` -- we don't want to muddy up the "compile a package"
+/// logic with support for non-project/workspace pyproject.tomls, and we don't want to
+/// muddy this logic up with setuptools fallback modes that `RequiresDist` wants.
+///
+/// (We used to shove this feature into that path, and then we would see there's no metadata
+/// and try to run setuptools to try to desperately find any metadata, and then error out.)
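Concretely, the kind of "stub" file described above, embedded here as a Rust constant for illustration (contents hypothetical):

    // A pyproject.toml with no `[project]` table is valid per PEP 735.
    const STUB_PYPROJECT: &str = r#"
    [dependency-groups]
    dev = ["pytest", "ruff"]
    docs = ["sphinx", { include-group = "dev" }]
    "#;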
+#[derive(Debug, Clone)]
+pub struct SourcedDependencyGroups {
+    pub name: Option<PackageName>,
+    pub dependency_groups: BTreeMap<GroupName, Box<[Requirement]>>,
+}
+
+impl SourcedDependencyGroups {
+    /// Lower by considering `tool.uv` in `pyproject.toml` if present, used for Git and directory
+    /// dependencies.
+    pub async fn from_virtual_project(
+        pyproject_path: &Path,
+        git_member: Option<&GitWorkspaceMember<'_>>,
+        locations: &IndexLocations,
+        source_strategy: SourceStrategy,
+        cache: &WorkspaceCache,
+    ) -> Result<Self, MetadataError> {
+        let discovery = DiscoveryOptions {
+            stop_discovery_at: git_member.map(|git_member| {
+                git_member
+                    .fetch_root
+                    .parent()
+                    .expect("git checkout has a parent")
+                    .to_path_buf()
+            }),
+            members: match source_strategy {
+                SourceStrategy::Enabled => MemberDiscovery::default(),
+                SourceStrategy::Disabled => MemberDiscovery::None,
+            },
+        };
+
+        // The subsequent API takes an absolute path to the dir the pyproject is in
+        let empty = PathBuf::new();
+        let absolute_pyproject_path =
+            std::path::absolute(pyproject_path).map_err(WorkspaceError::Normalize)?;
+        let project_dir = absolute_pyproject_path.parent().unwrap_or(&empty);
+        let project = VirtualProject::discover_defaulted(project_dir, &discovery, cache).await?;
+
+        // Collect the dependency groups.
+        let dependency_groups =
+            FlatDependencyGroups::from_pyproject_toml(project.root(), project.pyproject_toml())?;
+
+        // If sources/indexes are disabled we can just stop here
+        let SourceStrategy::Enabled = source_strategy else {
+            return Ok(Self {
+                name: project.project_name().cloned(),
+                dependency_groups: dependency_groups
+                    .into_iter()
+                    .map(|(name, group)| {
+                        let requirements = group
+                            .requirements
+                            .into_iter()
+                            .map(Requirement::from)
+                            .collect();
+                        (name, requirements)
+                    })
+                    .collect(),
+            });
+        };
+
+        // Collect any `tool.uv.index` entries.
+        let empty = vec![];
+        let project_indexes = project
+            .pyproject_toml()
+            .tool
+            .as_ref()
+            .and_then(|tool| tool.uv.as_ref())
+            .and_then(|uv| uv.index.as_deref())
+            .unwrap_or(&empty);
+
+        // Collect any `tool.uv.sources` and `tool.uv.dev_dependencies` from `pyproject.toml`.
+        let empty = BTreeMap::default();
+        let project_sources = project
+            .pyproject_toml()
+            .tool
+            .as_ref()
+            .and_then(|tool| tool.uv.as_ref())
+            .and_then(|uv| uv.sources.as_ref())
+            .map(ToolUvSources::inner)
+            .unwrap_or(&empty);
+
+        // Now that we've resolved the dependency groups, we can validate that each source references
+        // a valid extra or group, if present.
+        Self::validate_sources(project_sources, &dependency_groups)?;
+ let dependency_groups = dependency_groups + .into_iter() + .map(|(name, group)| { + let requirements = group + .requirements + .into_iter() + .flat_map(|requirement| { + let requirement_name = requirement.name.clone(); + let group = name.clone(); + let extra = None; + LoweredRequirement::from_requirement( + requirement, + project.project_name(), + project.root(), + project_sources, + project_indexes, + extra, + Some(&group), + locations, + project.workspace(), + git_member, + ) + .map(move |requirement| match requirement { + Ok(requirement) => Ok(requirement.into_inner()), + Err(err) => Err(MetadataError::GroupLoweringError( + group.clone(), + requirement_name.clone(), + Box::new(err), + )), + }) + }) + .collect::<Result<Box<_>, _>>()?; + Ok::<(GroupName, Box<_>), MetadataError>((name, requirements)) + }) + .collect::<Result<BTreeMap<_, _>, _>>()?; + + Ok(Self { + name: project.project_name().cloned(), + dependency_groups, + }) + } + + /// Validate the sources. + /// + /// If a source is requested with `group`, ensure that the relevant dependency is + /// present in the relevant `dependency-groups` section. + fn validate_sources( + sources: &BTreeMap<PackageName, Sources>, + dependency_groups: &FlatDependencyGroups, + ) -> Result<(), MetadataError> { + for (name, sources) in sources { + for source in sources.iter() { + if let Some(group) = source.group() { + // If the group doesn't exist at all, error. + let Some(flat_group) = dependency_groups.get(group) else { + return Err(MetadataError::MissingSourceGroup( + name.clone(), + group.clone(), + )); + }; + + // If there is no such requirement with the group, error. + if !flat_group + .requirements + .iter() + .any(|requirement| requirement.name == *name) + { + return Err(MetadataError::IncompleteSourceGroup( + name.clone(), + group.clone(), + )); + } + } + } + } + + Ok(()) + } +} diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs index dd0974a99..330075842 100644 --- a/crates/uv-distribution/src/metadata/lowering.rs +++ b/crates/uv-distribution/src/metadata/lowering.rs @@ -13,7 +13,7 @@ use uv_git_types::{GitReference, GitUrl, GitUrlParseError}; use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep440::VersionSpecifiers; use uv_pep508::{MarkerTree, VerbatimUrl, VersionOrUrl, looks_like_git_repository}; -use uv_pypi_types::{ConflictItem, ParsedUrlError, VerbatimParsedUrl}; +use uv_pypi_types::{ConflictItem, ParsedGitUrl, ParsedUrlError, VerbatimParsedUrl}; use uv_redacted::DisplaySafeUrl; use uv_workspace::Workspace; use uv_workspace::pyproject::{PyProjectToml, Source, Sources}; @@ -700,17 +700,23 @@ fn path_source( }; if is_dir { if let Some(git_member) = git_member { + let git = git_member.git_source.git.clone(); let subdirectory = uv_fs::relative_to(install_path, git_member.fetch_root) .expect("Workspace member must be relative"); let subdirectory = uv_fs::normalize_path_buf(subdirectory); + let subdirectory = if subdirectory == PathBuf::new() { + None + } else { + Some(subdirectory.into_boxed_path()) + }; + let url = DisplaySafeUrl::from(ParsedGitUrl { + url: git.clone(), + subdirectory: subdirectory.clone(), + }); return Ok(RequirementSource::Git { - git: git_member.git_source.git.clone(), - subdirectory: if subdirectory == PathBuf::new() { - None - } else { - Some(subdirectory.into_boxed_path()) - }, - url, + git, + subdirectory, + url: VerbatimUrl::from_url(url), }); } diff --git a/crates/uv-distribution/src/metadata/mod.rs b/crates/uv-distribution/src/metadata/mod.rs index 85c55666e..a56a1c354 100644 ---
a/crates/uv-distribution/src/metadata/mod.rs +++ b/crates/uv-distribution/src/metadata/mod.rs @@ -12,11 +12,13 @@ use uv_workspace::dependency_groups::DependencyGroupError; use uv_workspace::{WorkspaceCache, WorkspaceError}; pub use crate::metadata::build_requires::BuildRequires; +pub use crate::metadata::dependency_groups::SourcedDependencyGroups; pub use crate::metadata::lowering::LoweredRequirement; pub use crate::metadata::lowering::LoweringError; pub use crate::metadata::requires_dist::{FlatRequiresDist, RequiresDist}; mod build_requires; +mod dependency_groups; mod lowering; mod requires_dist; diff --git a/crates/uv-distribution/src/metadata/requires_dist.rs b/crates/uv-distribution/src/metadata/requires_dist.rs index d728ed58b..e9f36f174 100644 --- a/crates/uv-distribution/src/metadata/requires_dist.rs +++ b/crates/uv-distribution/src/metadata/requires_dist.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashSet; use uv_configuration::SourceStrategy; use uv_distribution_types::{IndexLocations, Requirement}; -use uv_normalize::{DEV_DEPENDENCIES, ExtraName, GroupName, PackageName}; +use uv_normalize::{ExtraName, GroupName, PackageName}; use uv_pep508::MarkerTree; use uv_workspace::dependency_groups::FlatDependencyGroups; use uv_workspace::pyproject::{Sources, ToolUvSources}; @@ -107,41 +107,10 @@ impl RequiresDist { SourceStrategy::Disabled => &empty, }; - // Collect the dependency groups. - let dependency_groups = { - // First, collect `tool.uv.dev_dependencies` - let dev_dependencies = project_workspace - .current_project() - .pyproject_toml() - .tool - .as_ref() - .and_then(|tool| tool.uv.as_ref()) - .and_then(|uv| uv.dev_dependencies.as_ref()); - - // Then, collect `dependency-groups` - let dependency_groups = project_workspace - .current_project() - .pyproject_toml() - .dependency_groups - .iter() - .flatten() - .collect::<BTreeMap<_, _>>(); - - // Flatten the dependency groups. - let mut dependency_groups = - FlatDependencyGroups::from_dependency_groups(&dependency_groups) - .map_err(|err| err.with_dev_dependencies(dev_dependencies))?; - - // Add the `dev` group, if `dev-dependencies` is defined. - if let Some(dev_dependencies) = dev_dependencies { - dependency_groups - .entry(DEV_DEPENDENCIES.clone()) - .or_insert_with(Vec::new) - .extend(dev_dependencies.clone()); - } - - dependency_groups - }; + let dependency_groups = FlatDependencyGroups::from_pyproject_toml( + project_workspace.current_project().root(), + project_workspace.current_project().pyproject_toml(), + )?; // Now that we've resolved the dependency groups, we can validate that each source references // a valid extra or group, if present. @@ -150,9 +119,10 @@ impl RequiresDist { // Lower the dependency groups. let dependency_groups = dependency_groups .into_iter() - .map(|(name, requirements)| { + .map(|(name, flat_group)| { let requirements = match source_strategy { - SourceStrategy::Enabled => requirements + SourceStrategy::Enabled => flat_group + .requirements .into_iter() .flat_map(|requirement| { let requirement_name = requirement.name.clone(); @@ -182,9 +152,11 @@ impl RequiresDist { ) }) .collect::<Result<Box<_>, _>>(), - SourceStrategy::Disabled => { - Ok(requirements.into_iter().map(Requirement::from).collect()) - } + SourceStrategy::Disabled => Ok(flat_group + .requirements + .into_iter() + .map(Requirement::from) + .collect()), }?; Ok::<(GroupName, Box<_>), MetadataError>((name, requirements)) }) @@ -265,7 +237,7 @@ impl RequiresDist { if let Some(group) = source.group() { // If the group doesn't exist at all, error.
- let Some(dependencies) = dependency_groups.get(group) else { + let Some(flat_group) = dependency_groups.get(group) else { return Err(MetadataError::MissingSourceGroup( name.clone(), group.clone(), @@ -273,7 +245,8 @@ impl RequiresDist { }; // If there is no such requirement with the group, error. - if !dependencies + if !flat_group + .requirements .iter() .any(|requirement| requirement.name == *name) { diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index 7a92d700f..2b73eb4ff 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -43,7 +43,7 @@ use uv_normalize::PackageName; use uv_pep440::{Version, release_specifiers_to_ranges}; use uv_platform_tags::Tags; use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata}; -use uv_types::{BuildContext, BuildStack, SourceBuildTrait}; +use uv_types::{BuildContext, BuildKey, BuildStack, SourceBuildTrait}; use uv_workspace::pyproject::ToolUvSources; use crate::distribution_database::ManagedClient; @@ -728,8 +728,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { }) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), })?; // If the archive is missing the required hashes, force a refresh. @@ -747,8 +747,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { ) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), }) }) .await @@ -1583,7 +1583,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { client .unmanaged .uncached_client(resource.git.repository()) - .clone(), + .raw_client(), ) .await { @@ -1860,13 +1860,22 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } }; + // If the URL is already precise, return it. + if self.build_context.git().get_precise(git).is_some() { + debug!("Precise commit already known: {source}"); + return Ok(()); + } + // If this is GitHub URL, attempt to resolve to a precise commit using the GitHub API. if self .build_context .git() .github_fast_path( git, - client.unmanaged.uncached_client(git.repository()).clone(), + client + .unmanaged + .uncached_client(git.repository()) + .raw_client(), ) .await? .is_some() @@ -2084,8 +2093,8 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { ) .await .map_err(|err| match err { - CachedClientError::Callback(err) => err, - CachedClientError::Client(err) => Error::Client(err), + CachedClientError::Callback { err, .. } => err, + CachedClientError::Client { err, .. } => Error::Client(err), }) }) .await @@ -2267,6 +2276,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { fs::create_dir_all(&cache_shard) .await .map_err(Error::CacheWrite)?; + // Try a direct build if that isn't disabled and the uv build backend is used. let disk_filename = if let Some(name) = self .build_context @@ -2287,27 +2297,73 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // In the uv build backend, the normalized filename and the disk filename are the same. 
name.to_string() } else { - self.build_context - .setup_build( - source_root, - subdirectory, - source_root, - Some(&source.to_string()), - source.as_dist(), - source_strategy, - if source.is_editable() { - BuildKind::Editable - } else { - BuildKind::Wheel - }, - BuildOutput::Debug, - self.build_stack.cloned().unwrap_or_default(), - ) - .await - .map_err(|err| Error::Build(err.into()))? - .wheel(temp_dir.path()) - .await - .map_err(Error::Build)? + // Identify the base Python interpreter to use in the cache key. + let base_python = if cfg!(unix) { + self.build_context + .interpreter() + .find_base_python() + .map_err(Error::BaseInterpreter)? + } else { + self.build_context + .interpreter() + .to_base_python() + .map_err(Error::BaseInterpreter)? + }; + + let build_kind = if source.is_editable() { + BuildKind::Editable + } else { + BuildKind::Wheel + }; + + let build_key = BuildKey { + base_python: base_python.into_boxed_path(), + source_root: source_root.to_path_buf().into_boxed_path(), + subdirectory: subdirectory + .map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()), + source_strategy, + build_kind, + }; + + if let Some(builder) = self.build_context.build_arena().remove(&build_key) { + debug!("Reusing existing build environment for: {source}"); + let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?; + + // Store the build context. + self.build_context.build_arena().insert(build_key, builder); + + wheel + } else { + debug!("Creating build environment for: {source}"); + + let builder = self + .build_context + .setup_build( + source_root, + subdirectory, + source_root, + Some(&source.to_string()), + source.as_dist(), + source_strategy, + build_kind, + BuildOutput::Debug, + self.build_stack.cloned().unwrap_or_default(), + ) + .await + .map_err(|err| Error::Build(err.into()))?; + + // Build the wheel. + let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?; + + // Store the build context. + self.build_context.build_arena().insert(build_key, builder); + + wheel + } }; // Read the metadata from the wheel. @@ -2362,6 +2418,26 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { } } + // Identify the base Python interpreter to use in the cache key. + let base_python = if cfg!(unix) { + self.build_context + .interpreter() + .find_base_python() + .map_err(Error::BaseInterpreter)? + } else { + self.build_context + .interpreter() + .to_base_python() + .map_err(Error::BaseInterpreter)? + }; + + // Determine whether this is an editable or non-editable build. + let build_kind = if source.is_editable() { + BuildKind::Editable + } else { + BuildKind::Wheel + }; + // Set up the builder. let mut builder = self .build_context @@ -2372,11 +2448,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { Some(&source.to_string()), source.as_dist(), source_strategy, - if source.is_editable() { - BuildKind::Editable - } else { - BuildKind::Wheel - }, + build_kind, BuildOutput::Debug, self.build_stack.cloned().unwrap_or_default(), ) @@ -2385,6 +2457,21 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { // Build the metadata. let dist_info = builder.metadata().await.map_err(Error::Build)?; + + // Store the build context.
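The environment reuse above hinges on a key that captures every input that could change the build. A minimal sketch of that arena pattern, under assumed names rather than uv's actual `BuildKey`/build-arena types:

use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Mutex;

// Everything that influences the build environment goes into the key.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Key {
    base_python: PathBuf,          // identity of the base interpreter
    source_root: PathBuf,          // the source tree being built
    subdirectory: Option<PathBuf>, // optional subdirectory within it
    editable: bool,                // stand-in for `BuildKind`
}

// A shared pool of builders, checked out by key and returned after use.
struct Arena<B>(Mutex<HashMap<Key, B>>);

impl<B> Arena<B> {
    fn remove(&self, key: &Key) -> Option<B> {
        self.0.lock().unwrap().remove(key)
    }
    fn insert(&self, key: Key, builder: B) {
        self.0.lock().unwrap().insert(key, builder);
    }
}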
+ self.build_context.build_arena().insert( + BuildKey { + base_python: base_python.into_boxed_path(), + source_root: source_root.to_path_buf().into_boxed_path(), + subdirectory: subdirectory + .map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()), + source_strategy, + build_kind, + }, + builder, + ); + + // Return the `.dist-info` directory, if it exists. let Some(dist_info) = dist_info else { return Ok(None); }; diff --git a/crates/uv-extract/src/error.rs b/crates/uv-extract/src/error.rs index 09191bb0a..ae2fdff1a 100644 --- a/crates/uv-extract/src/error.rs +++ b/crates/uv-extract/src/error.rs @@ -2,11 +2,11 @@ use std::{ffi::OsString, path::PathBuf}; #[derive(Debug, thiserror::Error)] pub enum Error { - #[error(transparent)] + #[error("Failed to read from zip file")] Zip(#[from] zip::result::ZipError), - #[error(transparent)] + #[error("Failed to read from zip file")] AsyncZip(#[from] async_zip::error::ZipError), - #[error(transparent)] + #[error("I/O operation failed during extraction")] Io(#[from] std::io::Error), #[error( "The top-level of the archive must only contain a single directory, but it contains: {0:?}" ) diff --git a/crates/uv-fs/Cargo.toml b/crates/uv-fs/Cargo.toml index 12a5f94b7..fba4910e6 100644 --- a/crates/uv-fs/Cargo.toml +++ b/crates/uv-fs/Cargo.toml @@ -16,7 +16,6 @@ doctest = false workspace = true [dependencies] - dunce = { workspace = true } either = { workspace = true } encoding_rs_io = { workspace = true } diff --git a/crates/uv-fs/src/lib.rs b/crates/uv-fs/src/lib.rs index 0b5055b40..dcc0f00b2 100644 --- a/crates/uv-fs/src/lib.rs +++ b/crates/uv-fs/src/lib.rs @@ -575,8 +575,33 @@ pub fn is_temporary(path: impl AsRef<Path>) -> bool { .is_some_and(|name| name.starts_with(".tmp")) } +/// Checks if the grandparent directory of the given executable is the base +/// of a virtual environment. +/// +/// The procedure described in PEP 405 includes checking both the parent and +/// grandparent directory of an executable, but in practice we've found this to +/// be unnecessary. +pub fn is_virtualenv_executable(executable: impl AsRef<Path>) -> bool { + executable + .as_ref() + .parent() + .and_then(Path::parent) + .is_some_and(is_virtualenv_base) +} + +/// Returns `true` if a path is the base path of a virtual environment, +/// indicated by the presence of a `pyvenv.cfg` file. +/// +/// The procedure described in PEP 405 includes scanning `pyvenv.cfg` +/// for a `home` key, but in practice we've found this to be +/// unnecessary. +pub fn is_virtualenv_base(path: impl AsRef<Path>) -> bool { + path.as_ref().join("pyvenv.cfg").is_file() +} + /// A file lock that is automatically released when dropped. #[derive(Debug)] +#[must_use] pub struct LockedFile(fs_err::File); impl LockedFile { diff --git a/crates/uv-fs/src/path.rs b/crates/uv-fs/src/path.rs index 7a75c76c3..40e579f8e 100644 --- a/crates/uv-fs/src/path.rs +++ b/crates/uv-fs/src/path.rs @@ -277,21 +277,6 @@ fn normalized(path: &Path) -> PathBuf { normalized } -/// Like `fs_err::canonicalize`, but avoids attempting to resolve symlinks on Windows. -pub fn canonicalize_executable(path: impl AsRef<Path>) -> std::io::Result<PathBuf> { - let path = path.as_ref(); - debug_assert!( - path.is_absolute(), - "path must be absolute: {}", - path.display() - ); - if cfg!(windows) { - Ok(path.to_path_buf()) - } else { - fs_err::canonicalize(path) - } -} - /// Compute a path describing `path` relative to `base`.
/// /// `lib/python/site-packages/foo/__init__.py` and `lib/python/site-packages` -> `foo/__init__.py` @@ -345,11 +330,11 @@ pub struct PortablePathBuf(Box<Path>); #[cfg(feature = "schemars")] impl schemars::JsonSchema for PortablePathBuf { - fn schema_name() -> String { - PathBuf::schema_name() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("PortablePathBuf") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { + fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { PathBuf::json_schema(_gen) } } diff --git a/crates/uv-fs/src/which.rs b/crates/uv-fs/src/which.rs index 9dd4cc508..e63174a17 100644 --- a/crates/uv-fs/src/which.rs +++ b/crates/uv-fs/src/which.rs @@ -17,7 +17,7 @@ fn get_binary_type(path: &Path) -> windows::core::Result<u32> { .chain(Some(0)) .collect::<Vec<u16>>(); // SAFETY: winapi call - unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &mut binary_type)? }; + unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &raw mut binary_type)? }; Ok(binary_type) } diff --git a/crates/uv-git/src/git.rs b/crates/uv-git/src/git.rs index 298c205ba..4ee4c2670 100644 --- a/crates/uv-git/src/git.rs +++ b/crates/uv-git/src/git.rs @@ -20,6 +20,8 @@ use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use uv_version::version; +use crate::rate_limit::{GITHUB_RATE_LIMIT_STATUS, is_github_rate_limited}; + /// A file indicates that if present, `git reset` has been done and a repo /// checkout is ready to go. See [`GitCheckout::reset`] for why we need this. const CHECKOUT_READY_LOCK: &str = ".ok"; @@ -787,7 +789,15 @@ fn github_fast_path( } }; - let url = format!("https://api.github.com/repos/{owner}/{repo}/commits/{github_branch_name}"); + // Check if we're rate-limited by GitHub before determining the FastPathRev + if GITHUB_RATE_LIMIT_STATUS.is_active() { + debug!("Skipping GitHub fast path attempt for: {url} (rate-limited)"); + return Ok(FastPathRev::Indeterminate); + } + + let base_url = std::env::var(EnvVars::UV_GITHUB_FAST_PATH_URL) + .unwrap_or("https://api.github.com/repos".to_owned()); + let url = format!("{base_url}/{owner}/{repo}/commits/{github_branch_name}"); let runtime = tokio::runtime::Builder::new_current_thread() .enable_all() @@ -807,6 +817,11 @@ fn github_fast_path( let response = request.send().await?; + if is_github_rate_limited(&response) { + // Mark that we are being rate-limited by GitHub + GITHUB_RATE_LIMIT_STATUS.activate(); + } + // GitHub returns a 404 if the repository does not exist, and a 422 if it exists but GitHub // is unable to resolve the requested revision. response.error_for_status_ref()?; diff --git a/crates/uv-git/src/lib.rs b/crates/uv-git/src/lib.rs index ef23e58c2..716eb7538 100644 --- a/crates/uv-git/src/lib.rs +++ b/crates/uv-git/src/lib.rs @@ -7,5 +7,6 @@ pub use crate::source::{Fetch, GitSource, Reporter}; mod credentials; mod git; +mod rate_limit; mod resolver; mod source; diff --git a/crates/uv-git/src/rate_limit.rs b/crates/uv-git/src/rate_limit.rs new file mode 100644 index 000000000..4d277e652 --- /dev/null +++ b/crates/uv-git/src/rate_limit.rs @@ -0,0 +1,37 @@ +use reqwest::{Response, StatusCode}; +use std::sync::atomic::{AtomicBool, Ordering}; + +/// A global state on whether we are being rate-limited by GitHub's REST API. +/// If we are, avoid "fast-path" attempts. +pub(crate) static GITHUB_RATE_LIMIT_STATUS: GitHubRateLimitStatus = GitHubRateLimitStatus::new(); + +/// GitHub REST API rate limit status tracker.
+/// +/// ## Assumptions +/// +/// The rate limit timeout duration is much longer than the runtime of a `uv` command. +/// And so we do not need to invalidate this state based on `x-ratelimit-reset`. +#[derive(Debug)] +pub(crate) struct GitHubRateLimitStatus(AtomicBool); + +impl GitHubRateLimitStatus { + const fn new() -> Self { + Self(AtomicBool::new(false)) + } + + pub(crate) fn activate(&self) { + self.0.store(true, Ordering::Relaxed); + } + + pub(crate) fn is_active(&self) -> bool { + self.0.load(Ordering::Relaxed) + } +} + +/// Determine if GitHub is applying rate-limiting based on the response +pub(crate) fn is_github_rate_limited(response: &Response) -> bool { + // HTTP 403 and 429 are possible status codes in the event of a primary or secondary rate limit. + // Source: https://docs.github.com/en/rest/using-the-rest-api/troubleshooting-the-rest-api?apiVersion=2022-11-28#rate-limit-errors + let status_code = response.status(); + status_code == StatusCode::FORBIDDEN || status_code == StatusCode::TOO_MANY_REQUESTS +} diff --git a/crates/uv-git/src/resolver.rs b/crates/uv-git/src/resolver.rs index fd90ff587..3c12fc589 100644 --- a/crates/uv-git/src/resolver.rs +++ b/crates/uv-git/src/resolver.rs @@ -15,7 +15,10 @@ use uv_git_types::{GitHubRepository, GitOid, GitReference, GitUrl}; use uv_static::EnvVars; use uv_version::version; -use crate::{Fetch, GitSource, Reporter}; +use crate::{ + Fetch, GitSource, Reporter, + rate_limit::{GITHUB_RATE_LIMIT_STATUS, is_github_rate_limited}, +}; #[derive(Debug, thiserror::Error)] pub enum GitResolverError { @@ -46,6 +49,21 @@ impl GitResolver { self.0.get(reference) } + pub fn get_precise(&self, url: &GitUrl) -> Option<GitOid> { + // If the URL is already precise, return it. + if let Some(precise) = url.precise() { + return Some(precise); + } + + // If we know the precise commit already, return it. + let reference = RepositoryReference::from(url); + if let Some(precise) = self.get(&reference) { + return Some(*precise); + } + + None + } + /// Resolve a Git URL to a specific commit without performing any Git operations. /// /// Returns a [`GitOid`] if the URL has already been resolved (i.e., is available in the cache), @@ -53,37 +71,38 @@ impl GitResolver { pub async fn github_fast_path( &self, url: &GitUrl, - client: ClientWithMiddleware, + client: &ClientWithMiddleware, ) -> Result<Option<GitOid>, GitResolverError> { if std::env::var_os(EnvVars::UV_NO_GITHUB_FAST_PATH).is_some() { return Ok(None); } - let reference = RepositoryReference::from(url); - - // If the URL is already precise, return it. - if let Some(precise) = url.precise() { + // If the URL is already precise or we know the precise commit, return it. + if let Some(precise) = self.get_precise(url) { return Ok(Some(precise)); } - // If we know the precise commit already, return it. - if let Some(precise) = self.get(&reference) { - return Ok(Some(*precise)); - } - // If the URL is a GitHub URL, attempt to resolve it via the GitHub API. let Some(GitHubRepository { owner, repo }) = GitHubRepository::parse(url.repository()) else { return Ok(None); }; + // Check if we're rate-limited by GitHub, before determining the Git reference + if GITHUB_RATE_LIMIT_STATUS.is_active() { + debug!("Rate-limited by GitHub. Skipping GitHub fast path attempt for: {url}"); + return Ok(None); + } + // Determine the Git reference.
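Taken together, the fast path becomes a check-then-latch flow around the API call. A hedged sketch, assuming a plain `reqwest::Client` alongside the `rate_limit.rs` helpers above:

async fn fast_path_commit(client: &reqwest::Client, url: &str) -> Option<String> {
    // Skip the fast path entirely once we've been rate-limited in this process.
    if GITHUB_RATE_LIMIT_STATUS.is_active() {
        return None;
    }
    let response = client
        .get(url)
        .header("Accept", "application/vnd.github.3.sha")
        .send()
        .await
        .ok()?;
    if is_github_rate_limited(&response) {
        // Latch for the remainder of the command; callers fall back to a full Git fetch.
        GITHUB_RATE_LIMIT_STATUS.activate();
        return None;
    }
    response.error_for_status().ok()?.text().await.ok()
}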
let rev = url.reference().as_rev(); - let url = format!("https://api.github.com/repos/{owner}/{repo}/commits/{rev}"); + let github_api_base_url = std::env::var(EnvVars::UV_GITHUB_FAST_PATH_URL) + .unwrap_or("https://api.github.com/repos".to_owned()); + let github_api_url = format!("{github_api_base_url}/{owner}/{repo}/commits/{rev}"); - debug!("Querying GitHub for commit at: {url}"); - let mut request = client.get(&url); + debug!("Querying GitHub for commit at: {github_api_url}"); + let mut request = client.get(&github_api_url); request = request.header("Accept", "application/vnd.github.3.sha"); request = request.header( "User-Agent", @@ -91,13 +110,20 @@ impl GitResolver { ); let response = request.send().await?; - if !response.status().is_success() { + let status = response.status(); + if !status.is_success() { // Returns a 404 if the repository does not exist, and a 422 if GitHub is unable to // resolve the requested rev. debug!( - "GitHub API request failed for: {url} ({})", + "GitHub API request failed for: {github_api_url} ({})", response.status() ); + + if is_github_rate_limited(&response) { + // Mark that we are being rate-limited by GitHub + GITHUB_RATE_LIMIT_STATUS.activate(); + } + return Ok(None); } @@ -108,7 +134,7 @@ impl GitResolver { // Insert the resolved URL into the in-memory cache. This ensures that subsequent fetches // resolve to the same precise commit. - self.insert(reference, precise); + self.insert(RepositoryReference::from(url), precise); Ok(Some(precise)) } @@ -117,7 +143,7 @@ impl GitResolver { pub async fn fetch( &self, url: &GitUrl, - client: ClientWithMiddleware, + client: impl Into<ClientWithMiddleware>, disable_ssl: bool, offline: bool, cache: PathBuf, diff --git a/crates/uv-pep440/src/lib.rs b/crates/uv-pep440/src/lib.rs index 3d2e256ae..0e8b50e72 100644 --- a/crates/uv-pep440/src/lib.rs +++ b/crates/uv-pep440/src/lib.rs @@ -34,7 +34,7 @@ pub use { VersionPatternParseError, }, version_specifier::{ - VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers, + TildeVersionSpecifier, VersionSpecifier, VersionSpecifierBuildError, VersionSpecifiers, VersionSpecifiersParseError, }, }; diff --git a/crates/uv-pep440/src/version.rs b/crates/uv-pep440/src/version.rs index 1ef0badf2..a496f95a2 100644 --- a/crates/uv-pep440/src/version.rs +++ b/crates/uv-pep440/src/version.rs @@ -610,6 +610,24 @@ impl Version { Self::new(self.release().iter().copied()) } + /// Return the version with any segments apart from the release removed, with trailing zeroes + /// trimmed. + #[inline] + #[must_use] + pub fn only_release_trimmed(&self) -> Self { + if let Some(last_non_zero) = self.release().iter().rposition(|segment| *segment != 0) { + if last_non_zero == self.release().len() - 1 { + // Already trimmed. + self.clone() + } else { + Self::new(self.release().iter().take(last_non_zero + 1).copied()) + } + } else { + // `0` is a valid version. + Self::new([0]) + } + } + /// Return the version with trailing `.0` release segments removed.
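The distinction between the two release-only forms is easiest to see on concrete versions. A small illustration, comparing `Display` output because the `Version` type already ignores trailing zeroes for equality:

use std::str::FromStr;
use uv_pep440::Version;

fn main() {
    let version = Version::from_str("3.9.0rc1").unwrap();
    // `only_release` drops the pre-release marker but keeps trailing zeroes.
    assert_eq!(version.only_release().to_string(), "3.9.0");
    // `only_release_trimmed` also strips the trailing `.0`, so `3.9.0` and
    // `3.9` normalize to the same display form.
    assert_eq!(version.only_release_trimmed().to_string(), "3.9");
}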
/// /// # Panics diff --git a/crates/uv-pep440/src/version_ranges.rs b/crates/uv-pep440/src/version_ranges.rs index 2bd7dcd4d..38038ffcf 100644 --- a/crates/uv-pep440/src/version_ranges.rs +++ b/crates/uv-pep440/src/version_ranges.rs @@ -132,7 +132,7 @@ impl From<VersionSpecifiers> for Ranges<Version> { pub fn release_specifiers_to_ranges(specifiers: VersionSpecifiers) -> Ranges<Version> { let mut range = Ranges::full(); for specifier in specifiers { - range = range.intersection(&release_specifier_to_range(specifier)); + range = range.intersection(&release_specifier_to_range(specifier, false)); } range } @@ -148,67 +148,57 @@ pub fn release_specifiers_to_ranges(specifiers: VersionSpecifiers) -> Ranges<Version> -pub fn release_specifier_to_range(specifier: VersionSpecifier) -> Ranges<Version> { +pub fn release_specifier_to_range(specifier: VersionSpecifier, trim: bool) -> Ranges<Version> { let VersionSpecifier { operator, version } = specifier; + // Note(konsti): We switched strategies to trimmed for the markers, but we don't want to cause + // churn in lockfile requires-python, so we only trim for markers. + let version_trimmed = if trim { + version.only_release_trimmed() + } else { + version.only_release() + }; match operator { - Operator::Equal => { - let version = version.only_release(); - Ranges::singleton(version) - } - Operator::ExactEqual => { - let version = version.only_release(); - Ranges::singleton(version) - } - Operator::NotEqual => { - let version = version.only_release(); - Ranges::singleton(version).complement() - } + // Trailing zeroes are not semantically relevant. + Operator::Equal => Ranges::singleton(version_trimmed), + Operator::ExactEqual => Ranges::singleton(version_trimmed), + Operator::NotEqual => Ranges::singleton(version_trimmed).complement(), + Operator::LessThan => Ranges::strictly_lower_than(version_trimmed), + Operator::LessThanEqual => Ranges::lower_than(version_trimmed), + Operator::GreaterThan => Ranges::strictly_higher_than(version_trimmed), + Operator::GreaterThanEqual => Ranges::higher_than(version_trimmed), + + // Trailing zeroes are semantically relevant. Operator::TildeEqual => { let release = version.release(); let [rest @ .., last, _] = &*release else { unreachable!("~= must have at least two segments"); }; let upper = Version::new(rest.iter().chain([&(last + 1)])); - let version = version.only_release(); - Ranges::from_range_bounds(version..upper) - } - Operator::LessThan => { - let version = version.only_release(); - Ranges::strictly_lower_than(version) - } - Operator::LessThanEqual => { - let version = version.only_release(); - Ranges::lower_than(version) - } - Operator::GreaterThan => { - let version = version.only_release(); - Ranges::strictly_higher_than(version) - } - Operator::GreaterThanEqual => { - let version = version.only_release(); - Ranges::higher_than(version) + Ranges::from_range_bounds(version_trimmed..upper) } Operator::EqualStar => { - let low = version.only_release(); + // For (not-)equal-star, trailing zeroes are still before the star. + let low_full = version.only_release(); let high = { - let mut high = low.clone(); + let mut high = low_full.clone(); let mut release = high.release().to_vec(); *release.last_mut().unwrap() += 1; high = high.with_release(release); high }; - Ranges::from_range_bounds(low..high) + Ranges::from_range_bounds(version..high) } Operator::NotEqualStar => { - let low = version.only_release(); + // For (not-)equal-star, trailing zeroes are still before the star.
+ let low_full = version.only_release(); let high = { - let mut high = low.clone(); + let mut high = low_full.clone(); let mut release = high.release().to_vec(); *release.last_mut().unwrap() += 1; high = high.with_release(release); high }; - Ranges::from_range_bounds(low..high).complement() + Ranges::from_range_bounds(version..high).complement() } } } @@ -223,8 +213,8 @@ impl LowerBound { /// These bounds use release-only semantics when comparing versions. pub fn new(bound: Bound<Version>) -> Self { Self(match bound { - Bound::Included(version) => Bound::Included(version.only_release()), - Bound::Excluded(version) => Bound::Excluded(version.only_release()), + Bound::Included(version) => Bound::Included(version.only_release_trimmed()), + Bound::Excluded(version) => Bound::Excluded(version.only_release_trimmed()), Bound::Unbounded => Bound::Unbounded, }) } @@ -358,8 +348,8 @@ impl UpperBound { /// These bounds use release-only semantics when comparing versions. pub fn new(bound: Bound<Version>) -> Self { Self(match bound { - Bound::Included(version) => Bound::Included(version.only_release()), - Bound::Excluded(version) => Bound::Excluded(version.only_release()), + Bound::Included(version) => Bound::Included(version.only_release_trimmed()), + Bound::Excluded(version) => Bound::Excluded(version.only_release_trimmed()), Bound::Unbounded => Bound::Unbounded, }) } diff --git a/crates/uv-pep440/src/version_specifier.rs b/crates/uv-pep440/src/version_specifier.rs index 4255c13fa..e111c5118 100644 --- a/crates/uv-pep440/src/version_specifier.rs +++ b/crates/uv-pep440/src/version_specifier.rs @@ -80,24 +80,38 @@ impl VersionSpecifiers { // Add specifiers for the holes between the bounds. for (lower, upper) in bounds { - match (next, lower) { + let specifier = match (next, lower) { // Ex) [3.7, 3.8.5), (3.8.5, 3.9] -> >=3.7,!=3.8.5,<=3.9 (Bound::Excluded(prev), Bound::Excluded(lower)) if prev == lower => { - specifiers.push(VersionSpecifier::not_equals_version(prev.clone())); + Some(VersionSpecifier::not_equals_version(prev.clone())) } // Ex) [3.7, 3.8), (3.8, 3.9] -> >=3.7,!=3.8.*,<=3.9 - (Bound::Excluded(prev), Bound::Included(lower)) - if prev.release().len() == 2 - && *lower.release() == [prev.release()[0], prev.release()[1] + 1] => - { - specifiers.push(VersionSpecifier::not_equals_star_version(prev.clone())); - } - _ => { - #[cfg(feature = "tracing")] - warn!( - "Ignoring unsupported gap in `requires-python` version: {next:?} -> {lower:?}" - ); + (Bound::Excluded(prev), Bound::Included(lower)) => { + match *prev.only_release_trimmed().release() { + [major] if *lower.only_release_trimmed().release() == [major, 1] => { + Some(VersionSpecifier::not_equals_star_version(Version::new([ + major, 0, + ]))) + } + [major, minor] + if *lower.only_release_trimmed().release() == [major, minor + 1] => + { + Some(VersionSpecifier::not_equals_star_version(Version::new([ + major, minor, + ]))) + } + _ => None, + } } + _ => None, + }; + if let Some(specifier) = specifier { + specifiers.push(specifier); + } else { + #[cfg(feature = "tracing")] + warn!( + "Ignoring unsupported gap in `requires-python` version: {next:?} -> {lower:?}" + ); } next = upper; } @@ -348,6 +362,33 @@ impl VersionSpecifier { Ok(Self { operator, version }) } + /// Remove all non-release parts of the version. + /// + /// The marker decision diagram relies on the assumption that the negation of a marker tree is + /// the complement of the marker space. However, pre-release versions violate this assumption.
+ /// + /// For example, the marker `python_full_version > '3.9' or python_full_version <= '3.9'` + /// does not match `python_full_version == 3.9.0a0` and so cannot simplify to `true`. However, + /// its negation, `python_full_version > '3.9' and python_full_version <= '3.9'`, also does not + /// match `3.9.0a0` and simplifies to `false`, which violates the algebra decision diagrams + /// rely on. For this reason we ignore pre-release versions entirely when evaluating markers. + /// + /// Note that `python_version` cannot take on pre-release values as it is truncated to just the + /// major and minor version segments. Thus using release-only specifiers is definitely necessary + /// for `python_version` to fully simplify any ranges, such as + /// `python_version > '3.9' or python_version <= '3.9'`, which is always `true` for + /// `python_version`. For `python_full_version` however, this decision is a semantic change. + /// + /// For Python versions, the major.minor is considered the API version, so unlike the rules + /// for package versions in PEP 440, Python `3.9.0a0` is acceptable for `>= "3.9"`. + #[must_use] + pub fn only_release(self) -> Self { + Self { + operator: self.operator, + version: self.version.only_release(), + } + } + /// `==` pub fn equals_version(version: Version) -> Self { Self { @@ -416,7 +457,7 @@ impl VersionSpecifier { &self.operator } - /// Get the version, e.g. `<=` in `<= 2.0.0` + /// Get the version, e.g. `2.0.0` in `<= 2.0.0` pub fn version(&self) -> &Version { &self.version } @@ -442,14 +483,23 @@ impl VersionSpecifier { (Some(VersionSpecifier::equals_version(v1.clone())), None) } // `v >= 3.7 && v < 3.8` is equivalent to `v == 3.7.*` - (Bound::Included(v1), Bound::Excluded(v2)) - if v1.release().len() == 2 - && *v2.release() == [v1.release()[0], v1.release()[1] + 1] => - { - ( - Some(VersionSpecifier::equals_star_version(v1.clone())), - None, - ) + (Bound::Included(v1), Bound::Excluded(v2)) => { + match *v1.only_release_trimmed().release() { + [major] if *v2.only_release_trimmed().release() == [major, 1] => { + let version = Version::new([major, 0]); + (Some(VersionSpecifier::equals_star_version(version)), None) + } + [major, minor] + if *v2.only_release_trimmed().release() == [major, minor + 1] => + { + let version = Version::new([major, minor]); + (Some(VersionSpecifier::equals_star_version(version)), None) + } + _ => ( + VersionSpecifier::from_lower_bound(&Bound::Included(v1.clone())), + VersionSpecifier::from_upper_bound(&Bound::Excluded(v2.clone())), + ), + } } (lower, upper) => ( VersionSpecifier::from_lower_bound(lower), @@ -838,6 +888,90 @@ pub(crate) fn parse_version_specifiers( Ok(version_ranges) } +/// A simple `~=` version specifier with a major, minor and (optional) patch version, e.g., `~=3.13` +/// or `~=3.13.0`. +#[derive(Clone, Debug)] +pub struct TildeVersionSpecifier<'a> { + inner: Cow<'a, VersionSpecifier>, +} + +impl<'a> TildeVersionSpecifier<'a> { + /// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] value. + /// + /// If an [`Operator::TildeEqual`] is not used, or the version includes more than major, minor, + /// and patch segments, this will return [`None`]. + pub fn from_specifier(specifier: VersionSpecifier) -> Option<TildeVersionSpecifier<'a>> { + TildeVersionSpecifier::new(Cow::Owned(specifier)) + } + + /// Create a new [`TildeVersionSpecifier`] from a [`VersionSpecifier`] reference. + /// + /// See [`TildeVersionSpecifier::from_specifier`].
+ pub fn from_specifier_ref( + specifier: &'a VersionSpecifier, + ) -> Option<TildeVersionSpecifier<'a>> { + TildeVersionSpecifier::new(Cow::Borrowed(specifier)) + } + + fn new(specifier: Cow<'a, VersionSpecifier>) -> Option<Self> { + if specifier.operator != Operator::TildeEqual { + return None; + } + if specifier.version().release().len() < 2 || specifier.version().release().len() > 3 { + return None; + } + if specifier.version().any_prerelease() + || specifier.version().is_local() + || specifier.version().is_post() + { + return None; + } + Some(Self { inner: specifier }) + } + + /// Whether a patch version is present in this tilde version specifier. + pub fn has_patch(&self) -> bool { + self.inner.version.release().len() == 3 + } + + /// Construct the lower and upper bounding version specifiers for this tilde version specifier, + /// e.g., for `~=3.13` this would return `>=3.13` and `<4` and for `~=3.13.0` it would + /// return `>=3.13.0` and `<3.14`. + pub fn bounding_specifiers(&self) -> (VersionSpecifier, VersionSpecifier) { + let release = self.inner.version().release(); + let lower = self.inner.version.clone(); + let upper = if self.has_patch() { + Version::new([release[0], release[1] + 1]) + } else { + Version::new([release[0] + 1]) + }; + ( + VersionSpecifier::greater_than_equal_version(lower), + VersionSpecifier::less_than_version(upper), + ) + } + + /// Construct a new tilde `VersionSpecifier` with the given patch version appended. + pub fn with_patch_version(&self, patch: u64) -> TildeVersionSpecifier<'static> { + let mut release = self.inner.version.release().to_vec(); + if self.has_patch() { + release.pop(); + } + release.push(patch); + TildeVersionSpecifier::from_specifier( + VersionSpecifier::from_version(Operator::TildeEqual, Version::new(release)) + .expect("We should always derive a valid new version specifier"), + ) + .expect("We should always derive a new tilde version specifier") + } +} + +impl std::fmt::Display for TildeVersionSpecifier<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.inner) + } +} + #[cfg(test)] mod tests { use std::{cmp::Ordering, str::FromStr}; diff --git a/crates/uv-pep508/Cargo.toml b/crates/uv-pep508/Cargo.toml index 7494a722d..e9306da00 100644 --- a/crates/uv-pep508/Cargo.toml +++ b/crates/uv-pep508/Cargo.toml @@ -41,7 +41,7 @@ version-ranges = { workspace = true } [dev-dependencies] insta = { version = "1.40.0" } -serde_json = { version = "1.0.128" } +serde_json = { workspace = true } tracing-test = { version = "0.2.5" } [features] diff --git a/crates/uv-pep508/src/lib.rs b/crates/uv-pep508/src/lib.rs index e313db86d..e2945743b 100644 --- a/crates/uv-pep508/src/lib.rs +++ b/crates/uv-pep508/src/lib.rs @@ -16,6 +16,8 @@ #![warn(missing_docs)] +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::error::Error; use std::fmt::{Debug, Display, Formatter}; use std::path::Path; @@ -334,22 +336,15 @@ impl Reporter for TracingReporter { #[cfg(feature = "schemars")] impl schemars::JsonSchema for Requirement { - fn schema_name() -> String { - "Requirement".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("Requirement") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some( - "A PEP 508 dependency specifier, e.g., `ruff >= 0.6.0`".to_string(), - ), - ..schemars::schema::Metadata::default() - })),
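A quick illustration of these helpers, assuming the crate's usual `FromStr`/`Display` behavior for specifiers:

use std::str::FromStr;
use uv_pep440::{TildeVersionSpecifier, VersionSpecifier};

fn main() {
    // `~=3.13` has no patch segment, so the upper bound bumps the major version.
    let spec = VersionSpecifier::from_str("~=3.13").unwrap();
    let tilde = TildeVersionSpecifier::from_specifier(spec).unwrap();
    assert!(!tilde.has_patch());
    let (lower, upper) = tilde.bounding_specifiers();
    assert_eq!(lower.to_string(), ">=3.13");
    assert_eq!(upper.to_string(), "<4");

    // Appending a patch tightens the bound: `~=3.13.0` caps at `<3.14`.
    let (lower, upper) = tilde.with_patch_version(0).bounding_specifiers();
    assert_eq!(lower.to_string(), ">=3.13.0");
    assert_eq!(upper.to_string(), "<3.14");
}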
..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "A PEP 508 dependency specifier, e.g., `ruff >= 0.6.0`" + }) } } diff --git a/crates/uv-pep508/src/marker/algebra.rs b/crates/uv-pep508/src/marker/algebra.rs index f421a8fa3..2a3f82f27 100644 --- a/crates/uv-pep508/src/marker/algebra.rs +++ b/crates/uv-pep508/src/marker/algebra.rs @@ -172,7 +172,7 @@ impl InternerGuard<'_> { ), // Normalize `python_version` markers to `python_full_version` nodes. MarkerValueVersion::PythonVersion => { - match python_version_to_full_version(normalize_specifier(specifier)) { + match python_version_to_full_version(specifier.only_release()) { Ok(specifier) => ( Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion), Edges::from_specifier(specifier), @@ -1214,7 +1214,7 @@ impl Edges { /// Returns the [`Edges`] for a version specifier. fn from_specifier(specifier: VersionSpecifier) -> Edges { - let specifier = release_specifier_to_range(normalize_specifier(specifier)); + let specifier = release_specifier_to_range(specifier.only_release(), true); Edges::Version { edges: Edges::from_range(&specifier), } @@ -1227,9 +1227,9 @@ impl Edges { let mut range: Ranges<Version> = versions .into_iter() .map(|version| { - let specifier = VersionSpecifier::equals_version(version.clone()); + let specifier = VersionSpecifier::equals_version(version.only_release()); let specifier = python_version_to_full_version(specifier)?; - Ok(release_specifier_to_range(normalize_specifier(specifier))) + Ok(release_specifier_to_range(specifier, true)) }) .flatten_ok() .collect::<Result<Ranges<Version>, NodeId>>()?; @@ -1526,57 +1526,62 @@ impl Edges { } } -// Normalize a [`VersionSpecifier`] before adding it to the tree. -fn normalize_specifier(specifier: VersionSpecifier) -> VersionSpecifier { - let (operator, version) = specifier.into_parts(); - - // The decision diagram relies on the assumption that the negation of a marker tree is - // the complement of the marker space. However, pre-release versions violate this assumption. - // - // For example, the marker `python_full_version > '3.9' or python_full_version <= '3.9'` - // does not match `python_full_version == 3.9.0a0` and so cannot simplify to `true`. However, - // its negation, `python_full_version > '3.9' and python_full_version <= '3.9'`, also does not - // match `3.9.0a0` and simplifies to `false`, which violates the algebra decision diagrams - // rely on. For this reason we ignore pre-release versions entirely when evaluating markers. - // - // Note that `python_version` cannot take on pre-release values as it is truncated to just the - // major and minor version segments. Thus using release-only specifiers is definitely necessary - // for `python_version` to fully simplify any ranges, such as `python_version > '3.9' or python_version <= '3.9'`, - // which is always `true` for `python_version`. For `python_full_version` however, this decision - // is a semantic change. - let mut release = &*version.release(); - - // Strip any trailing `0`s. - // - // The [`Version`] type ignores trailing `0`s for equality, but still preserves them in its - // [`Display`] output. We must normalize all versions by stripping trailing `0`s to remove the - // distinction between versions like `3.9` and `3.9.0`. Otherwise, their output would depend on - // which form was added to the global marker interner first.
- // - // Note that we cannot strip trailing `0`s for star equality, as `==3.0.*` is different from `==3.*`. - if !operator.is_star() { - if let Some(end) = release.iter().rposition(|segment| *segment != 0) { - if end > 0 { - release = &release[..=end]; - } - } - } - - VersionSpecifier::from_version(operator, Version::new(release)).unwrap() -} - /// Returns the equivalent `python_full_version` specifier for a `python_version` specifier. /// /// Returns `Err` with a constant node if the equivalent comparison is always `true` or `false`. fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> { + // Trailing zeroes matter only for (not-)equals-star and tilde-equals. This means that below + // the next two blocks, we can use the trimmed release as the release. + if specifier.operator().is_star() { + // Input python_version python_full_version + // ==3.* 3.* 3.* + // ==3.0.* 3.0 3.0.* + // ==3.0.0.* 3.0 3.0.* + // ==3.9.* 3.9 3.9.* + // ==3.9.0.* 3.9 3.9.* + // ==3.9.0.0.* 3.9 3.9.* + // ==3.9.1.* FALSE FALSE + // ==3.9.1.0.* FALSE FALSE + // ==3.9.1.0.0.* FALSE FALSE + return match &*specifier.version().release() { + // `3.*` + [_major] => Ok(specifier), + // Ex) `3.9.*`, `3.9.0.*`, or `3.9.0.0.*` + [major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => { + let python_version = Version::new([major, minor]); + // Unwrap safety: A star operator with two version segments is always valid. + Ok(VersionSpecifier::from_version(*specifier.operator(), python_version).unwrap()) + } + // Ex) `3.9.1.*` or `3.9.0.1.*` + _ => Err(NodeId::FALSE), + }; + } + + if *specifier.operator() == Operator::TildeEqual { + // python_version python_full_version + // ~=3 (not possible) + // ~= 3.0 >= 3.0, < 4.0 + // ~= 3.9 >= 3.9, < 4.0 + // ~= 3.9.0 == 3.9.* + // ~= 3.9.1 FALSE + // ~= 3.9.0.0 == 3.9.* + // ~= 3.9.0.1 FALSE + return match &*specifier.version().release() { + // Ex) `3.0`, `3.7` + [_major, _minor] => Ok(specifier), + // Ex) `3.9`, `3.9.0`, or `3.9.0.0` + [major, minor, rest @ ..] if rest.iter().all(|x| *x == 0) => { + let python_version = Version::new([major, minor]); + Ok(VersionSpecifier::equals_star_version(python_version)) + } + // Ex) `3.9.1` or `3.9.0.1` + _ => Err(NodeId::FALSE), + }; + } + // Extract the major and minor version segments if the specifier contains exactly // those segments, or if it contains a major segment with an implied minor segment of `0`. - let major_minor = match *specifier.version().release() { - // For star operators, we cannot add a trailing `0`. - // - // `python_version == 3.*` is equivalent to `python_full_version == 3.*`. Adding a - // trailing `0` would result in `python_version == 3.0.*`, which is incorrect. - [_major] if specifier.operator().is_star() => return Ok(specifier), + let major_minor = match *specifier.version().only_release_trimmed().release() { // Add a trailing `0` for the minor version, which is implied. // For example, `python_version == 3` matches `3.0.1`, `3.0.2`, etc. [major] => Some((major, 0)), @@ -1614,9 +1619,10 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> specifier, + Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => { + // Handled above. + unreachable!() + } }) } else { let [major, minor, ..] 
= *specifier.version().release() else { @@ -1624,13 +1630,14 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> { + // `python_version` cannot have more than two release segments, and we know + // that the following release segments aren't purely zeroes so equality is impossible. + Operator::Equal | Operator::ExactEqual => { return Err(NodeId::FALSE); } // Similarly, inequalities are always `true`. - Operator::NotEqual | Operator::NotEqualStar => return Err(NodeId::TRUE), + Operator::NotEqual => return Err(NodeId::TRUE), // `python_version {<,<=} 3.7.8` is equivalent to `python_full_version < 3.8`. Operator::LessThan | Operator::LessThanEqual => { @@ -1641,6 +1648,11 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result<VersionSpecifier, NodeId> { VersionSpecifier::greater_than_equal_version(Version::new([major, minor + 1])) } + + Operator::EqualStar | Operator::NotEqualStar | Operator::TildeEqual => { + // Handled above. + unreachable!() + } }) } } diff --git a/crates/uv-pep508/src/marker/simplify.rs b/crates/uv-pep508/src/marker/simplify.rs index 34c095b09..3dc03693a 100644 --- a/crates/uv-pep508/src/marker/simplify.rs +++ b/crates/uv-pep508/src/marker/simplify.rs @@ -64,8 +64,8 @@ fn collect_dnf( continue; } - // Detect whether the range for this edge can be simplified as a star inequality. - if let Some(specifier) = star_range_inequality(&range) { + // Detect whether the range for this edge can be simplified as a star specifier. + if let Some(specifier) = star_range_specifier(&range) { path.push(MarkerExpression::Version { key: marker.key().into(), specifier, @@ -343,22 +343,34 @@ where Some(excluded) } -/// Returns `Some` if the version expression can be simplified as a star inequality with the given -/// specifier. +/// Returns `Some` if the version range can be simplified as a star specifier. /// -/// For example, `python_full_version < '3.8' or python_full_version >= '3.9'` can be simplified to -/// `python_full_version != '3.8.*'`. -fn star_range_inequality(range: &Ranges<Version>) -> Option<VersionSpecifier> { +/// Only for the two bounds case not covered by [`VersionSpecifier::from_release_only_bounds`]. +/// +/// For negative ranges like `python_full_version < '3.8' or python_full_version >= '3.9'`, +/// returns `!= '3.8.*'`.
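The effect of this normalization surfaces at the `MarkerTree` level; a small end-to-end check mirroring the expectations in the tests below:

use std::str::FromStr;
use uv_pep508::MarkerTree;

fn main() {
    // `python_version` comparisons are rewritten to `python_full_version`,
    // with trailing zeroes trimmed, so `3.9.0` and `3.9` collapse to one form.
    let tree = MarkerTree::from_str("python_version == '3.9.0'").unwrap();
    assert_eq!(tree.try_to_string().unwrap(), "python_full_version == '3.9.*'");
}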
+fn star_range_specifier(range: &Ranges<Version>) -> Option<VersionSpecifier> { + if range.iter().count() != 2 { + return None; + } + // Check for negative star range: two segments [(Unbounded, Excluded(v1)), (Included(v2), Unbounded)] let (b1, b2) = range.iter().collect_tuple()?; - - match (b1, b2) { - ((Bound::Unbounded, Bound::Excluded(v1)), (Bound::Included(v2), Bound::Unbounded)) - if v1.release().len() == 2 - && *v2.release() == [v1.release()[0], v1.release()[1] + 1] => - { - Some(VersionSpecifier::not_equals_star_version(v1.clone())) + if let ((Bound::Unbounded, Bound::Excluded(v1)), (Bound::Included(v2), Bound::Unbounded)) = + (b1, b2) + { + match *v1.only_release_trimmed().release() { + [major] if *v2.release() == [major, 1] => { + Some(VersionSpecifier::not_equals_star_version(Version::new([ + major, 0, + ]))) + } + [major, minor] if *v2.release() == [major, minor + 1] => { + Some(VersionSpecifier::not_equals_star_version(v1.clone())) + } + _ => None, } - _ => None, + } else { + None } } diff --git a/crates/uv-pep508/src/marker/tree.rs b/crates/uv-pep508/src/marker/tree.rs index 070a24b26..5739d7c98 100644 --- a/crates/uv-pep508/src/marker/tree.rs +++ b/crates/uv-pep508/src/marker/tree.rs @@ -1707,23 +1707,15 @@ impl Display for MarkerTreeContents { #[cfg(feature = "schemars")] impl schemars::JsonSchema for MarkerTree { - fn schema_name() -> String { - "MarkerTree".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("MarkerTree") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some( - "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`" - .to_string(), - ), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "description": "A PEP 508-compliant marker expression, e.g., `sys_platform == 'Darwin'`" + }) } } @@ -2279,13 +2271,13 @@ mod test { #[test] fn test_marker_simplification() { assert_false("python_version == '3.9.1'"); - assert_false("python_version == '3.9.0.*'"); assert_true("python_version != '3.9.1'"); - // Technically these is are valid substring comparison, but we do not allow them. - // e.g., using a version with patch components with `python_version` is considered - // impossible to satisfy since the value it is truncated at the minor version - assert_false("python_version in '3.9.0'"); + // This is an edge case that happens to be supported, but is not critical to support. + assert_simplifies( + "python_version in '3.9.0'", + "python_full_version == '3.9.*'", + ); // e.g., using a version that is not PEP 440 compliant is considered arbitrary assert_true("python_version in 'foo'"); // e.g., including `*` versions, which would require tracking a version specifier assert_true("python_version in '3.9.*'"); // e.g., when the range is not contiguous assert_true("python_version in '3.9,3.10'"); assert_true("python_version in '3.9 or 3.10'"); - // e.g, when one of the values cannot be true - // TODO(zanieb): This seems like a quirk of the `python_full_version` normalization, this - // should just act as though the patch version isn't present - assert_false("python_version in '3.9 3.10.0 3.11'"); + // This is an edge case that happens to be supported, but is not critical to support.
+ assert_simplifies( + "python_version in '3.9 3.10.0 3.11'", + "python_full_version >= '3.9' and python_full_version < '3.12'", + ); assert_simplifies("python_version == '3.9'", "python_full_version == '3.9.*'"); assert_simplifies( "python_version == '3.9.0'", "python_full_version == '3.9.*'", ); + assert_simplifies( + "python_version == '3.9.0.*'", + "python_full_version == '3.9.*'", + ); + assert_simplifies( + "python_version == '3.*'", + "python_full_version >= '3' and python_full_version < '4'", + ); // ` in` // e.g., when the range is not contiguous @@ -2515,7 +2516,7 @@ mod test { #[test] fn test_simplification_extra_versus_other() { // Here, the `extra != 'foo'` cannot be simplified out, because - // `extra == 'foo'` can be true even when `extra == 'bar`' is true. + // `extra == 'foo'` can be true even when `extra == 'bar'`' is true. assert_simplifies( r#"extra != "foo" and (extra == "bar" or extra == "baz")"#, "(extra == 'bar' and extra != 'foo') or (extra == 'baz' and extra != 'foo')", @@ -2536,6 +2537,68 @@ mod test { ); } + #[test] + fn test_python_version_equal_star() { + // Input, equivalent with python_version, equivalent with python_full_version + let cases = [ + ("3.*", "3.*", "3.*"), + ("3.0.*", "3.0", "3.0.*"), + ("3.0.0.*", "3.0", "3.0.*"), + ("3.9.*", "3.9", "3.9.*"), + ("3.9.0.*", "3.9", "3.9.*"), + ("3.9.0.0.*", "3.9", "3.9.*"), + ]; + for (input, equal_python_version, equal_python_full_version) in cases { + assert_eq!( + m(&format!("python_version == '{input}'")), + m(&format!("python_version == '{equal_python_version}'")), + "{input} {equal_python_version}" + ); + assert_eq!( + m(&format!("python_version == '{input}'")), + m(&format!( + "python_full_version == '{equal_python_full_version}'" + )), + "{input} {equal_python_full_version}" + ); + } + + let cases_false = ["3.9.1.*", "3.9.1.0.*", "3.9.1.0.0.*"]; + for input in cases_false { + assert!( + m(&format!("python_version == '{input}'")).is_false(), + "{input}" + ); + } + } + + #[test] + fn test_tilde_equal_normalization() { + assert_eq!( + m("python_version ~= '3.10.0'"), + m("python_version >= '3.10.0' and python_version < '3.11.0'") + ); + + // Two digit versions such as `python_version` get padded with a zero, so they can never + // match + assert_eq!(m("python_version ~= '3.10.1'"), MarkerTree::FALSE); + + assert_eq!( + m("python_version ~= '3.10'"), + m("python_version >= '3.10' and python_version < '4.0'") + ); + + assert_eq!( + m("python_full_version ~= '3.10.0'"), + m("python_full_version >= '3.10.0' and python_full_version < '3.11.0'") + ); + + assert_eq!( + m("python_full_version ~= '3.10'"), + m("python_full_version >= '3.10' and python_full_version < '4.0'") + ); + } + /// This tests marker implication. /// /// Specifically, these test cases come from a [bug] where `foo` and `bar` @@ -3332,4 +3395,32 @@ mod test { ] ); } + + /// Case a: There is no version `3` (no trailing zero) in the interner yet. + #[test] + fn marker_normalization_a() { + let left_tree = m("python_version == '3.0.*'"); + let left = left_tree.try_to_string().unwrap(); + let right = "python_full_version == '3.0.*'"; + assert_eq!(left, right, "{left} != {right}"); + } + + /// Case b: There is already a version `3` (no trailing zero) in the interner. 
+ #[test] + fn marker_normalization_b() { + m("python_version >= '3' and python_version <= '3.0'"); + + let left_tree = m("python_version == '3.0.*'"); + let left = left_tree.try_to_string().unwrap(); + let right = "python_full_version == '3.0.*'"; + assert_eq!(left, right, "{left} != {right}"); + } + + #[test] + fn marker_normalization_c() { + let left_tree = MarkerTree::from_str("python_version == '3.10.0.*'").unwrap(); + let left = left_tree.try_to_string().unwrap(); + let right = "python_full_version == '3.10.*'"; + assert_eq!(left, right, "{left} != {right}"); + } } diff --git a/crates/uv-pep508/src/origin.rs b/crates/uv-pep508/src/origin.rs index 91a88f59a..4619e6f2e 100644 --- a/crates/uv-pep508/src/origin.rs +++ b/crates/uv-pep508/src/origin.rs @@ -12,8 +12,8 @@ pub enum RequirementOrigin { File(PathBuf), /// The requirement was provided via a local project (e.g., a `pyproject.toml` file). Project(PathBuf, PackageName), - /// The requirement was provided via a local project (e.g., a `pyproject.toml` file). - Group(PathBuf, PackageName, GroupName), + /// The requirement was provided via a local project's group (e.g., a `pyproject.toml` file). + Group(PathBuf, Option<PackageName>, GroupName), /// The requirement was provided via a workspace. Workspace, } diff --git a/crates/uv-pep508/src/verbatim_url.rs b/crates/uv-pep508/src/verbatim_url.rs index 988bebc5e..c800ba10c 100644 --- a/crates/uv-pep508/src/verbatim_url.rs +++ b/crates/uv-pep508/src/verbatim_url.rs @@ -18,11 +18,16 @@ use uv_redacted::DisplaySafeUrl; use crate::Pep508Url; /// A wrapper around [`Url`] that preserves the original string. +/// +/// The original string is not preserved after serialization/deserialization. #[derive(Debug, Clone, Eq)] pub struct VerbatimUrl { /// The parsed URL. url: DisplaySafeUrl, /// The URL as it was provided by the user. + /// + /// Even if originally set, this will be [`None`] after + /// serialization/deserialization. given: Option, } @@ -166,6 +171,11 @@ impl VerbatimUrl { &self.url } + /// Return a mutable reference to the underlying [`DisplaySafeUrl`]. + pub fn raw_mut(&mut self) -> &mut DisplaySafeUrl { + &mut self.url + } + /// Convert a [`VerbatimUrl`] into a [`DisplaySafeUrl`].
     pub fn to_url(&self) -> DisplaySafeUrl {
         self.url.clone()
diff --git a/crates/uv-performance-memory-allocator/Cargo.lock b/crates/uv-performance-memory-allocator/Cargo.lock
index 831d5a0f9..e1650c824 100644
--- a/crates/uv-performance-memory-allocator/Cargo.lock
+++ b/crates/uv-performance-memory-allocator/Cargo.lock
@@ -19,9 +19,9 @@ checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
 
 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.42"
+version = "0.1.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4"
+checksum = "bf88cd67e9de251c1781dbe2f641a1a3ad66eaae831b8a2c38fbdc5ddae16d4d"
 dependencies = [
  "cc",
  "libc",
@@ -29,9 +29,9 @@ dependencies = [
 
 [[package]]
 name = "mimalloc"
-version = "0.1.46"
+version = "0.1.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af"
+checksum = "b1791cbe101e95af5764f06f20f6760521f7158f69dbf9d6baf941ee1bf6bc40"
 dependencies = [
  "libmimalloc-sys",
 ]
diff --git a/crates/uv-publish/src/lib.rs b/crates/uv-publish/src/lib.rs
index dd8358439..f3dc768c6 100644
--- a/crates/uv-publish/src/lib.rs
+++ b/crates/uv-publish/src/lib.rs
@@ -12,7 +12,6 @@ use itertools::Itertools;
 use reqwest::header::AUTHORIZATION;
 use reqwest::multipart::Part;
 use reqwest::{Body, Response, StatusCode};
-use reqwest_middleware::RequestBuilder;
 use reqwest_retry::policies::ExponentialBackoff;
 use reqwest_retry::{RetryPolicy, Retryable, RetryableStrategy};
 use rustc_hash::FxHashSet;
@@ -29,7 +28,7 @@ use uv_auth::Credentials;
 use uv_cache::{Cache, Refresh};
 use uv_client::{
     BaseClient, DEFAULT_RETRIES, MetadataFormat, OwnedArchive, RegistryClientBuilder,
-    UvRetryableStrategy,
+    RequestBuilder, UvRetryableStrategy,
 };
 use uv_configuration::{KeyringProviderType, TrustedPublishing};
 use uv_distribution_filename::{DistFilename, SourceDistExtension, SourceDistFilename};
@@ -330,7 +329,9 @@ pub async fn check_trusted_publishing(
             debug!(
                 "Running on GitHub Actions without explicit credentials, checking for trusted publishing"
             );
-            match trusted_publishing::get_token(registry, client.for_host(registry)).await {
+            match trusted_publishing::get_token(registry, client.for_host(registry).raw_client())
+                .await
+            {
                 Ok(token) => Ok(TrustedPublishResult::Configured(token)),
                 Err(err) => {
                     // TODO(konsti): It would be useful if we could differentiate between actual errors
@@ -364,7 +365,9 @@ pub async fn check_trusted_publishing(
                 );
             }
 
-            let token = trusted_publishing::get_token(registry, client.for_host(registry)).await?;
+            let token =
+                trusted_publishing::get_token(registry, client.for_host(registry).raw_client())
+                    .await?;
             Ok(TrustedPublishResult::Configured(token))
         }
         TrustedPublishing::Never => Ok(TrustedPublishResult::Skipped),
@@ -387,7 +390,7 @@ pub async fn upload(
     download_concurrency: &Semaphore,
     reporter: Arc<impl Reporter>,
 ) -> Result<bool, PublishError> {
-    let form_metadata = form_metadata(file, filename)
+    let form_metadata = FormMetadata::read_from_file(file, filename)
         .await
         .map_err(|err| PublishError::PublishPrepare(file.to_path_buf(), Box::new(err)))?;
@@ -641,125 +644,143 @@ async fn metadata(file: &Path, filename: &DistFilename) -> Result<Metadata23, PublishPrepareError>
-async fn form_metadata(
-    file: &Path,
-    filename: &DistFilename,
-) -> Result<Vec<(&'static str, String)>, PublishPrepareError> {
-    let hash_hex = hash_file(file, Hasher::from(HashAlgorithm::Sha256)).await?;
+#[derive(Debug, Clone)]
+struct FormMetadata(Vec<(&'static str, String)>);
 
-    let Metadata23 {
-        metadata_version,
-        name,
-        version,
-        platforms,
-        // Not used by PyPI legacy upload
-        supported_platforms: _,
-        summary,
-        description,
-        description_content_type,
-        keywords,
-        home_page,
-        download_url,
-        author,
-        author_email,
-        maintainer,
-        maintainer_email,
-        license,
-        license_expression,
-        license_files,
-        classifiers,
-        requires_dist,
-        provides_dist,
-        obsoletes_dist,
-        requires_python,
-        requires_external,
-        project_urls,
-        provides_extras,
-        dynamic,
-    } = metadata(file, filename).await?;
+impl FormMetadata {
+    /// Collect the non-file fields for the multipart request from the package METADATA.
+    ///
+    /// Reference implementation:
+    async fn read_from_file(
+        file: &Path,
+        filename: &DistFilename,
+    ) -> Result<Self, PublishPrepareError> {
+        let hash_hex = hash_file(file, Hasher::from(HashAlgorithm::Sha256)).await?;
 
-    let mut form_metadata = vec![
-        (":action", "file_upload".to_string()),
-        ("sha256_digest", hash_hex.digest.to_string()),
-        ("protocol_version", "1".to_string()),
-        ("metadata_version", metadata_version.clone()),
-        // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)`
-        //   *
-        //   *
-        // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate
-        // `normalized_name`, so we'll start with this and we'll readjust if there are user reports.
-        ("name", name.clone()),
-        ("version", version.clone()),
-        ("filetype", filename.filetype().to_string()),
-    ];
+        let Metadata23 {
+            metadata_version,
+            name,
+            version,
+            platforms,
+            // Not used by PyPI legacy upload
+            supported_platforms: _,
+            summary,
+            description,
+            description_content_type,
+            keywords,
+            home_page,
+            download_url,
+            author,
+            author_email,
+            maintainer,
+            maintainer_email,
+            license,
+            license_expression,
+            license_files,
+            classifiers,
+            requires_dist,
+            provides_dist,
+            obsoletes_dist,
+            requires_python,
+            requires_external,
+            project_urls,
+            provides_extras,
+            dynamic,
+        } = metadata(file, filename).await?;
 
-    if let DistFilename::WheelFilename(wheel) = filename {
-        form_metadata.push(("pyversion", wheel.python_tags().iter().join(".")));
-    } else {
-        form_metadata.push(("pyversion", "source".to_string()));
+        let mut form_metadata = vec![
+            (":action", "file_upload".to_string()),
+            ("sha256_digest", hash_hex.digest.to_string()),
+            ("protocol_version", "1".to_string()),
+            ("metadata_version", metadata_version.clone()),
+            // Twine transforms the name with `re.sub("[^A-Za-z0-9.]+", "-", name)`
+            //   *
+            //   *
+            // warehouse seems to call `packaging.utils.canonicalize_name` nowadays and has a separate
+            // `normalized_name`, so we'll start with this and we'll readjust if there are user reports.
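+            // For example, Twine's regex maps `My.Package_Name` to `My.Package-Name`
+            // (case and dots preserved), while `canonicalize_name` would yield
+            // `my-package-name`.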
+ ("name", name.clone()), + ("version", version.clone()), + ("filetype", filename.filetype().to_string()), + ]; + + if let DistFilename::WheelFilename(wheel) = filename { + form_metadata.push(("pyversion", wheel.python_tags().iter().join("."))); + } else { + form_metadata.push(("pyversion", "source".to_string())); + } + + let mut add_option = |name, value: Option| { + if let Some(some) = value.clone() { + form_metadata.push((name, some)); + } + }; + + add_option("author", author); + add_option("author_email", author_email); + add_option("description", description); + add_option("description_content_type", description_content_type); + add_option("download_url", download_url); + add_option("home_page", home_page); + add_option("keywords", keywords); + add_option("license", license); + add_option("license_expression", license_expression); + add_option("maintainer", maintainer); + add_option("maintainer_email", maintainer_email); + add_option("summary", summary); + + // The GitLab PyPI repository API implementation requires this metadata field and twine always + // includes it in the request, even when it's empty. + form_metadata.push(("requires_python", requires_python.unwrap_or(String::new()))); + + let mut add_vec = |name, values: Vec| { + for i in values { + form_metadata.push((name, i.clone())); + } + }; + + add_vec("classifiers", classifiers); + add_vec("dynamic", dynamic); + add_vec("license_file", license_files); + add_vec("obsoletes_dist", obsoletes_dist); + add_vec("platform", platforms); + add_vec("project_urls", project_urls); + add_vec("provides_dist", provides_dist); + add_vec("provides_extra", provides_extras); + add_vec("requires_dist", requires_dist); + add_vec("requires_external", requires_external); + + Ok(Self(form_metadata)) } - let mut add_option = |name, value: Option| { - if let Some(some) = value.clone() { - form_metadata.push((name, some)); - } - }; + /// Returns an iterator over the metadata fields. + fn iter(&self) -> std::slice::Iter<'_, (&'static str, String)> { + self.0.iter() + } +} - add_option("author", author); - add_option("author_email", author_email); - add_option("description", description); - add_option("description_content_type", description_content_type); - add_option("download_url", download_url); - add_option("home_page", home_page); - add_option("keywords", keywords); - add_option("license", license); - add_option("license_expression", license_expression); - add_option("maintainer", maintainer); - add_option("maintainer_email", maintainer_email); - add_option("summary", summary); - - // The GitLab PyPI repository API implementation requires this metadata field and twine always - // includes it in the request, even when it's empty. 
- form_metadata.push(("requires_python", requires_python.unwrap_or(String::new()))); - - let mut add_vec = |name, values: Vec| { - for i in values { - form_metadata.push((name, i.clone())); - } - }; - - add_vec("classifiers", classifiers); - add_vec("dynamic", dynamic); - add_vec("license_file", license_files); - add_vec("obsoletes_dist", obsoletes_dist); - add_vec("platform", platforms); - add_vec("project_urls", project_urls); - add_vec("provides_dist", provides_dist); - add_vec("provides_extra", provides_extras); - add_vec("requires_dist", requires_dist); - add_vec("requires_external", requires_external); - - Ok(form_metadata) +impl<'a> IntoIterator for &'a FormMetadata { + type Item = &'a (&'a str, String); + type IntoIter = std::slice::Iter<'a, (&'a str, String)>; + fn into_iter(self) -> Self::IntoIter { + self.iter() + } } /// Build the upload request. /// /// Returns the request and the reporter progress bar id. -async fn build_request( +async fn build_request<'a>( file: &Path, raw_filename: &str, filename: &DistFilename, registry: &DisplaySafeUrl, - client: &BaseClient, + client: &'a BaseClient, credentials: &Credentials, - form_metadata: &[(&'static str, String)], + form_metadata: &FormMetadata, reporter: Arc, -) -> Result<(RequestBuilder, usize), PublishPrepareError> { +) -> Result<(RequestBuilder<'a>, usize), PublishPrepareError> { let mut form = reqwest::multipart::Form::new(); - for (key, value) in form_metadata { + for (key, value) in form_metadata.iter() { form = form.text(*key, value.clone()); } @@ -885,16 +906,19 @@ async fn handle_response(registry: &Url, response: Response) -> Result<(), Publi #[cfg(test)] mod tests { - use crate::{Reporter, build_request, form_metadata}; - use insta::{assert_debug_snapshot, assert_snapshot}; - use itertools::Itertools; use std::path::PathBuf; use std::sync::Arc; + + use insta::{assert_debug_snapshot, assert_snapshot}; + use itertools::Itertools; + use uv_auth::Credentials; use uv_client::BaseClientBuilder; use uv_distribution_filename::DistFilename; use uv_redacted::DisplaySafeUrl; + use crate::{FormMetadata, Reporter, build_request}; + struct DummyReporter; impl Reporter for DummyReporter { @@ -913,7 +937,9 @@ mod tests { let file = PathBuf::from("../../scripts/links/").join(raw_filename); let filename = DistFilename::try_from_normalized_filename(raw_filename).unwrap(); - let form_metadata = form_metadata(&file, &filename).await.unwrap(); + let form_metadata = FormMetadata::read_from_file(&file, &filename) + .await + .unwrap(); let formatted_metadata = form_metadata .iter() @@ -969,12 +995,13 @@ mod tests { project_urls: Source, https://github.com/unknown/tqdm "###); + let client = BaseClientBuilder::new().build(); let (request, _) = build_request( &file, raw_filename, &filename, &DisplaySafeUrl::parse("https://example.org/upload").unwrap(), - &BaseClientBuilder::new().build(), + &client, &Credentials::basic(Some("ferris".to_string()), Some("F3RR!S".to_string())), &form_metadata, Arc::new(DummyReporter), @@ -985,7 +1012,7 @@ mod tests { insta::with_settings!({ filters => [("boundary=[0-9a-f-]+", "boundary=[...]")], }, { - assert_debug_snapshot!(&request, @r#" + assert_debug_snapshot!(&request.raw_builder(), @r#" RequestBuilder { inner: RequestBuilder { method: POST, @@ -1024,7 +1051,9 @@ mod tests { let file = PathBuf::from("../../scripts/links/").join(raw_filename); let filename = DistFilename::try_from_normalized_filename(raw_filename).unwrap(); - let form_metadata = form_metadata(&file, &filename).await.unwrap(); + let form_metadata 
+            .await
+            .unwrap();
 
         let formatted_metadata = form_metadata
             .iter()
@@ -1118,12 +1147,13 @@ mod tests {
         requires_dist: requests ; extra == 'telegram'
         "###);
 
+        let client = BaseClientBuilder::new().build();
         let (request, _) = build_request(
             &file,
             raw_filename,
             &filename,
             &DisplaySafeUrl::parse("https://example.org/upload").unwrap(),
-            &BaseClientBuilder::new().build(),
+            &client,
             &Credentials::basic(Some("ferris".to_string()), Some("F3RR!S".to_string())),
             &form_metadata,
             Arc::new(DummyReporter),
@@ -1134,7 +1164,7 @@ mod tests {
         insta::with_settings!({
             filters => [("boundary=[0-9a-f-]+", "boundary=[...]")],
         }, {
-            assert_debug_snapshot!(&request, @r#"
+            assert_debug_snapshot!(&request.raw_builder(), @r#"
         RequestBuilder {
             inner: RequestBuilder {
                 method: POST,
diff --git a/crates/uv-pypi-types/src/conflicts.rs b/crates/uv-pypi-types/src/conflicts.rs
index 94366bfd2..81064955a 100644
--- a/crates/uv-pypi-types/src/conflicts.rs
+++ b/crates/uv-pypi-types/src/conflicts.rs
@@ -3,6 +3,8 @@ use petgraph::{
     graph::{DiGraph, NodeIndex},
 };
 use rustc_hash::{FxHashMap, FxHashSet};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::{collections::BTreeSet, hash::Hash, rc::Rc};
 use uv_normalize::{ExtraName, GroupName, PackageName};
 
@@ -638,12 +640,12 @@ pub struct SchemaConflictItem {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for SchemaConflictItem {
-    fn schema_name() -> String {
-        "SchemaConflictItem".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("SchemaConflictItem")
     }
 
-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        <String as schemars::JsonSchema>::json_schema(r#gen)
+    fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        <String as schemars::JsonSchema>::json_schema(generator)
     }
 }
 
diff --git a/crates/uv-pypi-types/src/identifier.rs b/crates/uv-pypi-types/src/identifier.rs
index b0c78d5b2..47439f2c9 100644
--- a/crates/uv-pypi-types/src/identifier.rs
+++ b/crates/uv-pypi-types/src/identifier.rs
@@ -1,4 +1,6 @@
 use serde::{Serialize, Serializer};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::fmt::Display;
 use std::str::FromStr;
 use thiserror::Error;
@@ -99,25 +101,16 @@ impl Serialize for Identifier {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for Identifier {
-    fn schema_name() -> String {
-        "Identifier".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("Identifier")
     }
 
-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            string: Some(Box::new(schemars::schema::StringValidation {
-                // Best-effort Unicode support (https://stackoverflow.com/a/68844380/3549270)
-                pattern: Some(r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*$".to_string()),
-                ..schemars::schema::StringValidation::default()
-            })),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("An identifier in Python".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "pattern": r"^[_\p{Alphabetic}][_0-9\p{Alphabetic}]*$",
+            "description": "An identifier in Python"
+        })
     }
 }
diff --git a/crates/uv-python/Cargo.toml b/crates/uv-python/Cargo.toml
index 59a9829e0..d008b2d4e 100644
--- a/crates/uv-python/Cargo.toml
+++ 
b/crates/uv-python/Cargo.toml @@ -20,6 +20,7 @@ uv-cache = { workspace = true } uv-cache-info = { workspace = true } uv-cache-key = { workspace = true } uv-client = { workspace = true } +uv-configuration = { workspace = true } uv-dirs = { workspace = true } uv-distribution-filename = { workspace = true } uv-extract = { workspace = true } @@ -38,11 +39,14 @@ uv-warnings = { workspace = true } anyhow = { workspace = true } clap = { workspace = true, optional = true } configparser = { workspace = true } +dunce = { workspace = true } fs-err = { workspace = true, features = ["tokio"] } futures = { workspace = true } goblin = { workspace = true, default-features = false } +indexmap = { workspace = true } itertools = { workspace = true } owo-colors = { workspace = true } +ref-cast = { workspace = true } regex = { workspace = true } reqwest = { workspace = true } reqwest-middleware = { workspace = true } diff --git a/crates/uv-python/download-metadata.json b/crates/uv-python/download-metadata.json index 71fe83c78..b697da9c8 100644 --- a/crates/uv-python/download-metadata.json +++ b/crates/uv-python/download-metadata.json @@ -1,4 +1,836 @@ { + "cpython-3.14.0b3-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0b948f37363193fcf5e20c2e887183467907f1b6d04420fc5a0c0c7c421e7b12", + "variant": null + }, + "cpython-3.14.0b3-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "47f21cf35481e5ba8e4e6b35c4dd549b0463d0f1dc24134d6e7fcc832a292869", + "variant": null + }, + "cpython-3.14.0b3-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2935079dd417d8940955f0b083be698ae27a1d65f947614c36ce5e4ea509c812", + "variant": null + }, + "cpython-3.14.0b3-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "b64dfec2a7016ae5fa5340298f46c05df0c93a30021c009fd3db9b97a5cad92b", + "variant": null + }, + "cpython-3.14.0b3-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + 
"sha256": "7139f66c73f09f8ed3fcd840e08b85dc591fe8df048cfa5c48dc695a68f74149", + "variant": null + }, + "cpython-3.14.0b3-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "5210b912d9dc1e7ee9fc215972c7c254ddaf9d64ad293f42af1a819896a4cbed", + "variant": null + }, + "cpython-3.14.0b3-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "66a8f6825c5e1b289bfd62370b4cc6c9b5212a91b0440dcf5408c4e3bcfcdddd", + "variant": null + }, + "cpython-3.14.0b3-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2af3a5d27e7fd49b5796a35c1f4a17848d9e5d40c946b9e690d7c27e527d99d8", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "17643efc55b6b68b4fa7b3a5e43abb0ea31b4f03942e2d17bd04c5cd5be52c52", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "1c35d7e5ac357d012d3c265da406e331535bf9fa5e29454b190ac8cc0c57dd40", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8d7c283a6f9e18377776968c5d5fcce9ff0a9c833c4f6c64d8f804da743e0e9d", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": 
"75d5b65bae7b39f3e35a30070a7ccef0c773b1976e764c7fb68ba840a3ad0594", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "db25121d9a35f1613e281ead33903a7e6489d0506207451ef49d82eb71d722df", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "31cbe24575231d706937802a8f81536d11dd79f8c9cd7981b8f93b970a8e8481", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "3c98b94dfc77c9d6369c3cdc09e03abc0dad2ead2f40a6b52d1b119bdcb33ab7", + "variant": null + }, + "cpython-3.14.0b3-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "0742eb6b381fdb6b57983f8a5918dd9e154953f959f2be5a203699e5b1901c1b", + "variant": null + }, + "cpython-3.14.0b3-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "62dc6ff21cbbf2c216f1b9f573ed8e0433c0f7185280a13b2b2f3a81ac862b90", + "variant": null + }, + "cpython-3.14.0b3-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0fc98664753360e23eaf3aa169657627ca5766141a49e1cfb0397895cbb47826", + "variant": null + }, + "cpython-3.14.0b3-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": 
"5b5ef4c03b4e2aaab389f10b973914780d76bd82eeaeb3c305239a57aba2e367", + "variant": null + }, + "cpython-3.14.0b3+freethreaded-darwin-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "d19213021f5fd039d7021ccb41698cc99ca313064d7c1cc9b5ef8f831abb9961", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-darwin-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "darwin", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "26ec6697bbb38c3fa6275e79e110854b2585914ca503c65916478e7ca8d0491b", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "b01cc74173515cc3733f0af62b7d574364c1c68daf3ad748bca47e4328770cde", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "199ff8d1366007d666840a7320b0a44e6bab0aa0ee1e13e9247d3ec610ed9d45", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "e62adb4c3c7549bb909556457ac7863b98073bdcf5e6d9ffec52182b0fe32ccd", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "1f093e0c3532e27744e3fb73a8c738355910b6bfa195039e4f73b4f48c1bc4fc", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "73162a5da31cc1e410d456496114f8e5ee7243bc7bbe0e087b1ea50f0fdc6774", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "045017e60f1298111e8ccfec6afbe47abe56f82997258c8754009269a5343736", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "081f0147d8f4479764d6a3819f67275be3306003366eda9ecb9ee844f2f611be", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "3e20f3c4757ca3d3738e2b4ed9bb7ce1b6b868b0f92e1766549b58bdfdf6ad79", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v2-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "7b50ca3a919531e6d175308d53efa0ccd3d21438ac735a51c7fdcd74c5316f99", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "6787ae8dfa33268ae3336d9f2ff7107bb9da5714757cab2aed20bf916835888f", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "6f16bffec9ad3717498b379b5640956abeb39b830ae390bb650585beac14b974", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": 
"x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "651aef6d3640db60dbb0c28c68d194846053b3d08085427e1c9c76eb13de5e46", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "637097da9317bd1af34a2f3baab76d98fb11aee3fb887dec4e829616d944cdb8", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "f607cd590190311cbe5f85d82d4220eb5b71416486b827e99b93ca1c341f2045", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "331816d79cd78eaadba5ae6cdd3a243771199d0ca07057e7a452158dd4a7edcc", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-windows-i686-none": { + "name": "cpython", + "arch": { + "family": "i686", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "2e55b7204f391fbe653168e6004daf5ed624d890ab7dd7d5aa7f7234e271ae47", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+freethreaded-windows-x86_64-none": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "8de6235b29396e3b25fc3ade166c49506171ec464cda46987ef9641dd9a44071", + "variant": "freethreaded" + }, + "cpython-3.14.0b3+debug-linux-aarch64-gnu": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9bc39c6c669aaba047690395bf0955062aa80edb4fd92c59ada03a18a3df1234", + "variant": "debug" 
+ }, + "cpython-3.14.0b3+debug-linux-armv7-gnueabi": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabi", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "544dc6759e2d7af367eeb5d3c45116c52c33054a730e120a8ea442e6e8b9d091", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-armv7-gnueabihf": { + "name": "cpython", + "arch": { + "family": "armv7", + "variant": null + }, + "os": "linux", + "libc": "gnueabihf", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "3e91cd08cefd404d55003ec25347ae9d591e72ee77a00e2a172ce206c34f5ecc", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-powerpc64le-gnu": { + "name": "cpython", + "arch": { + "family": "powerpc64le", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b5679a4176431600ce146a6783046bbac84721d99ff91ead0b8eef1538514369", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-riscv64-gnu": { + "name": "cpython", + "arch": { + "family": "riscv64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "9724b0ebf2a8f80c1dd76bcb9880297bb2a95010bc707868145d9a1cfa0857de", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-s390x-gnu": { + "name": "cpython", + "arch": { + "family": "s390x", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "23ca40a78ad8a61fc820d58b71e5aeb3b5f88ed7e449a04c0515b37041e8e644", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "84129181fc24fd5fd39a8dc83c5eb4dd7c51a9f105bd1b22733dba2d52da9f38", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": null + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "bfaaabee0e9cab4a7967be9759140830de1994c8f87e8e05bee5ec7fd6a99b69", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v2-gnu": { + 
"name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "c14586447c4ef79ab875b7b7b8a13e6d05eaec8627f187067e02f4b026023db6", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v2-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v2" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "69c477df92e4332382e9a1b3177155b1c2c9e6612366b385409bd17f18c49a70", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v3-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5a9c969834b90307152a8bdcef27a2797288fdfecb92911e0ebc17ec5747ccbf", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v3-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v3" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "02fad0b21f30b42742468107fe33eb23d307ba2c5670b0baa11e33fc30160fba", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v4-gnu": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "gnu", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4e110ee96813a907c7468f5c1317046c5e5ba10e2fe23b2c3d30f1ee6b4bc5c7", + "variant": "debug" + }, + "cpython-3.14.0b3+debug-linux-x86_64_v4-musl": { + "name": "cpython", + "arch": { + "family": "x86_64", + "variant": "v4" + }, + "os": "linux", + "libc": "musl", + "major": 3, + "minor": 14, + "patch": 0, + "prerelease": "b3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.14.0b3%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "6015df86031d23765c7f4c8a02e1aa3e3b5e4a5fe9f2747fcdc37d28e3f8a0f5", + "variant": "debug" + }, "cpython-3.14.0b2-darwin-aarch64-none": { "name": "cpython", "arch": { @@ -4731,8 +5563,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "3e52e6b539dca2729788a06f3f47b2edfc30ba3ef82eb14926f0a23ed0ce4cff", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": 
"5a7888b6e0bbc2abf7327a786d50f46f36b941f43268ce05d6ef6f1f733734ca", "variant": null }, "cpython-3.13.5-darwin-x86_64-none": { @@ -4747,8 +5579,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "fce29c000087f0ed966580aff703117d8238e2be043a90a2a0ec8352c0708db8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "691282c117b01de296d70bd3f2ec2d7316620233626440b35fa2271ddbcc07dc", "variant": null }, "cpython-3.13.5-linux-aarch64-gnu": { @@ -4763,8 +5595,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6957a6d66c633890fc97f3da818066cd0d10de7cf695a7c46c4c23b107c02fa7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "d7ab196fefb0cacb44869774dd6afcaed2edc90219b67601ec1875002378219f", "variant": null }, "cpython-3.13.5-linux-armv7-gnueabi": { @@ -4779,8 +5611,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "1ca9efecff540162b22e5b86152864e621c97463061171f6734cd31d50e39f1d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "b90aac358d49e0c278843b83b5c8935036effe10f004ecec903313fea199badf", "variant": null }, "cpython-3.13.5-linux-armv7-gnueabihf": { @@ -4795,8 +5627,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "4efad529678f93119e99bd5494070c76f5c54958bc9686ee12fd9e1950c80b27", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "69fe828cd149b21a6fda0937f73ef654dd8237d567916508acb328f24f9368c7", "variant": null }, "cpython-3.13.5-linux-powerpc64le-gnu": { @@ -4811,8 +5643,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "abbd685fe948653ad79624e568f9f843233e808c8756d6d4429dbe1d3e7550f9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "1a69f799fc5c0eb61708202ec5ba1514d6e5f3a547c07c53d40415d93084b903", "variant": null }, "cpython-3.13.5-linux-riscv64-gnu": { @@ -4827,8 +5659,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": 
"4a17bcc199782d0bbaaf073b0eedfac0ebfc5eeab2cc23b9b59968869708779c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2ce3570c6f3f8204e4b5e8e35896c87c71ddc038ca9a60f1111e2ea468b78f08", "variant": null }, "cpython-3.13.5-linux-s390x-gnu": { @@ -4843,8 +5675,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6d877e1b2c302d205f0bddbc76b7ca465fa227d9252147df7821d5474d4ea147", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c974786ad18943fc3d5fbe4eca7bd43ceb07e725d2d513ac4dc0e3b6dd11a89e", "variant": null }, "cpython-3.13.5-linux-x86_64-gnu": { @@ -4859,8 +5691,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "5c63e7ffe47baff0a96a685c94fb5075612817741feb4e85ec3cc082c742b4f8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "4444b5e95217c2c686bf3a689ab9655d47ea3b11c1f74714eceab021d50b7d74", "variant": null }, "cpython-3.13.5-linux-x86_64-musl": { @@ -4875,8 +5707,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "641d0cefa3124e42728fc5dac970d5a172a61d80d2a5a24995f2b6e9ddf71e3f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "5125ef4d58b3dddbb0946c29f443160de95d6e8ea79bbe9562f9dd2873651d12", "variant": null }, "cpython-3.13.5-linux-x86_64_v2-gnu": { @@ -4891,8 +5723,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "7f60b2f61fad6c846c7d7c8f523dee285c36cd9d53573314b6ca82eca4e80241", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "91cf219324d090e7b2814f33c2b7fbf4930aa01c6a0fd8960eab8874f3b8babd", "variant": null }, "cpython-3.13.5-linux-x86_64_v2-musl": { @@ -4907,8 +5739,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "21c20b03866e1ec3dcd86224cf82396e58e175768e51030ab83ba21d482cfc26", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "392dd5fd090f9aa5759318795733e37026cf13d554bcf5f90315d0f448a07228", "variant": null }, "cpython-3.13.5-linux-x86_64_v3-gnu": { @@ -4923,8 +5755,8 @@ 
"minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "ec8794e649b3e0abac7dccda7de20892ce1ba43f2148e65b2a66edeba42f4c61", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c1bd09a8f83a78dd92a6d0e2a8dbf30b99d6ca31269fd1c80e14f0769b67df3f", "variant": null }, "cpython-3.13.5-linux-x86_64_v3-musl": { @@ -4939,8 +5771,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "56576436ccc2a9e509becd72ebdbc9abf147a471df6e1022dcd6967975ccee55", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "77350988d48699f3172a25aad33b229604b6faab9f1df35589ad7aca10ec10a8", "variant": null }, "cpython-3.13.5-linux-x86_64_v4-gnu": { @@ -4955,8 +5787,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "b6aa1c65d74b528130244888ee5b47fbf52451aabac2a98e5b87873e73705d87", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f84aafa52b22484de42edb7c9965cafc52718fc03ac5f8d5ad6a92eb46ff3008", "variant": null }, "cpython-3.13.5-linux-x86_64_v4-musl": { @@ -4971,8 +5803,24 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "c3322d81ef18e862e0069bfd8824ef1a907135bf2d1c6514312854ea99f0a372", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "9510ffc50a28a1a06d5c1ed2bfd18fa0f469d5e93982d7a9289ab0ac4c8a2eee", + "variant": null + }, + "cpython-3.13.5-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "2459713eff80372e0bfcce42b23b9892eb7e3b21ea6ae5cb5e504b8c0f12e6dd", "variant": null }, "cpython-3.13.5-windows-i686-none": { @@ -4987,8 +5835,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "254754ec03dd94dc8e67f89b415253a9ee16f0d277478e0e01c25de45b7fc172", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": 
"76287476f3a2b8658d29da67e05d550fbf2db33b9e9730c6d071bd239211ffe8", "variant": null }, "cpython-3.13.5-windows-x86_64-none": { @@ -5003,8 +5851,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "361fa8ca96403f890d0ef4f202ea410b2e121273830c979d6517f2c7e733b5e2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "f5838b42985c644d0597a1a6a54fb185647bb57d4f06cbc7d3ac8dfb53326521", "variant": null }, "cpython-3.13.5+freethreaded-darwin-aarch64-none": { @@ -5019,8 +5867,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "a29cb4ef8adcd343e0f5bc5c4371cbc859fc7ce6d8f1a3c8d0cd7e44c4b9b866", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "52e582cc89d654c565297b4ff9c3bd4bed5c3e81cad46f41c62485e700faf8bd", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-darwin-x86_64-none": { @@ -5035,8 +5883,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "52aeb1b4073fa3f180d74a0712ceabc86dd2b40be499599e2e170948fb22acde", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-apple-darwin-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "5aed6d5950514004149d514f81a1cd426ac549696a563b8e47d32f7eba3b4be3", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-aarch64-gnu": { @@ -5051,8 +5899,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-aarch64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "0ef13d13e16b4e58f167694940c6db54591db50bbc7ba61be6901ed5a69ad27b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "461832e4fb5ec1d719dc40f6490f9a639414dfa6769158187fa85d4b424b57cd", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-armv7-gnueabi": { @@ -5067,8 +5915,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", - "sha256": "4eb024f92a1e832c7533d17d566c47eabcb7b5684112290695ef76a149282ee4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-armv7-unknown-linux-gnueabi-freethreaded%2Blto-full.tar.zst", + "sha256": "469fc30158fbcb5c3dc7e65e0d7d9e9e0f4de7dffdc97492083781f5f6216356", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-armv7-gnueabihf": { @@ -5083,8 +5931,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", - "sha256": "cac9d5fe76dcc4942b538d5f5a9fa6c20f261bc02a8e75821dd2ea4e6c214074", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-armv7-unknown-linux-gnueabihf-freethreaded%2Blto-full.tar.zst", + "sha256": "9db3d9dbb529068d24b46c0616307f3c278e59c0087d7a1637105afde3bc5685", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-powerpc64le-gnu": { @@ -5099,8 +5947,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "66545ad4b09385750529ef09a665fc0b0ce698f984df106d7b167e3f7d59eace", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-ppc64le-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "c65c75edb450de830f724afdc774a215c2d3255097e0d670f709d2271fd6fd52", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-riscv64-gnu": { @@ -5115,8 +5963,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "a82a741abefa7db61b2aeef36426bd56da5c69dc9dac105d68fba7fe658943ca", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-riscv64-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "716e6e3fad24fb9931b93005000152dd9da4c3343b88ca54b5c01a7ab879d734", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-s390x-gnu": { @@ -5131,8 +5979,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", - "sha256": "403c5758428013d5aa472841294c7b6ec91a572bb7123d02b7f1de24af4b0e13", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-s390x-unknown-linux-gnu-freethreaded%2Blto-full.tar.zst", + "sha256": "27276aee426a51f4165fac49391aedc5a9e301ae217366c77b65826122bb30fc", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64-gnu": { @@ -5147,8 +5995,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "33fdd6c42258cdf0402297d9e06842b53d9413d70849cee61755b9b5fb619836", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "f5eb29604c0b7afa2097fca094a06eb7a1f3ca4e194264c34f342739cae78202", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64-musl": { @@ -5163,8 +6011,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "35c387d86e2238d65c16962003475074a771bd96a6e6027606365dd9b23307c2", + 
"url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "61f5960849c547313ff7142990ec8a8c1e299ccf3fcba00600bc8ee50fbb0db9", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v2-gnu": { @@ -5179,8 +6027,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "5de3a03470af84e56557a5429137f96632536b0dc07ec119988e9936fcd586b5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v2-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "6348a6ca86e8cfe30557fecfc15d6facefeeecb55aba33c338d6aa5830495e5b", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v2-musl": { @@ -5195,8 +6043,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "4e4f198e3cb5248921a7292620156412735b154201571f5da6198167b9888b5c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v2-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "4200aa24f5ca3b1621185fe0aee642f84e91ec353e0da2ca47a62c369855d07a", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v3-gnu": { @@ -5211,8 +6059,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "b826fe552e2fcc12859f963410e2c1a109929fe5b73978a74f64c6c812fef92f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v3-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": "17ada48150f49c1d9aefc10839708d6f31f2665fa625103d45ccf88af46c9674", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v3-musl": { @@ -5227,8 +6075,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "20702fe10bef77477eb02c5a1817650560142b477572f391c297e137daf7a057", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v3-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "b8a951e4eb04042af91864f3934e8e8b7527e390720ba68507a4b9fe4143032b", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v4-gnu": { @@ -5243,8 +6091,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", - "sha256": "d4024ab82373300a8c1262033af61f64b3348379afe9a112004cf6988468b551", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v4-unknown-linux-gnu-freethreaded%2Bpgo%2Blto-full.tar.zst", + "sha256": 
"7bb14c995b2bc7b0a330a8e7b33d103d9f99ecb9c30ff8ca621d9d066bb63d9f", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-linux-x86_64_v4-musl": { @@ -5259,8 +6107,24 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", - "sha256": "ab04b63ce58c9ff63c5314ad32a28b25b0b1dbd0a521e1ad056550142f55de43", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v4-unknown-linux-musl-freethreaded%2Blto-full.tar.zst", + "sha256": "a78845959c6f816f9a0fa602403b339d67d7125515f5b0fbe5c0ef393e4ce4e9", + "variant": "freethreaded" + }, + "cpython-3.13.5+freethreaded-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 13, + "patch": 5, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "97041594d903d6a1de1e55e9a3e5c613384aa7b900a93096f372732d9953f52a", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-windows-i686-none": { @@ -5275,8 +6139,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "9256369550eeb71729dca0094d098d161035806c24b9b9054cb8038b05bd7e0f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-i686-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "02d20b1521420ef84160c3a781928cdc49cd2e39b3850fb26f01e4e643b8379e", "variant": "freethreaded" }, "cpython-3.13.5+freethreaded-windows-x86_64-none": { @@ -5291,8 +6155,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", - "sha256": "9da2f02d81597340163174ee91d91a8733dad2af53fc1b7c79ecc45a739a89d5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-pc-windows-msvc-freethreaded%2Bpgo-full.tar.zst", + "sha256": "39e19dcb823a2ed47d9510753a642ba468802f1c5e15771c6c22814f4acada94", "variant": "freethreaded" }, "cpython-3.13.5+debug-linux-aarch64-gnu": { @@ -5307,8 +6171,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6280fc111ff607571362c12e8b1b12a3799a3fbec60498bd0ebff77d30efa89f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "fc7e5a8765892887b887b31eaa03b952405c98ad0b79bf412489810ab4872a18", "variant": "debug" }, "cpython-3.13.5+debug-linux-armv7-gnueabi": { @@ -5323,8 +6187,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": 
"48b4fb6454358be8383598876d389fcf5cb5144af07d200e0b0e7c7824084e3e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "61e5a7e1c6bd86db10302899fe1053f54815a4d3e846ad3e8d4ebc5a858aa1ae", "variant": "debug" }, "cpython-3.13.5+debug-linux-armv7-gnueabihf": { @@ -5339,8 +6203,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "bd5df3367a45decbb740432d8e838975ac58c40fc495ed54dbbe321dccb0cd44", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "e400e3e2ae88108034e62076edbc5518974eb76a46d236b5322fa7b2aa2110f4", "variant": "debug" }, "cpython-3.13.5+debug-linux-powerpc64le-gnu": { @@ -5355,8 +6219,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "e95e45ad547ab71a919a22d76b065881926a00920c5cc1ee6d97be4d66daa12d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1216b39a34397f25065f80bb6f3ffa56f364da9dae09a519df9d384c3a1c7505", "variant": "debug" }, "cpython-3.13.5+debug-linux-riscv64-gnu": { @@ -5371,8 +6235,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "ba433760881a5b0da9b46dcdcf2dd8ca6917901e78dbac74c4ba3ab5e6f3ced3", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f2212f189076e6631e4447cc0c37872d03fc39eb92bb717a922899852e17765b", "variant": "debug" }, "cpython-3.13.5+debug-linux-s390x-gnu": { @@ -5387,8 +6251,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "f6f6f8187ede00fa3938d4c734008eafec2041636a8987e2c85df3273004b822", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "f672e057e98d5909b6ef94558018036254d4d4e40660cfb1654ce2c3b87bcd82", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64-gnu": { @@ -5403,8 +6267,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "137c6262fa2b26f20d7e1bed1c1467a7665086bb88dc1c2cb40cf23e7da6d469", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3e798c809c4a9fc7308626ff72540036d5f01f9ac85ce6176acbdd581e973302", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64-musl": { @@ -5419,8 +6283,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "f3c2dce0fb4b62106d3957fc23b757a604251ff624a0d66ef126fab4ece9de0c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "1d5a3c193673b52eab5c5a362a18e6e184e4298a36a809fe5e21be6f01f9b76f", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v2-gnu": { @@ -5435,8 +6299,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "962846d36bbb78d76f6ac1db77fb37bb9fdda4d545d211048cc3212247890845", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8886ff6fd5483254a234e4ce52b4707147bc493f6556fa9869da4d1264af9440", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v2-musl": { @@ -5451,8 +6315,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "515b5cdbb612b606a2215aa2ce94114a2442b34e96a1fcc4a45cb3944a0cc159", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "8894486f97353fd0279fd7e4d107625aa57c68010c6fc8fcba6a549e3c4aa499", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v3-gnu": { @@ -5467,8 +6331,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "7e7ec18893cff4597a6268416baba95ac640644527ae7531e074a787819eff8e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "914baf954e3decbe182413b38a8a5c347246e889a4d34a91db3f4466945dba0a", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v3-musl": { @@ -5483,8 +6347,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "bed63cf51a8ef135e7a03aa303c7e5ee76934cd845e946a64033be8b4d3ea246", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "9c5de2ef49b4b871157e81cd7a0f4e881971d0c16873e6ad6376ace2914c07c5", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v4-gnu": { @@ -5499,8 +6363,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "98d2a0c121042905aa874d2afd442098a99d3e00e16af16d940e9460339c7f73", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": 
"48f0eaeeac55dbe85593e659962e6ea44cc638f74cc0aab1d943da805a4eca39", "variant": "debug" }, "cpython-3.13.5+debug-linux-x86_64_v4-musl": { @@ -5515,8 +6379,8 @@ "minor": 13, "patch": 5, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.13.5%2B20250612-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "d0125aa6426294f2b66a5ab39a13e392d93ff2e61d7304814fae937297c0d45f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.13.5%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "d0b7928f0e56c3509d541ecb5538d93d0dd673ba6460159f0d05c6a453c575c4", "variant": "debug" }, "cpython-3.13.4-darwin-aarch64-none": { @@ -9739,8 +10603,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "74dd3b2bbbcb5c87a5044e1f3513fe3b07e72fcfdeb039d0ae83b754911ac31e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "0f3a56aeca07b511050210932e035a3b325abb032fca1e6b5d571f19cc93bc5b", "variant": null }, "cpython-3.12.11-darwin-x86_64-none": { @@ -9755,8 +10619,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "16797cdee1b879ce0f32d9162f2a3af8b91d8ccb663c75ed3afc2384845c24d7", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "1543bcace1e0aadc5cdcc4a750202a7faa9be21fb50782aee67824f26f2668ad", "variant": null }, "cpython-3.12.11-linux-aarch64-gnu": { @@ -9771,8 +10635,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "df383a0992be93314880232c2ecbe9764ee65caee5f72a13ef672684fc7b8063", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "1b7260c6aa4d3c7d27c5fc5750e6ece2312cf24e56c60239d55b5ad7a96b17cb", "variant": null }, "cpython-3.12.11-linux-armv7-gnueabi": { @@ -9787,8 +10651,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "92c9c4bd3e4f8bd086ae7ff234273898e83340e4d65fa5b50b0e87db8197fdff", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "b977bb031eeffcf07b7373c509601dd26963df1a42964196fccf193129be6e3b", "variant": null }, "cpython-3.12.11-linux-armv7-gnueabihf": { @@ -9803,8 +10667,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": 
"197e34e0a74504f2700d4e4c11cb0d281aa13c628af8b9ad21532250bda45659", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "bf67827338f2b17443e6df04b19827ed2e8e4072850b18d4feca70ba26ba2d56", "variant": null }, "cpython-3.12.11-linux-powerpc64le-gnu": { @@ -9819,8 +10683,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "f4bce8f1dcf7bef91d1ea54af48a45333983a41b83c0b8e33e9b07bb4b4499a0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "22f894d8e6d6848c4bc9ead18961febeaaecfea65bcf97ccc3ca1bd4fdcd4f70", "variant": null }, "cpython-3.12.11-linux-riscv64-gnu": { @@ -9835,8 +10699,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "26afb0f604cd9cac7af5e3078bbdcb7f701cd1f4956fba0620cc184bc9b32927", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "96c5c30c57d5fd1bdb705bfe73170462201a519f8a27cc0a394cd4ed875ae535", "variant": null }, "cpython-3.12.11-linux-s390x-gnu": { @@ -9851,8 +10715,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1c7327875d7669862fd1627c57a813378d866998c5d5008276c8952af7323d19", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "1967e03dd3d1f8137d29556eec8f7a51890816fd71c8b37060bd061bce74715a", "variant": null }, "cpython-3.12.11-linux-x86_64-gnu": { @@ -9867,8 +10731,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "15a3c9964e485f04d3c92739aca190616e09b2c4fac29b263432f6f29f00c6cf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2eed351d3f6e99b4b6d1fe1f4202407fe041d799585dffdf6d93c49d1f899e37", "variant": null }, "cpython-3.12.11-linux-x86_64-musl": { @@ -9883,8 +10747,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "0b68e1de34febc8c57df3d0bf13e3397493bacc432b4cc3d27a338c2d4b8a428", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "8b201f157e437f4f3777e469b50f8e23dfa02f1c6757dfb2a19bde9f1bae9e0a", "variant": null }, "cpython-3.12.11-linux-x86_64_v2-gnu": { @@ 
-9899,8 +10763,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "351b7a97142bc0539ef67e1ad61961a99df419487af422b2242664702f3d3fde", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "978249b5f885216be70d6723f51f5d6ad17628bacc2b1b98078e1273326ef847", "variant": null }, "cpython-3.12.11-linux-x86_64_v2-musl": { @@ -9915,8 +10779,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "2a4d4edb63585bbfc4afa4bddd5e3efb20202903925ace6f0797df1ad2a6189d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "b81b771718ed550c4476df3c07d38383a2c9341e2e912fd58c224820cb18195c", "variant": null }, "cpython-3.12.11-linux-x86_64_v3-gnu": { @@ -9931,8 +10795,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "bb742a2e0bc09b0afd4c37056ea0bda16095d8468442eadb6285716d0fcb8ab0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "651f0da21ac842276f9c3f955a3f3f87d0ad6ec1bba7c3bb8079c3f4752355b3", "variant": null }, "cpython-3.12.11-linux-x86_64_v3-musl": { @@ -9947,8 +10811,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "5bf4aab9fea91a6daae95f40cd12c7d4127bed98bc5ea4fcbee9f03afc1530ef", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "4d7cbbf546b75623d0f7510befd2cf0a942b8bc0a38d82876f0844383aa27ba2", "variant": null }, "cpython-3.12.11-linux-x86_64_v4-gnu": { @@ -9963,8 +10827,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "05f1a3f7711aae5c65e4557ea102315a779cbe03e39f16dc405f8fc8ede25e83", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b368b2dd0939f9e847acb5b0851081dcf2151e553bea4ac6f266a6ca0daeca01", "variant": null }, "cpython-3.12.11-linux-x86_64_v4-musl": { @@ -9979,8 +10843,24 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "d277d0d6d58436ca6e46b7d5d9e1758a89e6b90a0524d789646a4a589c0be998", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "da76b72b295b230022a839d42edfde36f79ebfd70c9b381f6ed551066c3942bd", + "variant": null + }, + "cpython-3.12.11-windows-aarch64-none": { + "name": "cpython", + "arch": { + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 12, + "patch": 11, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "b5e56ebce5ea3cc0add5e460a254da1e095fdcf962552dceea1be314c45115bf", "variant": null }, "cpython-3.12.11-windows-i686-none": { @@ -9995,8 +10875,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "41d5dd36a6964f37b709061c5c01429579ef3c3e117ed7043d6a3a1f336671d6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0db0a69bab9aa6159f62d99074918b67e2a81c84b445570befeb583053663b58", "variant": null }, "cpython-3.12.11-windows-x86_64-none": { @@ -10011,8 +10891,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "51bc462f3d6caf4aef3d77209d01cd5f6c8fe8213c1ae739e573e1c2c473cb2b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "195033883da90a35a57aecce522eb068b9b0a36908e6e07b91929f0acf646c8f", "variant": null }, "cpython-3.12.11+debug-linux-aarch64-gnu": { @@ -10027,8 +10907,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "208f407d3880dc84772d8a322776011abf362ac004540debbd2ea5e5f884f398", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "b0f04667686f489a410bb3e641b6abefa75dad033cd6d2725ab49a40413e15b7", "variant": "debug" }, "cpython-3.12.11+debug-linux-armv7-gnueabi": { @@ -10043,8 +10923,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "880268f3e83952d5bdb32422da2ce7f24ee24c6251b946514ffcdbaedc4ced37", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "d882d16f304b858173b40cca5c681e8f9c9f13774c26390303bd7e7657a1d73c", "variant": "debug" }, "cpython-3.12.11+debug-linux-armv7-gnueabihf": { @@ -10059,8 +10939,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "d3128aba3c94c46d0f5d15474af6a8b340b08ada33a31ad20387cfa46891752c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "5934f6214d60a958fa3cb3147dad1941d912e0a9f37280de911cbf51a2a231be", "variant": "debug" }, "cpython-3.12.11+debug-linux-powerpc64le-gnu": { @@ -10075,8 +10955,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "3c43d94ef5fc2db0fd937f16616c9aceaf0ebc4846ae9454190ed30b0ad4830c", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "a2c821f4a83c3a80d8ec25cf3ca5380aa749488d87db5c99f1c3100069309f5f", "variant": "debug" }, "cpython-3.12.11+debug-linux-riscv64-gnu": { @@ -10091,8 +10971,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "be24538f1d59620a3229582f7adf9ca0df3129bd6591ff45b6ce42d1bb57602f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d1ac376b8756a057ba0d885171caa72bc7cd7ab7436ebc93bd7c0c47cff01d05", "variant": "debug" }, "cpython-3.12.11+debug-linux-s390x-gnu": { @@ -10107,8 +10987,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "fa5095dd99ffa5e15dbe151c3d98dbe534c230ce6b9669eef34e037fc885ed91", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8a57e27c920d1c93d45a3703c9f3fe047bac6305660a6d5ce2df51b0f7cfef26", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64-gnu": { @@ -10123,8 +11003,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b1b114b14624ec9955ea1404908636580280a8537ce85ace2fd48197edf82ee0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6b1e42f30f027dc793f6edaf35c2ff857c63b0f72278c886917e99b6edd064b1", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64-musl": { @@ -10139,8 +11019,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "8535b0850eab8f8cf6e29a5642f7501f5de0318d7805de6aa2cc69c0b7f4895d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": 
"62b9039f765e56538de58cb37be6baaf2d9da35bb6d95c5e734b432ccec474f8", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v2-gnu": { @@ -10155,8 +11035,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "d701b1a19ce8fd94c01a72cbe70dd0b79cb59686a7f2fd28c8d68c38d7603f44", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "e1fb28c54a76f4e91f4d53fd5fd1840c7f0045049f7fca29f59c4d7bdfa8134d", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v2-musl": { @@ -10171,8 +11051,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "584bfb7e4d2bd9232072ab51fef342ba250d288cb97066a88713de33280332ad", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "c8d4fc92c668c0455a3dce10b2c107651a0d0676e449d30f2d4b6bb3cf2dac1d", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v3-gnu": { @@ -10187,8 +11067,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b7bdaf17df1f0bb374f2553d80e69bd44e6bbc1776a00253661eddbccffc7194", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "6cd111fa008f0a30345d0475e53f99148dc1aab3a39af73b7029ef4fc17c2717", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v3-musl": { @@ -10203,8 +11083,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "ee50314ad02b40bf613e9247718a77ac6b5c61e09254c772a5ccea4a3b446262", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "3f4595219aaa4b55f3169f71895bac0f63563a2e27c3653ba5008249d7eb4ed0", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v4-gnu": { @@ -10219,8 +11099,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "7e16a050d0d0f91312048cb98a1f06d7e300c4723076fdf28d28fa282c45f8a2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "8976e1ef981ac4ceb186cb9bf367c279361060f468237a191f2ca2e52fd7a08b", "variant": "debug" }, "cpython-3.12.11+debug-linux-x86_64_v4-musl": { @@ -10235,8 +11115,8 @@ "minor": 12, "patch": 11, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.12.11%2B20250612-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": 
"44458a2a2ef2e1bbc4fb226b479d5d00a2fc15788c8b970d8df843e3535b0595", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.12.11%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "9008ed69a57d76a2e23b6119603217782a8ea3d30efebb550292348223ca87a5", "variant": "debug" }, "cpython-3.12.10-darwin-aarch64-none": { @@ -14283,8 +15163,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "3cc448659ee0d6aff8a90ca0dcaf00c29974f5d48ccc2c37e7a6e3baa6806005", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "7b32a1368af181ef16b2739f65849bb74d4ef1f324613ad9022d6f6fe3bb25f0", "variant": null }, "cpython-3.11.13-darwin-x86_64-none": { @@ -14299,8 +15179,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "6617e8e95ccbbd27fc82f29b0e56e9d9b8a346435c3510374e4410bfd1150421", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "7e9a250b61d7c5795dfe564f12869bef52898612220dfda462da88cdcf20031c", "variant": null }, "cpython-3.11.13-linux-aarch64-gnu": { @@ -14315,8 +15195,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "ac7257a5c1c9757ce4aa61d6c9bc443cd8ab052105b0e1c6714040c6e9e50eff", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "e39b0b3d68487020cbd90e8ab66af334769059b9d4200901da6a6d0af71a0033", "variant": null }, "cpython-3.11.13-linux-armv7-gnueabi": { @@ -14331,8 +15211,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "0b01b96e0e4190f64fef6e2c76d0746321fc8cc91c7f3319452a90eaaa376c00", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "48c8d43677ffbdff82c286c8a3afb472eba533070f2e064c7d9df9cbb2f6decf", "variant": null }, "cpython-3.11.13-linux-armv7-gnueabihf": { @@ -14347,8 +15227,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "f30cc16c9ea9a2795b5cafef91f3637165781a6a7a54fdc4baf6438a0800e7ce", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "51e2914bb9846c2d88da69043371250f1fb7c1cafbc511d34794dbec5052cf98", "variant": null }, "cpython-3.11.13-linux-powerpc64le-gnu": { @@ 
-14363,8 +15243,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "472ec854f7944528f366b08e8f6efbb4c02ed265eecc259c0e1f7cf12400ea14", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "be21c281b42b4fc250337111f8867d4cc7ced4f409303cc8dd5a56c6c6a820c7", "variant": null }, "cpython-3.11.13-linux-riscv64-gnu": { @@ -14379,8 +15259,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "024ba68fc755b595f4a21dbc1d8744231e51b76111d8d83e96691fb7acbf37a5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "ab9d02b521ca79f82f25391e84f35f0a984b949da088091f65744fcf9a83add9", "variant": null }, "cpython-3.11.13-linux-s390x-gnu": { @@ -14395,8 +15275,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "6f404269ce33fe0dd8cc918da001662b6ffdfbe7eb13f906cbc92e238f75f6be", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "2ab4713ea357a1da9da835712ea73b40aa93fe7f55528366e10ea61d8edb4bd0", "variant": null }, "cpython-3.11.13-linux-x86_64-gnu": { @@ -14411,8 +15291,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "059cbbfd84bfc9ef8a92605fa8aef645bbb45b792cac8adf865050a5e7d68909", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "096a6301b7029d11db44b1fd4364a3d474a3f5c7f2cd9317521bc58abf40b990", "variant": null }, "cpython-3.11.13-linux-x86_64-musl": { @@ -14427,8 +15307,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "0237b76f625f08683a2e615ae907428240d90898b17a60bdec88a85bf9095799", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "7aba64af498dfc9056405e21d5857ebf7e2dc88550de2f9b97efc5d67f100d18", "variant": null }, "cpython-3.11.13-linux-x86_64_v2-gnu": { @@ -14443,8 +15323,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "9ae198daf242ffd6ad5b395594aa157aba62a044d007202cb03659fbb94d3132", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "806db935974b0d1c442c297fcb9e9d87b692e8f81bd4d887927449bb7eef70bf", "variant": null }, "cpython-3.11.13-linux-x86_64_v2-musl": { @@ -14459,8 +15339,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "a74c87f366001349fcf3297b87698ac879256ed4b73776ff8fa145c457c0cd13", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "e1dfc3b7064af3cbc68b95bdefcb88178fa9b3493f2a276b5b8e8610440ad9f3", "variant": null }, "cpython-3.11.13-linux-x86_64_v3-gnu": { @@ -14475,8 +15355,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "b24df39e08456e50fc99c43e695a46240b11251e8b43666f978ee98ec1197e05", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c429bb26841da0b104644c1ab11dc8a76863e107436ad06e806f6bb54f7ec126", "variant": null }, "cpython-3.11.13-linux-x86_64_v3-musl": { @@ -14491,8 +15371,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "2767b36bb48da971dc5af762b42df2c9d1f4839326d26c5a710b543372dcc640", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "c618c57d50dd9bdd0f989d71dec9b76a742b051c1ae94111ca15515e183f87ee", "variant": null }, "cpython-3.11.13-linux-x86_64_v4-gnu": { @@ -14507,8 +15387,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "a1478d014d07e4a56f0c136534931989a93110ab0b15a051a33fbf0c22bec0d2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "c1cf678915eb527b824464841c2c56508836bf8595778f39a9bbb7975d59806d", "variant": null }, "cpython-3.11.13-linux-x86_64_v4-musl": { @@ -14523,8 +15403,24 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "adb7b67a8522c1ef1b941da9fd47dd7c263c489668a40bc9d0b0e955b0e25b18", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "d527c8614340ac46905969ac80e2c62327b7e987fbd448cfd74d66578ab42c67", + "variant": null + }, + "cpython-3.11.13-windows-aarch64-none": { + "name": "cpython", + "arch": 
{ + "family": "aarch64", + "variant": null + }, + "os": "windows", + "libc": "none", + "major": 3, + "minor": 11, + "patch": 13, + "prerelease": "", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-aarch64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "3af266f887d83e4705e2ceb2eb7d770f9c74454d676e739e768097d3ff9dc148", "variant": null }, "cpython-3.11.13-windows-i686-none": { @@ -14539,8 +15435,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "bdb7c0727af0b0d0d5f0d6b37a3139185a0f9259e4ce70f506c23e29e64fcb0f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "b38912438477ed7a7cb69aa92a5a834ffbb88d8fa5026eb626f1530adb3e00c7", "variant": null }, "cpython-3.11.13-windows-x86_64-none": { @@ -14555,8 +15451,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "22c2ab8be0d4028ddc115583f2c41c57ee269c115ef48a41ddde9adba5bac15b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "edb3eb9c646997de50b27932fdf10d8853614bdbd7d651c686459fc227776c1a", "variant": null }, "cpython-3.11.13+debug-linux-aarch64-gnu": { @@ -14571,8 +15467,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "fbd958ddbe10dd9d9304eb3d3dc5ed233524e1e96746e7703ceafedf2af3b82e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5a58a85c773dcfd33b88b345fc899ab983e689fe5bf5ca6682fe62d1f3b65694", "variant": "debug" }, "cpython-3.11.13+debug-linux-armv7-gnueabi": { @@ -14587,8 +15483,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "b03bcfe961888241beefc5648802041764f3914bcb7aadce8d2cbfffd23694d4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "6b24708d696e86792db8214cb20d7c1bd9a0d03f926542cde7a5251a466977d8", "variant": "debug" }, "cpython-3.11.13+debug-linux-armv7-gnueabihf": { @@ -14603,8 +15499,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "3b7d013b8c200194962ce7dd699d628282ae4343ecdfe33ab1e4ac3cb513e5a5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "ddd27f58a436b31bf1a3f39d53c670ab0ed481f677b1602d5fb0a5a89f471069", "variant": 
"debug" }, "cpython-3.11.13+debug-linux-powerpc64le-gnu": { @@ -14619,8 +15515,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "fc84e9f6c98b76525ae2267d24f27c69da6e1ddd77f1cac2613d8b152fa436f1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5306fced1a898247f9e3cc897a28f05b647d8b70ed3ece80ea9f7fa525459d94", "variant": "debug" }, "cpython-3.11.13+debug-linux-riscv64-gnu": { @@ -14635,8 +15531,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "fc963382c154fbb924b05895cc9849e83998a32060083f7167ae53f5792ac145", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "fee8c8cb156c0aa84f41759b277bc27c6ce004c1bbfd03a202c8b0347ea29e50", "variant": "debug" }, "cpython-3.11.13+debug-linux-s390x-gnu": { @@ -14651,8 +15547,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "01679e250e8693b9d330967741569822db15693926bcdf8e4875b51a8767e9c1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "eff457ef514ffaf954fa2bfd63fde5fc128a908e5a0d72fe8dab0e4146867f54", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64-gnu": { @@ -14667,8 +15563,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "206917fe06cdeeb76abd30e3dd01a71355fd41b685c1dbbddbfd0ad47371d5b6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "23c7d6c58a3e9eb0055b847a72053082e1250b04c39ee0026738d0a2298d6dbb", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64-musl": { @@ -14683,8 +15579,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "5b7a576ba0e99e7cf734b5a0f1f0fb48564c42a04d373e1370b22f2e70995d79", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "c51dbba70ae11f65a0399d5690a4c1fbb52d9772fc8b1467ed836247225db3af", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v2-gnu": { @@ -14699,8 +15595,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "1b80e41e4620e71adb0418a8bb06ec8999aa0dc69efdec0e44ca28c94543e304", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "479bc0f7b9bae4dde42ec848e508ecd8095f28ee4e89ef1f18e95ec2e29aa19d", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v2-musl": { @@ -14715,8 +15611,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "113f457a87a866f42662cf5789eace03b7548382e2dd0a6b539338defe621697", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "a25ddc1e2588842ada52fdf4211939d5e598defd3d45702ec0d9dfa30797060a", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v3-gnu": { @@ -14731,8 +15627,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "f00ae85aa8d900bf2d4e5711395c13458ff298770281dac908117738491cbe51", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d9f819fe8cbd7895c9b9d591e55ca67b500875c945cc0a1278149267d8cdd803", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v3-musl": { @@ -14747,8 +15643,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "9f10f5b6622b17237163ae8bff9ec926ce9c44229055c9233e59f16aa7d22842", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "dd22dd11e9bc4bbc716c1af20885c01a3d032eb1ce7bb74f9f939f6a08545ddc", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v4-gnu": { @@ -14763,8 +15659,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "6e4de8ff2660b542278c84bf63b0747240da339865123e3a5863de2d60601ba6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ab7171c7e0dcfdf7135aaed53169e71222cddc8c4133b7d51f898842bb924f0e", "variant": "debug" }, "cpython-3.11.13+debug-linux-x86_64_v4-musl": { @@ -14779,8 +15675,8 @@ "minor": 11, "patch": 13, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.11.13%2B20250612-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "f973507e2c54a75af1583ae7e5a6bf6ba909c5d0e372f306aa6a4d6be8eb92f9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.11.13%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "c02f0ef29ce93442ac3a61bbf3560c24d74d34b8edb46b166724ff139cde8f26", "variant": "debug" }, "cpython-3.11.12-darwin-aarch64-none": { @@ -18571,8 +19467,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "d588e367ad0ccc96080f02a6e534b272e1769aeddc0a2ce46da818c42893ebfd", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "4ad4b0c3b60c14750fb8d0ad758829cd1a54df318dc6d83c239c279443bb854c", "variant": null }, "cpython-3.10.18-darwin-x86_64-none": { @@ -18587,8 +19483,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "2758bbee1709eb355cf0c84a098cf51807a94e2f818eb15a735b71c366b80f9b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "9c9833f4f13eed050a1440204b0477d652ae76c8e749bc26021928d5c6fcba2b", "variant": null }, "cpython-3.10.18-linux-aarch64-gnu": { @@ -18603,8 +19499,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "89a01966d48e36f5ba206f3861ad41b6246160c3feae98a2ffe0c4ce611acfeb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "6c315a5ed0b77457c494048269e71e36e0fae2a9354da0bbfc65f3d583a306fa", "variant": null }, "cpython-3.10.18-linux-armv7-gnueabi": { @@ -18619,8 +19515,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "a095eaeac162f0a57d85b7f7502621d9b9572a272563a59b89b69ae4217e031e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "c0aa7dfaef03330a1009fae6ed3696062a9c6b6a879de57643222911801f6b14", "variant": null }, "cpython-3.10.18-linux-armv7-gnueabihf": { @@ -18635,8 +19531,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "559799c5b87d742b3b71dc1e7b015a9cd6d130f7b6afcf6ad8716c75c204d47e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "6a772781facf097fb9bb00fc16b9c77680fc583dbb04ef4f935f1139f5a3a818", "variant": null }, "cpython-3.10.18-linux-powerpc64le-gnu": { @@ -18651,8 +19547,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "740a2e5f935c6d7799f959731b7cd8f855c1e572ad43f0ec16417c3390f4551d", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8bbc7cd369d3c3788ca46a66b0c9f0d227054f99b7db3966a547faa7e0ede99c", "variant": null }, "cpython-3.10.18-linux-riscv64-gnu": { @@ -18667,8 +19563,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "00a436a8f8ad236d084a6d6a1308543755d9175e042b89ea3c06cc1b1651e6aa", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "954603b1e72f7b697812bb821b9820f2d1ab21b9fb166201c068df28577f3967", "variant": null }, "cpython-3.10.18-linux-s390x-gnu": { @@ -18683,8 +19579,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "d23284bc718fb049dd26056efc632042824409599cae9a4c2362277761b50e94", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "8d02a604f4ef13541a678b8f32b2955003f53471489460f867de3bbbd0b7b0a2", "variant": null }, "cpython-3.10.18-linux-x86_64-gnu": { @@ -18699,8 +19595,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "e884283a9a3bb97e9432bbda0bf274608d8fce2f27795485e4a93bbaef66e5a1", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "780e5199279301cec595590e1a12549e615f5863e794db171897b996deb5db2b", "variant": null }, "cpython-3.10.18-linux-x86_64-musl": { @@ -18715,8 +19611,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "cea06c4409e889945c448ec223f906e9e996994d6b64f05e9d92dc1b6b46a5f8", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "fac56a0d55b28dfada5b1b1ad12c38bca7fda14621f84d4dba599dfb697d0f6a", "variant": null }, "cpython-3.10.18-linux-x86_64_v2-gnu": { @@ -18731,8 +19627,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "4636b6756eb1a9f6db254aac8ae0007c39614fcbf065daf8dc547530ac77c685", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b47af0eb09bd0ae5d5b33e0bfd3a121dd8bf042ffe61d03d54be27629db55a78", "variant": null }, "cpython-3.10.18-linux-x86_64_v2-musl": { @@ -18747,8 +19643,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "6e0a68e53059d1ccf5a0efc17d60927e53c13e40b670425c121f35cd3fd10981", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "c59eac8b665419cc94c24807fd2654cc424f7f926a6b107a7e22a9599ba416ea", "variant": null }, "cpython-3.10.18-linux-x86_64_v3-gnu": { @@ -18763,8 +19659,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "1c164e2eeb586d930a427e58db57b161b8ec4b9adf4d31797fdccf78d373a290", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f2042831ec67633ad96f27407fee67b671bb5a589c8c8491dbb9420f58246db8", "variant": null }, "cpython-3.10.18-linux-x86_64_v3-musl": { @@ -18779,8 +19675,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "4ae64d7a6ba3c3cb056c2e17c18087b1052e13045e4fbb2e62e20947de78c916", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "2205ef12cd51afe189ac152b9413788eccc5e0d8c86b78f6c2209ab8d5ead0b8", "variant": null }, "cpython-3.10.18-linux-x86_64_v4-gnu": { @@ -18795,8 +19691,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "cd64acdb527382c7791188f8b5947d1a9e80375ad933e325fb89a128af60234d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "f80c94a23c67b2cd7695fb58d3dd3bb4509cbe94bf3da9200dcc7f5c06939067", "variant": null }, "cpython-3.10.18-linux-x86_64_v4-musl": { @@ -18811,8 +19707,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "8d300050b13d3674f5d19bf402780ec2fb19bf594dd75fd488b83856ed104def", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "2356bc9f121cb555921a10155126b53ca92e471e35e93644feae37ef6adbe91d", "variant": null }, "cpython-3.10.18-windows-i686-none": { @@ -18827,8 +19723,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "3df494fd91ccc55ea0b512f70d4b49b4bee781b6e31bfa65c8d859150b6d3715", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "16379aad0f72dffdcedc32240bceacf8c341c8ac9c49f1634a94bef3eb34ff91", "variant": null }, "cpython-3.10.18-windows-x86_64-none": { @@ -18843,8 +19739,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "4d6337bfafdb6af5c4c6fdb54fd983ead0c4d23cf40fb6b70fce0bd8b3b46b59", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "1d9028a8b16a2dddfd0334a12195eb37653e4ba3dd4691059a58dc18c9c2bad5", "variant": null }, "cpython-3.10.18+debug-linux-aarch64-gnu": { @@ -18859,8 +19755,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "2808899e02b96a4ed8cfe7a4e9e42372c0d8746f7cdbf52d21645bd4da1f9f9f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "ba86c13891bba5395db087bad08e2175d4fe4f7c2592f4228c8302e89b1876ae", "variant": "debug" }, "cpython-3.10.18+debug-linux-armv7-gnueabi": { @@ -18875,8 +19771,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "e327ab463c630396b9a571393adfce4d63b546a2af280c894fef1c74dbb21223", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "da75e3b55503f9cc33e1476e4933457b42c5ac0a765321a8056278056f2c6032", "variant": "debug" }, "cpython-3.10.18+debug-linux-armv7-gnueabihf": { @@ -18891,8 +19787,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "031a467de049990d3133e6ff26612e5d227abda4decfa12ea8340ca8ec7e55d4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "65c4d23b2507715b60f8157fda6651ad0490d38d3a354aa5e85c5401f7b791b5", "variant": "debug" }, "cpython-3.10.18+debug-linux-powerpc64le-gnu": { @@ -18907,8 +19803,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "9b48f5108b419e4371142ec5a65a388473e4990181c82343c3dfaf3d87f02a5a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "1ea4b070dc8795316798e5dde4a45f9bcbd3b8907ece534e73164e9e82902817", "variant": "debug" }, "cpython-3.10.18+debug-linux-riscv64-gnu": { @@ -18923,8 +19819,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "8215a063192a64fad2b5838b34d20adcd30da5cc2e9598f469eea8d3f0de09f5", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "2b9381ee30e69b0a7722a1b0917a4be8abc9b22d3542c918c8810d3bf10144f8", "variant": "debug" }, "cpython-3.10.18+debug-linux-s390x-gnu": { @@ -18939,8 +19835,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "77b9f02331df8acf41784648f75cc77c5ab854546a405b389067f61ded68a5c6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3ea8a041ed81fbc11e2781cc6b57ef0abf2ecd874374603153213d316da19e5e", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64-gnu": { @@ -18955,8 +19851,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "bba5598c6fc8f85e68b590950b5e871143647921197be208a94349d7656eafdf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "afd58d81e22c5f96c7021c27aedb89bc3be3c40d58625035a5b7158bb464a89f", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64-musl": { @@ -18971,8 +19867,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "6c006d1daae48ba176bb85fd0c444914d9e2ee20b58e8131c158bb30fe9097c9", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "c82f5cb37140257016a05c92a83813c8ad85f108898c6076750b4bfc8e49052d", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v2-gnu": { @@ -18987,8 +19883,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "d93af68181616ae0f2403ac74d1cc2ea6ebced63394da5b3a3234398748ce4cf", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "fdd72ff3418b1dd74fdc5514d392e309fe615739aafeeeed80276bfa28646e93", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v2-musl": { @@ -19003,8 +19899,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "af25b4aed5f7cdeb133c19c61f31038020315313eadbc4481493c8efce885194", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": 
"d13113baa9f5749b5f70a2e4b396393363df1bba14c4fca6d13455ab92786f16", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v3-gnu": { @@ -19019,8 +19915,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "3da0ce9cd97415c86cdb8556e64883678e6b4684f74600b3bc9c90424db787af", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "7ac330ff09a193ef7e4a93751dd1bc243a8a2d35debdb9f1f4c967ee98be7c9b", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v3-musl": { @@ -19035,8 +19931,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": "65fe2745075de7290c58b283af48acb6ab403396792a9682d24523bd025d7b01", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ffa713da073c0ac6b9d46e8c34f682c936c1ee6ecacfdaa369617d621bc5f800", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v4-gnu": { @@ -19051,8 +19947,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "9b91c5309cbfee08636d405fce497b371e69787e9042be62dd8e262fc3800422", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "d22dc14204be742df984cd74b086c5bce23ea6071bbccf66e0a4e9373fb7e1fc", "variant": "debug" }, "cpython-3.10.18+debug-linux-x86_64_v4-musl": { @@ -19067,8 +19963,8 @@ "minor": 10, "patch": 18, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.10.18%2B20250612-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "aa146828d807b8c206541cc8b0bf2b5e7cecd1a9cf5f03b248e73b69b8ef5190", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.10.18%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "f8b309b55988356eeb43b1d6daaaed44c3f2c7615abb015145e750cc81c84f13", "variant": "debug" }, "cpython-3.10.17-darwin-aarch64-none": { @@ -24011,8 +24907,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-aarch64-apple-darwin-install_only_stripped.tar.gz", - "sha256": "05741156232cc28facaefbda2d037605dd71614d343c7702e0d9ab52c156945e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-aarch64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "901b88f69f92c93991b03315f6e9853fdf6e57796b7f46eae2578f8e6cec7f79", "variant": null }, "cpython-3.9.23-darwin-x86_64-none": { @@ -24027,8 +24923,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64-apple-darwin-install_only_stripped.tar.gz", - "sha256": 
"3e6cf3c8c717f82f2b06442e0b78ececa7e7c67376262e48bf468b03e525ef31", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64-apple-darwin-install_only_stripped.tar.gz", + "sha256": "663403464b734f7fcb6697dc03a7bb4415f1bd7c29df8b0476d145e768b6e220", "variant": null }, "cpython-3.9.23-linux-aarch64-gnu": { @@ -24043,8 +24939,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "96ec244baeaf57a921da7797da41e49e9902a2a6b24c45072a8acee7ff9e881d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-aarch64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "fd037489d2d0006d9f74f20a751fd0369c61adf2c8ead32c9a572759162b3241", "variant": null }, "cpython-3.9.23-linux-armv7-gnueabi": { @@ -24059,8 +24955,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", - "sha256": "9c58d7126db2a66b81bff94b0e15a60974687f9ef74985d928e44397a10986cb", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-armv7-unknown-linux-gnueabi-install_only_stripped.tar.gz", + "sha256": "594b85658309561642361b1708aac18579817978ffdbb08f1c5f7040f9c30f28", "variant": null }, "cpython-3.9.23-linux-armv7-gnueabihf": { @@ -24075,8 +24971,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", - "sha256": "929fbed568da7325b7db351d32cd003ee77c4798f0f946a6840935054a07174f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-armv7-unknown-linux-gnueabihf-install_only_stripped.tar.gz", + "sha256": "40eedb55eda5598dc9335728b70f7dff8b58be111b462e392cf2f8ba249c68ac", "variant": null }, "cpython-3.9.23-linux-powerpc64le-gnu": { @@ -24091,8 +24987,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "b52371b6485ab4603da77ff49b8092d470586a2330e56d58d9f8683a5590ae68", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-ppc64le-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "4c294d9bd701ffaa60440e0e1871c5570c690051b7c8f1b674f8e7fc2239e8c9", "variant": null }, "cpython-3.9.23-linux-riscv64-gnu": { @@ -24107,8 +25003,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "ef4e83f827377bdb4656130ee953b442a33837ca31ef71547ac997d9979b91e4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-riscv64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "41c2dd0ab80b4ddd60a22fc775d87bec1e49c533ee0b0aec757e432df17c06ea", "variant": null }, "cpython-3.9.23-linux-s390x-gnu": { @@ -24123,8 
+25019,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "c8f5d2951900e41db551b692f2aa368e89449d3a4cf2761a80eb350fbd25bc0b", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-s390x-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "530e5bb6e47f5e009768b96d9bed2d0c4fe21f1bc113a35571c6981922dd345f", "variant": null }, "cpython-3.9.23-linux-x86_64-gnu": { @@ -24139,8 +25035,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "461a5ef25341b2e9f62f21d3fa4184ac51af59cebb5fb9112fe64e338851109f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "b11d434321025e814b07e171e17cb183b4fe02bddbec5e036882c85fb7020b18", "variant": null }, "cpython-3.9.23-linux-x86_64-musl": { @@ -24155,8 +25051,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "f5f3b7e7be74209fa76a9eed4645458e82bec3533d8aad1c45770506b1870571", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "996f66c44d75bf681d6d5c5d2f6315b7f0fff9e9e56b628bdf0f4d865be69a31", "variant": null }, "cpython-3.9.23-linux-x86_64_v2-gnu": { @@ -24171,8 +25067,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "302ae70b497361840e6f62e233f058ea0a41b91e4cc01050da940419b6b4b3d6", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v2-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "9355e74e4922c9ffd62fadfd0d8949a1de860c14ad16db8ec80e04552219eeaa", "variant": null }, "cpython-3.9.23-linux-x86_64_v2-musl": { @@ -24187,8 +25083,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "740f1f6e1c1a70e7e7aaa47c0274ee6208b16bd1fe8d0c3600ecccb2481a5881", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v2-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "3548ddd479dc2ca6d108cba69c0e267a37664ff795d7ebc908836a3faacef9b1", "variant": null }, "cpython-3.9.23-linux-x86_64_v3-gnu": { @@ -24203,8 +25099,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "d4689ad72cb22973d12c4f7128588ed257e4976d69d92f361da4ddbcec8ce193", + "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v3-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "4364cf01c55eee28f5ca918cc9c20f3130cec3d20c45156623576986729e7a9f", "variant": null }, "cpython-3.9.23-linux-x86_64_v3-musl": { @@ -24219,8 +25115,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "5c094d6d91bf0e424c12b26627e0095f43879fefc8cf6050201b39b950163861", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v3-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "ac0f0cca348f51d29c3e9201e8cb35251b0eceb0e6d29ce2b652fc2bd912bf7c", "variant": null }, "cpython-3.9.23-linux-x86_64_v4-gnu": { @@ -24235,8 +25131,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", - "sha256": "2933a49ca7292e5ed1849aeb59057ec75ea9e018295d1bb8a3c155a7e46b3dde", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v4-unknown-linux-gnu-install_only_stripped.tar.gz", + "sha256": "4622c9b7aad91c6aa9d3b251a5721b52725866defb6132e9d8b0c7b05ebdd317", "variant": null }, "cpython-3.9.23-linux-x86_64_v4-musl": { @@ -24251,8 +25147,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", - "sha256": "b8e92fe27a41d7d3751cdbffff7b4de3c84576fd960668555c20630d0860675e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v4-unknown-linux-musl-install_only_stripped.tar.gz", + "sha256": "227544768f4214350a1051282a49e598a742bead5447ac7adfb1da488cf6b165", "variant": null }, "cpython-3.9.23-windows-i686-none": { @@ -24267,8 +25163,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-i686-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "d40486ddf29d6a70dda7ea8d56666733943de19ff8308b713175cba00d0f6a0f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-i686-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "0029b916ac37b330d40c6fa13f507249660f0ceaaa34415bc691e705925b6d1b", "variant": null }, "cpython-3.9.23-windows-x86_64-none": { @@ -24283,8 +25179,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", - "sha256": "5c4edcc6861f7fc72807c600666b060f3177a934577a0b1653f1ab511fdac4a0", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64-pc-windows-msvc-install_only_stripped.tar.gz", + "sha256": "fd864f5f2aff6727250bd9104409a458146552f88d6ae7b78427aed719506b9c", "variant": null }, "cpython-3.9.23+debug-linux-aarch64-gnu": { @@ -24299,8 +25195,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": 
"https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-aarch64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c74a865c4e3848b38f4e1e96b24ba4e839c5776cb0ea72abe7b652a1524a1a51", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-aarch64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "080edc8aca719b776e62e1803948390cc75392db8a416f3ebc3fa1b6ec219c8e", "variant": "debug" }, "cpython-3.9.23+debug-linux-armv7-gnueabi": { @@ -24315,8 +25211,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-armv7-unknown-linux-gnueabi-debug-full.tar.zst", - "sha256": "5833d757964b5fe81f07183def651556ccd2c5cc9054373a6483b4ffb140ea72", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-armv7-unknown-linux-gnueabi-debug-full.tar.zst", + "sha256": "b187d469dd3c61efdac4ac4a9f9a17e01db860bef5e836251ad38e751bd2f2e9", "variant": "debug" }, "cpython-3.9.23+debug-linux-armv7-gnueabihf": { @@ -24331,8 +25227,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", - "sha256": "f5880944d32da9b4b65e88a21e4e2c3410ca299886a86db19566936b36fc445a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-armv7-unknown-linux-gnueabihf-debug-full.tar.zst", + "sha256": "d4f4ae11a45a4f7821caca519880fe79a052bb8191cbc7678965304d5efea5a3", "variant": "debug" }, "cpython-3.9.23+debug-linux-powerpc64le-gnu": { @@ -24347,8 +25243,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-ppc64le-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "5ef02120a1e82c3d2c60f04380c8cac171cea59bc647e6089d4b2971e70a4b06", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-ppc64le-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "2c389fc71513a2f75ef3a1a299a160d1a7d19f701f2a9635ece77454b2fddfb1", "variant": "debug" }, "cpython-3.9.23+debug-linux-riscv64-gnu": { @@ -24363,8 +25259,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-riscv64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "b480c9dd67c1c2bccce609f145f562958e1235d294f8b5be385d3b5daca76e23", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-riscv64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "4c11855610bfe76f7dd003bcf3be0df9f656a41667c835df9da870b8ee91c465", "variant": "debug" }, "cpython-3.9.23+debug-linux-s390x-gnu": { @@ -24379,8 +25275,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-s390x-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "c2f3433e9820f2c8a70363d8faa7e881079e5b9e50a4764702c470add3c899ee", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-s390x-unknown-linux-gnu-debug-full.tar.zst", + "sha256": 
"90d4077a0907f4e491662b92184368b6b16f4b3623e618fdbd37ae6ceecb6813", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64-gnu": { @@ -24395,8 +25291,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "120bc8aafdc5dcb828c27301776ccab4204feb3ad38fe03d7b0c8321617762f4", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "5a062251e9ee9f765373cb5eae61943bc214f8363392e3cffd235ca1a751ef98", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64-musl": { @@ -24411,8 +25307,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64-unknown-linux-musl-debug-full.tar.zst", - "sha256": "c79452c6ac6b7287b4119ba7a94cdaaa7edd50dbb489c76a2b3f1e3d0563b35a", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64-unknown-linux-musl-debug-full.tar.zst", + "sha256": "e5e5ef74bd58d9f0994e583830811ec3be9149276a1434753b07bd19d77e9417", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v2-gnu": { @@ -24427,8 +25323,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "75d93b474294a74cbe8b2445f5267299cf9db5e4fa0c6820408c5ac054549ff2", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v2-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3fc2ad7307cd0fb5e360baea3b598ed9218313f51f83063b4d085fcf6c85c7e0", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v2-musl": { @@ -24443,8 +25339,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", - "sha256": "a34fcea0dd98f275da0cb90e49df2d2bb6280fd010422fbe4f9234fabfc0f74d", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v2-unknown-linux-musl-debug-full.tar.zst", + "sha256": "ec069be5c7b2705b885993ed8f15f3e0456f445beeee1f372b65fdd89afc7cd1", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v3-gnu": { @@ -24459,8 +25355,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "0415c91da8059adc423689961d5cf435c658efdca4f5a2036c65b9b7190ab26f", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v3-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "071b71c4a41da3cde092d877e36ce55f4906246c9d0755a3a349717ad4b1d7a5", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v3-musl": { @@ -24475,8 +25371,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", - "sha256": 
"2bb65e171d6428d549162b5b305c8ab6e6877713a33009107d5f2934a13d547e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v3-unknown-linux-musl-debug-full.tar.zst", + "sha256": "cd3c0e2060fe94dcd346add4ee9f9053bcc35367cd2b69b46c126f4ac0681aed", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v4-gnu": { @@ -24491,8 +25387,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", - "sha256": "1b40bf44d9eddf38d2e0e3a20178358ece16fc940c5ee1e3cac15ae3d2d6c70e", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v4-unknown-linux-gnu-debug-full.tar.zst", + "sha256": "3934b72131d7a00c5aeaec79c714315e6773bd4170596fb27265efb643444520", "variant": "debug" }, "cpython-3.9.23+debug-linux-x86_64_v4-musl": { @@ -24507,8 +25403,8 @@ "minor": 9, "patch": 23, "prerelease": "", - "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250612/cpython-3.9.23%2B20250612-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", - "sha256": "13b3c21d113b4e796dba7f52b93dfa97b7e658a910a90ab0433477db200c40ee", + "url": "https://github.com/astral-sh/python-build-standalone/releases/download/20250702/cpython-3.9.23%2B20250702-x86_64_v4-unknown-linux-musl-debug-full.tar.zst", + "sha256": "dd0957b5c94d98f94a267e3d4e8e6acc3561f9b7970532d69d533b3eb59c72e6", "variant": "debug" }, "cpython-3.9.22-darwin-aarch64-none": { diff --git a/crates/uv-python/fetch-download-metadata.py b/crates/uv-python/fetch-download-metadata.py index 0b5d9caec..08adaecea 100755 --- a/crates/uv-python/fetch-download-metadata.py +++ b/crates/uv-python/fetch-download-metadata.py @@ -630,7 +630,9 @@ class GraalPyFinder(Finder): for download in batch: url = download.url + ".sha256" checksum_requests.append(self.client.get(url)) - for download, resp in zip(batch, await asyncio.gather(*checksum_requests)): + for download, resp in zip( + batch, await asyncio.gather(*checksum_requests), strict=False + ): try: resp.raise_for_status() except httpx.HTTPStatusError as e: diff --git a/crates/uv-python/python/get_interpreter_info.py b/crates/uv-python/python/get_interpreter_info.py index 0fe088819..8e9fc37fd 100644 --- a/crates/uv-python/python/get_interpreter_info.py +++ b/crates/uv-python/python/get_interpreter_info.py @@ -39,10 +39,9 @@ if hasattr(sys, "implementation"): # GraalPy reports the CPython version as sys.implementation.version, # so we need to discover the GraalPy version from the cache_tag import re + implementation_version = re.sub( - r"graalpy(\d)(\d+)-\d+", - r"\1.\2", - sys.implementation.cache_tag + r"graalpy(\d)(\d+)-\d+", r"\1.\2", sys.implementation.cache_tag ) else: implementation_version = format_full_version(sys.implementation.version) @@ -583,7 +582,6 @@ def main() -> None: elif os_and_arch["os"]["name"] == "musllinux": manylinux_compatible = True - # By default, pip uses sysconfig on Python 3.10+. 
# But Python distributors can override this decision by setting: # sysconfig._PIP_USE_SYSCONFIG = True / False @@ -608,7 +606,7 @@ def main() -> None: except (ImportError, AttributeError): pass - import distutils.dist + import distutils.dist # noqa: F401 except ImportError: # We require distutils, but it's not installed; this is fairly # common in, e.g., deadsnakes where distutils is packaged @@ -641,7 +639,10 @@ def main() -> None: # Prior to the introduction of `sysconfig` patching, python-build-standalone installations would always use # "/install" as the prefix. With `sysconfig` patching, we rewrite the prefix to match the actual installation # location. So in newer versions, we also write a dedicated flag to indicate standalone builds. - "standalone": sysconfig.get_config_var("prefix") == "/install" or bool(sysconfig.get_config_var("PYTHON_BUILD_STANDALONE")), + "standalone": ( + sysconfig.get_config_var("prefix") == "/install" + or bool(sysconfig.get_config_var("PYTHON_BUILD_STANDALONE")) + ), "scheme": get_scheme(use_sysconfig_scheme), "virtualenv": get_virtualenv(), "platform": os_and_arch, diff --git a/crates/uv-python/python/packaging/_elffile.py b/crates/uv-python/python/packaging/_elffile.py index f7a02180b..8dc7fb32a 100644 --- a/crates/uv-python/python/packaging/_elffile.py +++ b/crates/uv-python/python/packaging/_elffile.py @@ -69,8 +69,7 @@ class ELFFile: }[(self.capacity, self.encoding)] except KeyError: raise ELFInvalid( - f"unrecognized capacity ({self.capacity}) or " - f"encoding ({self.encoding})" + f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})" ) try: diff --git a/crates/uv-python/python/packaging/_manylinux.py b/crates/uv-python/python/packaging/_manylinux.py index ea7125c76..7b52a5581 100644 --- a/crates/uv-python/python/packaging/_manylinux.py +++ b/crates/uv-python/python/packaging/_manylinux.py @@ -161,8 +161,7 @@ def _parse_glibc_version(version_str: str) -> _GLibCVersion: m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) if not m: warnings.warn( - f"Expected glibc version with 2 components major.minor," - f" got: {version_str}", + f"Expected glibc version with 2 components major.minor, got: {version_str}", RuntimeWarning, ) return _GLibCVersion(-1, -1) @@ -255,5 +254,6 @@ def platform_tags(archs: Sequence[str]) -> Iterator[str]: if _is_compatible(arch, glibc_version): yield "manylinux_{}_{}_{}".format(*glibc_version, arch) # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
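The hunk that follows replaces an assignment expression (`:=`, a Python 3.8 feature) with a plain assignment; together with the new `ruff.toml` below pinning `target-version = "py37"`, this keeps the vendored module parseable by Python 3.7. A sketch of the equivalence, with the legacy tag map reproduced here for illustration (values as commonly defined in packaging's `_manylinux.py`):

# glibc-version-to-legacy-tag map, reproduced for illustration.
_LEGACY_MANYLINUX_MAP = {
    (2, 17): "manylinux2014",
    (2, 12): "manylinux2010",
    (2, 5): "manylinux1",
}

def tags_py38(glibc_version, arch):
    # Python 3.8+: bind and test in one expression.
    if legacy_tag := _LEGACY_MANYLINUX_MAP.get(glibc_version):
        yield f"{legacy_tag}_{arch}"

def tags_py37(glibc_version, arch):
    # Python 3.7-compatible: separate assignment, same truthiness test.
    legacy_tag = _LEGACY_MANYLINUX_MAP.get(glibc_version)
    if legacy_tag:
        yield f"{legacy_tag}_{arch}"

assert list(tags_py38((2, 17), "x86_64")) == list(tags_py37((2, 17), "x86_64"))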
- if legacy_tag := _LEGACY_MANYLINUX_MAP.get(glibc_version): + legacy_tag = _LEGACY_MANYLINUX_MAP.get(glibc_version) + if legacy_tag: yield f"{legacy_tag}_{arch}" diff --git a/crates/uv-python/python/ruff.toml b/crates/uv-python/python/ruff.toml new file mode 100644 index 000000000..5e6921be4 --- /dev/null +++ b/crates/uv-python/python/ruff.toml @@ -0,0 +1,2 @@ +# It is important to retain compatibility when querying interpreters +target-version = "py37" diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs index 274cb51d4..67f8f37ff 100644 --- a/crates/uv-python/src/discovery.rs +++ b/crates/uv-python/src/discovery.rs @@ -8,6 +8,7 @@ use std::{env, io, iter}; use std::{path::Path, path::PathBuf, str::FromStr}; use thiserror::Error; use tracing::{debug, instrument, trace}; +use uv_configuration::PreviewMode; use which::{which, which_all}; use uv_cache::Cache; @@ -20,12 +21,12 @@ use uv_pep440::{ use uv_static::EnvVars; use uv_warnings::warn_user_once; -use crate::downloads::PythonDownloadRequest; +use crate::downloads::{PlatformRequest, PythonDownloadRequest}; use crate::implementation::ImplementationName; use crate::installation::PythonInstallation; use crate::interpreter::Error as InterpreterError; use crate::interpreter::{StatusCodeError, UnexpectedResponseError}; -use crate::managed::ManagedPythonInstallations; +use crate::managed::{ManagedPythonInstallations, PythonMinorVersionLink}; #[cfg(windows)] use crate::microsoft_store::find_microsoft_store_pythons; use crate::virtualenv::Error as VirtualEnvError; @@ -35,12 +36,12 @@ use crate::virtualenv::{ }; #[cfg(windows)] use crate::windows_registry::{WindowsPython, registry_pythons}; -use crate::{BrokenSymlink, Interpreter, PythonVersion}; +use crate::{BrokenSymlink, Interpreter, PythonInstallationKey, PythonVersion}; /// A request to find a Python installation. /// /// See [`PythonRequest::from_str`]. -#[derive(Debug, Clone, PartialEq, Eq, Default)] +#[derive(Debug, Clone, PartialEq, Eq, Default, Hash)] pub enum PythonRequest { /// An appropriate default Python installation /// @@ -67,6 +68,26 @@ pub enum PythonRequest { Key(PythonDownloadRequest), } +impl<'a> serde::Deserialize<'a> for PythonRequest { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: serde::Deserializer<'a>, + { + let s = String::deserialize(deserializer)?; + Ok(PythonRequest::parse(&s)) + } +} + +impl serde::Serialize for PythonRequest { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: serde::Serializer, + { + let s = self.to_canonical_string(); + serializer.serialize_str(&s) + } +} + #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Deserialize)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] #[cfg_attr(feature = "clap", derive(clap::ValueEnum))] @@ -153,7 +174,7 @@ pub enum PythonVariant { } /// A Python discovery version request. -#[derive(Clone, Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)] pub enum VersionRequest { /// Allow an appropriate default Python version.
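The serde implementations above round-trip `PythonRequest` through its string form: serialization writes `to_canonical_string()`, deserialization re-parses. The contract that makes this safe is that parsing a canonical string yields a request that canonicalizes back to the same string. A Python sketch of the same pattern (the class and its trivial parsing rule are hypothetical stand-ins, not uv's):

import json

class Request:
    """Hypothetical stand-in that stores a canonical, trimmed version string."""
    def __init__(self, raw: str):
        self.version = raw.strip()
    def to_canonical_string(self) -> str:
        return self.version

original = Request("  3.12 ")
encoded = json.dumps(original.to_canonical_string())  # serialize as a plain string
decoded = Request(json.loads(encoded))                # deserialize by re-parsing
assert decoded.to_canonical_string() == original.to_canonical_string()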
#[default] @@ -312,7 +333,9 @@ fn python_executables_from_virtual_environments<'a>() fn python_executables_from_installed<'a>( version: &'a VersionRequest, implementation: Option<&'a ImplementationName>, + platform: PlatformRequest, preference: PythonPreference, + preview: PreviewMode, ) -> Box<dyn Iterator<Item = Result<(PythonSource, PathBuf), Error>> + 'a> { let from_managed_installations = iter::once_with(move || { ManagedPythonInstallations::from_settings(None) @@ -323,19 +346,44 @@ fn python_executables_from_installed<'a>( installed_installations.root().user_display() ); let installations = installed_installations.find_matching_current_platform()?; - // Check that the Python version satisfies the request to avoid unnecessary interpreter queries later + // Check that the Python version and platform satisfy the request to avoid unnecessary interpreter queries later Ok(installations .into_iter() .filter(move |installation| { - if version.matches_version(&installation.version()) { - true - } else { - debug!("Skipping incompatible managed installation `{installation}`"); - false + if !version.matches_version(&installation.version()) { + debug!("Skipping managed installation `{installation}`: does not satisfy `{version}`"); + return false; } + if !platform.matches(installation.key()) { + debug!("Skipping managed installation `{installation}`: does not satisfy `{platform}`"); + return false; + } + true }) .inspect(|installation| debug!("Found managed installation `{installation}`")) - .map(|installation| (PythonSource::Managed, installation.executable(false)))) + .map(move |installation| { + // If it's not a patch version request, then attempt to read the stable + // minor version link. + let executable = version + .patch() + .is_none() + .then(|| { + PythonMinorVersionLink::from_installation( + &installation, + preview, + ) + .filter(PythonMinorVersionLink::exists) + .map( + |minor_version_link| { + minor_version_link.symlink_executable.clone() + }, + ) + }) + .flatten() + .unwrap_or_else(|| installation.executable(false)); + (PythonSource::Managed, executable) + }) + ) }) }) .flatten_ok(); @@ -415,17 +463,20 @@ fn python_executables_from_installed<'a>( /// Lazily iterate over all discoverable Python executables. /// -/// Note that Python executables may be excluded by the given [`EnvironmentPreference`] and -/// [`PythonPreference`]. However, these filters are only applied for performance. We cannot -/// guarantee that the [`EnvironmentPreference`] is satisfied until we query the interpreter. +/// Note that Python executables may be excluded by the given [`EnvironmentPreference`], +/// [`PythonPreference`], and [`PlatformRequest`]. However, these filters are only applied for +/// performance. We cannot guarantee that all requests or preferences are satisfied until we +/// query the interpreter. /// /// See [`python_executables_from_installed`] and [`python_executables_from_virtual_environments`] /// for more information on discovery.
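The managed-installation hunk above prefers a stable minor-version link when the request leaves the patch version open, so a request like `3.12` keeps resolving through patch upgrades without rediscovery, while a pinned `3.12.4` still gets the exact executable. A Python sketch of that selection rule (paths and names are illustrative, not uv's layout):

from pathlib import Path
from typing import Optional

def select_executable(
    requested_patch: Optional[int],
    exact_executable: Path,
    minor_version_link: Optional[Path],
) -> Path:
    # Only a patch-free request may follow the minor-version link, and only
    # if the link actually exists on disk.
    if requested_patch is None and minor_version_link is not None and minor_version_link.exists():
        return minor_version_link
    return exact_executable

exact = Path("/pythons/cpython-3.12.4/bin/python")
link = Path("/pythons/cpython-3.12/bin/python")  # tracks the newest 3.12.x
print(select_executable(None, exact, link))  # falls back to `exact` unless the link exists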
fn python_executables<'a>( version: &'a VersionRequest, implementation: Option<&'a ImplementationName>, + platform: PlatformRequest, environments: EnvironmentPreference, preference: PythonPreference, + preview: PreviewMode, ) -> Box<dyn Iterator<Item = Result<(PythonSource, PathBuf), Error>> + 'a> { // Always read from `UV_INTERNAL__PARENT_INTERPRETER` — it could be a system interpreter let from_parent_interpreter = iter::once_with(|| { @@ -445,7 +496,8 @@ fn python_executables<'a>( .flatten(); let from_virtual_environments = python_executables_from_virtual_environments(); - let from_installed = python_executables_from_installed(version, implementation, preference); + let from_installed = + python_executables_from_installed(version, implementation, platform, preference, preview); // Limit the search to the relevant environment preference; this avoids unnecessary work like // traversal of the file system. Subsequent filtering should be done by the caller with @@ -630,25 +682,37 @@ fn find_all_minor( /// Lazily iterate over all discoverable Python interpreters. /// -/// Note interpreters may be excluded by the given [`EnvironmentPreference`] and [`PythonPreference`]. +/// Note interpreters may be excluded by the given [`EnvironmentPreference`], [`PythonPreference`], +/// [`VersionRequest`], or [`PlatformRequest`]. +/// +/// The [`PlatformRequest`] is currently only applied to managed Python installations before querying +/// the interpreter. The caller is responsible for ensuring it is applied otherwise. /// /// See [`python_executables`] for more information on discovery. fn python_interpreters<'a>( version: &'a VersionRequest, implementation: Option<&'a ImplementationName>, + platform: PlatformRequest, environments: EnvironmentPreference, preference: PythonPreference, cache: &'a Cache, + preview: PreviewMode, ) -> impl Iterator<Item = Result<(PythonSource, Interpreter), Error>> + 'a { python_interpreters_from_executables( // Perform filtering on the discovered executables based on their source. This avoids // unnecessary interpreter queries, which are generally expensive. We'll filter again // with `interpreter_satisfies_environment_preference` after querying. - python_executables(version, implementation, environments, preference).filter_ok( - move |(source, path)| { - source_satisfies_environment_preference(*source, path, environments) - }, - ), + python_executables( + version, + implementation, + platform, + environments, + preference, + preview, + ) + .filter_ok(move |(source, path)| { + source_satisfies_environment_preference(*source, path, environments) + }), cache, ) .filter_ok(move |(source, interpreter)| { @@ -824,13 +888,8 @@ impl Error { | InterpreterError::BrokenSymlink(BrokenSymlink { path, ..
}) => { // If the interpreter is from an active, valid virtual environment, we should // fail because it's broken - if let Some(Ok(true)) = matches!(source, PythonSource::ActiveEnvironment) - .then(|| { - path.parent() - .and_then(Path::parent) - .map(|path| path.join("pyvenv.cfg").try_exists()) - }) - .flatten() + if matches!(source, PythonSource::ActiveEnvironment) + && uv_fs::is_virtualenv_executable(path) { true } else { @@ -887,6 +946,7 @@ pub fn find_python_installations<'a>( environments: EnvironmentPreference, preference: PythonPreference, cache: &'a Cache, + preview: PreviewMode, ) -> Box<dyn Iterator<Item = Result<FindPythonResult, Error>> + 'a> { let sources = DiscoveryPreferences { python_preference: preference, @@ -971,17 +1031,27 @@ pub fn find_python_installations<'a>( } PythonRequest::Any => Box::new({ debug!("Searching for any Python interpreter in {sources}"); - python_interpreters(&VersionRequest::Any, None, environments, preference, cache) - .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) + python_interpreters( + &VersionRequest::Any, + None, + PlatformRequest::default(), + environments, + preference, + cache, + preview, + ) + .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) }), PythonRequest::Default => Box::new({ debug!("Searching for default Python interpreter in {sources}"); python_interpreters( &VersionRequest::Default, None, + PlatformRequest::default(), environments, preference, cache, + preview, ) .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) }), @@ -991,8 +1061,16 @@ } Box::new({ debug!("Searching for {request} in {sources}"); - python_interpreters(version, None, environments, preference, cache) - .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) + python_interpreters( + version, + None, + PlatformRequest::default(), + environments, + preference, + cache, + preview, + ) + .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) }) } PythonRequest::Implementation(implementation) => Box::new({ debug!("Searching for a {request} interpreter in {sources}"); python_interpreters( &VersionRequest::Default, Some(implementation), + PlatformRequest::default(), environments, preference, cache, + preview, ) .filter_ok(|(_source, interpreter)| { interpreter @@ -1020,9 +1100,11 @@ python_interpreters( version, Some(implementation), + PlatformRequest::default(), environments, preference, cache, + preview, ) .filter_ok(|(_source, interpreter)| { interpreter @@ -1043,9 +1125,11 @@ python_interpreters( request.version().unwrap_or(&VersionRequest::Default), request.implementation(), + request.platform(), environments, preference, cache, + preview, ) .filter_ok(|(_source, interpreter)| request.satisfied_by_interpreter(interpreter)) .map_ok(|tuple| Ok(PythonInstallation::from_tuple(tuple))) @@ -1063,8 +1147,10 @@ pub(crate) fn find_python_installation( request: &PythonRequest, environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result<FindPythonResult, Error> { - let installations = find_python_installations(request, environments, preference, cache); + let installations = + find_python_installations(request, environments, preference, cache, preview); let mut first_prerelease = None; let mut first_error = None; for result in installations { @@ -1155,17 +1241,18 @@ pub(crate) fn find_python_installation( /// /// See [`find_python_installation`] for more details on installation discovery.
#[instrument(skip_all, fields(request))] -pub fn find_best_python_installation( +pub(crate) fn find_best_python_installation( request: &PythonRequest, environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result<FindPythonResult, Error> { debug!("Starting Python discovery for {}", request); // First, check for an exact match (or the first available version if no Python version was provided) debug!("Looking for exact match for request {request}"); - let result = find_python_installation(request, environments, preference, cache); + let result = find_python_installation(request, environments, preference, cache, preview); match result { Ok(Ok(installation)) => { warn_on_unsupported_python(installation.interpreter()); @@ -1193,7 +1280,7 @@ pub fn find_best_python_installation( _ => None, } { debug!("Looking for relaxed patch version {request}"); - let result = find_python_installation(&request, environments, preference, cache); + let result = find_python_installation(&request, environments, preference, cache, preview); match result { Ok(Ok(installation)) => { warn_on_unsupported_python(installation.interpreter()); @@ -1210,14 +1297,16 @@ debug!("Looking for a default Python installation"); let request = PythonRequest::Default; Ok( - find_python_installation(&request, environments, preference, cache)?.map_err(|err| { - // Use a more general error in this case since we looked for multiple versions - PythonNotFound { - request, - python_preference: err.python_preference, - environment_preference: err.environment_preference, - } - }), + find_python_installation(&request, environments, preference, cache, preview)?.map_err( + |err| { + // Use a more general error in this case since we looked for multiple versions + PythonNotFound { + request, + python_preference: err.python_preference, + environment_preference: err.environment_preference, + } + }, + ), ) } @@ -1344,7 +1433,7 @@ pub(crate) fn is_windows_store_shim(path: &Path) -> bool { 0, buf.as_mut_ptr().cast(), buf.len() as u32 * 2, - &mut bytes_returned, + &raw mut bytes_returned, std::ptr::null_mut(), ) != 0 }; @@ -1595,6 +1684,24 @@ impl PythonRequest { Ok(rest.parse().ok()) } + /// Check if this request includes a specific patch version. + pub fn includes_patch(&self) -> bool { + match self { + PythonRequest::Default => false, + PythonRequest::Any => false, + PythonRequest::Version(version_request) => version_request.patch().is_some(), + PythonRequest::Directory(..) => false, + PythonRequest::File(..) => false, + PythonRequest::ExecutableName(..) => false, + PythonRequest::Implementation(..) => false, + PythonRequest::ImplementationVersion(_, version) => version.patch().is_some(), + PythonRequest::Key(request) => request + .version + .as_ref() + .is_some_and(|request| request.patch().is_some()), + } + } + /// Check if a given interpreter satisfies the interpreter request. pub fn satisfied(&self, interpreter: &Interpreter, cache: &Cache) -> bool { /// Returns `true` if the two paths refer to the same interpreter executable. @@ -2036,6 +2143,11 @@ impl fmt::Display for ExecutableName { } impl VersionRequest { + /// Derive a [`VersionRequest::MajorMinor`] from a [`PythonInstallationKey`] + pub fn major_minor_request_from_key(key: &PythonInstallationKey) -> Self { + Self::MajorMinor(key.major, key.minor, key.variant) + } + /// Return possible executable names for the given version request.
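`find_best_python_installation` above makes three passes: the exact request, the request with any pinned patch version relaxed, and finally any default interpreter. A compressed Python sketch of that fallback chain (the lookup function is a stand-in for real discovery, not uv's API):

def find_best(request, find):
    # 1) Exact match (or the first available interpreter if no version given).
    if (found := find(request)) is not None:
        return found
    # 2) Relax a pinned patch version, e.g. "3.11.4" -> "3.11".
    if request.count(".") == 2:
        if (found := find(request.rsplit(".", 1)[0])) is not None:
            return found
    # 3) Fall back to any default installation.
    return find("default")

installed = {"3.11": "/usr/bin/python3.11", "default": "/usr/bin/python3"}
print(find_best("3.11.4", installed.get))  # hits pass 2: /usr/bin/python3.11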
pub(crate) fn executable_names( &self, diff --git a/crates/uv-python/src/downloads.rs b/crates/uv-python/src/downloads.rs index 9b7fc2825..ad516d096 100644 --- a/crates/uv-python/src/downloads.rs +++ b/crates/uv-python/src/downloads.rs @@ -12,7 +12,7 @@ use futures::TryStreamExt; use itertools::Itertools; use once_cell::sync::OnceCell; use owo_colors::OwoColorize; -use reqwest_retry::RetryPolicy; +use reqwest_retry::{RetryError, RetryPolicy}; use serde::Deserialize; use thiserror::Error; use tokio::io::{AsyncRead, AsyncWriteExt, BufWriter, ReadBuf}; @@ -53,6 +53,12 @@ pub enum Error { TooManyParts(String), #[error("Failed to download {0}")] NetworkError(DisplaySafeUrl, #[source] WrappedReqwestError), + #[error("Request failed after {retries} retries")] + NetworkErrorWithRetries { + #[source] + err: Box<Error>, + retries: u32, + }, #[error("Failed to download {0}")] NetworkMiddlewareError(DisplaySafeUrl, #[source] anyhow::Error), #[error("Failed to extract archive: {0}")] @@ -105,14 +111,41 @@ pub enum Error { }, } -#[derive(Debug, PartialEq, Clone)] +impl Error { + // Return the number of attempts that were made to complete this request before this error was + // returned. Note that e.g. 3 retries equates to 4 attempts. + // + // It's easier to do arithmetic with "attempts" instead of "retries", because if you have + // nested retry loops you can just add up all the attempts directly, while adding up the + // retries requires +1/-1 adjustments. + fn attempts(&self) -> u32 { + // Unfortunately different variants of `Error` track retry counts in different ways. We + // could consider unifying the variants we handle here in `Error::from_reqwest_middleware` + // instead, but both approaches will be fragile as new variants get added over time. + if let Error::NetworkErrorWithRetries { retries, .. } = self { + return retries + 1; + } + // TODO(jack): let-chains are stable as of Rust 1.88. We should use them here as soon as + // our rust-version is high enough. + if let Error::NetworkMiddlewareError(_, anyhow_error) = self { + if let Some(RetryError::WithRetries { retries, .. }) = + anyhow_error.downcast_ref::<RetryError>() + { + return retries + 1; + } + } + 1 + } +} + +#[derive(Debug, PartialEq, Eq, Clone, Hash)] pub struct ManagedPythonDownload { key: PythonInstallationKey, url: &'static str, sha256: Option<&'static str>, } -#[derive(Debug, Clone, Default, Eq, PartialEq)] +#[derive(Debug, Clone, Default, Eq, PartialEq, Hash)] pub struct PythonDownloadRequest { pub(crate) version: Option<VersionRequest>, pub(crate) implementation: Option<ImplementationName>, @@ -125,12 +158,60 @@ pub struct PythonDownloadRequest { pub(crate) prereleases: Option<bool>, } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum ArchRequest { Explicit(Arch), Environment(Arch), } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub struct PlatformRequest { + pub(crate) os: Option<Os>, + pub(crate) arch: Option<ArchRequest>, + pub(crate) libc: Option<Libc>, +} + +impl PlatformRequest { + /// Check if this platform request is satisfied by an installation key.
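/// A minimal sketch of the matching rule (illustrative; `key` stands in for
/// any [`PythonInstallationKey`]):
///
/// ```ignore
/// let request = PlatformRequest::default(); // os, arch, and libc are all `None`
/// assert!(request.matches(&key)); // an unconstrained request matches any key
/// ```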
+ pub fn matches(&self, key: &PythonInstallationKey) -> bool { + if let Some(os) = self.os { + if key.os != os { + return false; + } + } + + if let Some(arch) = self.arch { + if !arch.satisfied_by(key.arch) { + return false; + } + } + + if let Some(libc) = self.libc { + if key.libc != libc { + return false; + } + } + + true + } +} + +impl Display for PlatformRequest { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut parts = Vec::new(); + if let Some(os) = &self.os { + parts.push(os.to_string()); + } + if let Some(arch) = &self.arch { + parts.push(arch.to_string()); + } + if let Some(libc) = &self.libc { + parts.push(libc.to_string()); + } + write!(f, "{}", parts.join("-")) + } +} + impl Display for ArchRequest { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -412,6 +493,15 @@ impl PythonDownloadRequest { } true } + + /// Extract the platform components of this request. + pub fn platform(&self) -> PlatformRequest { + PlatformRequest { + os: self.os, + arch: self.arch, + libc: self.libc, + } + } } impl From<&ManagedPythonInstallation> for PythonDownloadRequest { @@ -632,7 +722,8 @@ impl ManagedPythonDownload { pypy_install_mirror: Option<&str>, reporter: Option<&dyn Reporter>, ) -> Result { - let mut n_past_retries = 0; + let mut total_attempts = 0; + let mut retried_here = false; let start_time = SystemTime::now(); let retry_policy = client.retry_policy(); loop { @@ -647,25 +738,41 @@ impl ManagedPythonDownload { reporter, ) .await; - if result - .as_ref() - .err() - .is_some_and(|err| is_extended_transient_error(err)) - { - let retry_decision = retry_policy.should_retry(start_time, n_past_retries); - if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision { - debug!( - "Transient failure while handling response for {}; retrying...", - self.key() - ); - let duration = execute_after - .duration_since(SystemTime::now()) - .unwrap_or_else(|_| Duration::default()); - tokio::time::sleep(duration).await; - n_past_retries += 1; - continue; + let result = match result { + Ok(download_result) => Ok(download_result), + Err(err) => { + // Inner retry loops (e.g. `reqwest-retry` middleware) might make more than one + // attempt per error we see here. + total_attempts += err.attempts(); + // We currently interpret e.g. "3 retries" to mean we should make 4 attempts. + let n_past_retries = total_attempts - 1; + if is_extended_transient_error(&err) { + let retry_decision = retry_policy.should_retry(start_time, n_past_retries); + if let reqwest_retry::RetryDecision::Retry { execute_after } = + retry_decision + { + debug!( + "Transient failure while handling response for {}; retrying...", + self.key() + ); + let duration = execute_after + .duration_since(SystemTime::now()) + .unwrap_or_else(|_| Duration::default()); + tokio::time::sleep(duration).await; + retried_here = true; + continue; // Retry. 
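// Worked example of the attempt accounting above (illustrative): on the first
// pass through this loop, an error whose inner middleware already retried
// twice reports `err.attempts() == 3`, so `total_attempts == 3` and
// `n_past_retries == 2`; the policy is then asked whether a third retry is
// allowed before sleeping and continuing.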
+ } + } + if retried_here { + Err(Error::NetworkErrorWithRetries { + err: Box::new(err), + retries: n_past_retries, + }) + } else { + Err(err) + } } - } + }; return result; } } @@ -709,7 +816,9 @@ impl ManagedPythonDownload { let temp_dir = tempfile::tempdir_in(scratch_dir).map_err(Error::DownloadDirError)?; - if let Some(python_builds_dir) = env::var_os(EnvVars::UV_PYTHON_CACHE_DIR) { + if let Some(python_builds_dir) = + env::var_os(EnvVars::UV_PYTHON_CACHE_DIR).filter(|s| !s.is_empty()) + { let python_builds_dir = PathBuf::from(python_builds_dir); fs_err::create_dir_all(&python_builds_dir)?; let hash_prefix = match self.sha256 { @@ -1086,8 +1195,20 @@ fn parse_json_downloads( } impl Error { - pub(crate) fn from_reqwest(url: DisplaySafeUrl, err: reqwest::Error) -> Self { - Self::NetworkError(url, WrappedReqwestError::from(err)) + pub(crate) fn from_reqwest( + url: DisplaySafeUrl, + err: reqwest::Error, + retries: Option, + ) -> Self { + let err = Self::NetworkError(url, WrappedReqwestError::from(err)); + if let Some(retries) = retries { + Self::NetworkErrorWithRetries { + err: Box::new(err), + retries, + } + } else { + err + } } pub(crate) fn from_reqwest_middleware( @@ -1203,10 +1324,15 @@ async fn read_url( .await .map_err(|err| Error::from_reqwest_middleware(url.clone(), err))?; - // Ensure the request was successful. - response - .error_for_status_ref() - .map_err(|err| Error::from_reqwest(url, err))?; + let retry_count = response + .extensions() + .get::() + .map(|retries| retries.value()); + + // Check the status code. + let response = response + .error_for_status() + .map_err(|err| Error::from_reqwest(url, err, retry_count))?; let size = response.content_length(); let stream = response diff --git a/crates/uv-python/src/environment.rs b/crates/uv-python/src/environment.rs index 34fa3eee9..02f9fd683 100644 --- a/crates/uv-python/src/environment.rs +++ b/crates/uv-python/src/environment.rs @@ -7,6 +7,7 @@ use owo_colors::OwoColorize; use tracing::debug; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_fs::{LockedFile, Simplified}; use uv_pep440::Version; @@ -152,6 +153,7 @@ impl PythonEnvironment { request: &PythonRequest, preference: EnvironmentPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { let installation = match find_python_installation( request, @@ -159,6 +161,7 @@ impl PythonEnvironment { // Ignore managed installations when looking for environments PythonPreference::OnlySystem, cache, + preview, )? 
{ Ok(installation) => installation, Err(err) => return Err(EnvironmentNotFound::from(err).into()), diff --git a/crates/uv-python/src/implementation.rs b/crates/uv-python/src/implementation.rs index ffc61dac7..4393d56f4 100644 --- a/crates/uv-python/src/implementation.rs +++ b/crates/uv-python/src/implementation.rs @@ -44,6 +44,13 @@ impl ImplementationName { Self::GraalPy => "GraalPy", } } + + pub fn executable_name(self) -> &'static str { + match self { + Self::CPython => "python", + Self::PyPy | Self::GraalPy => self.into(), + } + } } impl LenientImplementationName { @@ -53,6 +60,13 @@ impl LenientImplementationName { Self::Unknown(name) => name, } } + + pub fn executable_name(&self) -> &str { + match self { + Self::Known(implementation) => implementation.executable_name(), + Self::Unknown(name) => name, + } + } } impl From<&ImplementationName> for &'static str { diff --git a/crates/uv-python/src/installation.rs b/crates/uv-python/src/installation.rs index 611dc2007..d46643d21 100644 --- a/crates/uv-python/src/installation.rs +++ b/crates/uv-python/src/installation.rs @@ -1,10 +1,14 @@ use std::fmt; +use std::hash::{Hash, Hasher}; use std::str::FromStr; +use indexmap::IndexMap; +use ref_cast::RefCast; use tracing::{debug, info}; use uv_cache::Cache; use uv_client::BaseClientBuilder; +use uv_configuration::PreviewMode; use uv_pep440::{Prerelease, Version}; use crate::discovery::{ @@ -54,8 +58,10 @@ impl PythonInstallation { environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { - let installation = find_python_installation(request, environments, preference, cache)??; + let installation = + find_python_installation(request, environments, preference, cache, preview)??; Ok(installation) } @@ -66,12 +72,14 @@ impl PythonInstallation { environments: EnvironmentPreference, preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) -> Result { Ok(find_best_python_installation( request, environments, preference, cache, + preview, )??) } @@ -89,11 +97,12 @@ impl PythonInstallation { python_install_mirror: Option<&str>, pypy_install_mirror: Option<&str>, python_downloads_json_url: Option<&str>, + preview: PreviewMode, ) -> Result { let request = request.unwrap_or(&PythonRequest::Default); // Search for the installation - let err = match Self::find(request, environments, preference, cache) { + let err = match Self::find(request, environments, preference, cache, preview) { Ok(installation) => return Ok(installation), Err(err) => err, }; @@ -129,6 +138,7 @@ impl PythonInstallation { python_install_mirror, pypy_install_mirror, python_downloads_json_url, + preview, ) .await { @@ -149,6 +159,7 @@ impl PythonInstallation { python_install_mirror: Option<&str>, pypy_install_mirror: Option<&str>, python_downloads_json_url: Option<&str>, + preview: PreviewMode, ) -> Result { let installations = ManagedPythonInstallations::from_settings(None)?.init()?; let installations_dir = installations.root(); @@ -180,6 +191,21 @@ impl PythonInstallation { installed.ensure_externally_managed()?; installed.ensure_sysconfig_patched()?; installed.ensure_canonical_executables()?; + + let minor_version = installed.minor_version_key(); + let highest_patch = installations + .find_all()? 
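// Illustrative: with 3.10.8 and 3.10.14 both present, `highest_patch` is 14,
// so only an installation at or above 3.10.14 will (re)create the
// `cpython-3.10-*` minor version link below.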
+ .filter(|installation| installation.minor_version_key() == minor_version) + .filter_map(|installation| installation.version().patch()) + .fold(0, std::cmp::max); + if installed + .version() + .patch() + .is_some_and(|p| p >= highest_patch) + { + installed.ensure_minor_version_link(preview)?; + } + if let Err(e) = installed.ensure_dylib_patched() { e.warn_user(&installed); } @@ -340,6 +366,14 @@ impl PythonInstallationKey { format!("{}.{}.{}", self.major, self.minor, self.patch) } + pub fn major(&self) -> u8 { + self.major + } + + pub fn minor(&self) -> u8 { + self.minor + } + pub fn arch(&self) -> &Arch { &self.arch } @@ -490,3 +524,112 @@ impl Ord for PythonInstallationKey { .then_with(|| self.variant.cmp(&other.variant).reverse()) } } + +/// A view into a [`PythonInstallationKey`] that excludes the patch and prerelease versions. +#[derive(Clone, Eq, Ord, PartialOrd, RefCast)] +#[repr(transparent)] +pub struct PythonInstallationMinorVersionKey(PythonInstallationKey); + +impl PythonInstallationMinorVersionKey { + /// Cast a `&PythonInstallationKey` to a `&PythonInstallationMinorVersionKey` using ref-cast. + #[inline] + pub fn ref_cast(key: &PythonInstallationKey) -> &Self { + RefCast::ref_cast(key) + } + + /// Takes an [`IntoIterator`] of [`ManagedPythonInstallation`]s and returns an [`IndexMap`] from + /// [`PythonInstallationMinorVersionKey`] to the installation with the highest [`PythonInstallationKey`] + /// for that minor version key. + #[inline] + pub fn highest_installations_by_minor_version_key<'a, I>( + installations: I, + ) -> IndexMap<PythonInstallationMinorVersionKey, ManagedPythonInstallation> + where + I: IntoIterator<Item = &'a ManagedPythonInstallation>, + { + let mut minor_versions = IndexMap::default(); + for installation in installations { + minor_versions + .entry(installation.minor_version_key().clone()) + .and_modify(|high_installation: &mut ManagedPythonInstallation| { + if installation.key() >= high_installation.key() { + *high_installation = installation.clone(); + } + }) + .or_insert_with(|| installation.clone()); + } + minor_versions + } +} + +impl fmt::Display for PythonInstallationMinorVersionKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Display every field on the wrapped key except the patch + // and prerelease (with special formatting for the variant). + let variant = match self.0.variant { + PythonVariant::Default => String::new(), + PythonVariant::Freethreaded => format!("+{}", self.0.variant), + }; + write!( + f, + "{}-{}.{}{}-{}-{}-{}", + self.0.implementation, + self.0.major, + self.0.minor, + variant, + self.0.os, + self.0.arch, + self.0.libc, + ) + } +} + +impl fmt::Debug for PythonInstallationMinorVersionKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Display every field on the wrapped key except the patch + // and prerelease. + f.debug_struct("PythonInstallationMinorVersionKey") + .field("implementation", &self.0.implementation) + .field("major", &self.0.major) + .field("minor", &self.0.minor) + .field("variant", &self.0.variant) + .field("os", &self.0.os) + .field("arch", &self.0.arch) + .field("libc", &self.0.libc) + .finish() + } +} + +impl PartialEq for PythonInstallationMinorVersionKey { + fn eq(&self, other: &Self) -> bool { + // Compare every field on the wrapped key except the patch + // and prerelease.
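// Illustrative consequence: the keys for `cpython-3.10.8-macos-aarch64-none`
// and `cpython-3.10.14-macos-aarch64-none` compare equal here (and hash
// identically below), so both map to the same minor version entry.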
+ self.0.implementation == other.0.implementation + && self.0.major == other.0.major + && self.0.minor == other.0.minor + && self.0.os == other.0.os + && self.0.arch == other.0.arch + && self.0.libc == other.0.libc + && self.0.variant == other.0.variant + } +} + +impl Hash for PythonInstallationMinorVersionKey { + fn hash(&self, state: &mut H) { + // Hash every field on the wrapped key except the patch + // and prerelease. + self.0.implementation.hash(state); + self.0.major.hash(state); + self.0.minor.hash(state); + self.0.os.hash(state); + self.0.arch.hash(state); + self.0.libc.hash(state); + self.0.variant.hash(state); + } +} + +impl From for PythonInstallationMinorVersionKey { + fn from(key: PythonInstallationKey) -> Self { + PythonInstallationMinorVersionKey(key) + } +} diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs index 26d810db5..0f074ebb6 100644 --- a/crates/uv-python/src/interpreter.rs +++ b/crates/uv-python/src/interpreter.rs @@ -26,6 +26,7 @@ use uv_platform_tags::{Tags, TagsError}; use uv_pypi_types::{ResolverMarkerEnvironment, Scheme}; use crate::implementation::LenientImplementationName; +use crate::managed::ManagedPythonInstallations; use crate::platform::{Arch, Libc, Os}; use crate::pointer_size::PointerSize; use crate::{ @@ -168,7 +169,7 @@ impl Interpreter { Ok(path) => path, Err(err) => { warn!("Failed to find base Python executable: {err}"); - uv_fs::canonicalize_executable(base_executable)? + canonicalize_executable(base_executable)? } }; Ok(base_python) @@ -263,6 +264,21 @@ impl Interpreter { self.prefix.is_some() } + /// Returns `true` if this interpreter is managed by uv. + /// + /// Returns `false` if we cannot determine the path of the uv managed Python interpreters. + pub fn is_managed(&self) -> bool { + let Ok(installations) = ManagedPythonInstallations::from_settings(None) else { + return false; + }; + + installations + .find_all() + .into_iter() + .flatten() + .any(|install| install.path() == self.sys_base_prefix) + } + /// Returns `Some` if the environment is externally managed, optionally including an error /// message from the `EXTERNALLY-MANAGED` file. /// @@ -483,10 +499,19 @@ impl Interpreter { /// `python-build-standalone`. /// /// See: + #[cfg(unix)] pub fn is_standalone(&self) -> bool { self.standalone } + /// Returns `true` if an [`Interpreter`] may be a `python-build-standalone` interpreter. + // TODO(john): Replace this approach with patching sysconfig on Windows to + // set `PYTHON_BUILD_STANDALONE=1`.` + #[cfg(windows)] + pub fn is_standalone(&self) -> bool { + self.standalone || (self.is_managed() && self.markers().implementation_name() == "cpython") + } + /// Return the [`Layout`] environment used to install wheels into this interpreter. pub fn layout(&self) -> Layout { Layout { @@ -608,6 +633,29 @@ impl Interpreter { } } +/// Calls `fs_err::canonicalize` on Unix. On Windows, avoids attempting to resolve symlinks +/// but will resolve junctions if they are part of a trampoline target. +pub fn canonicalize_executable(path: impl AsRef) -> std::io::Result { + let path = path.as_ref(); + debug_assert!( + path.is_absolute(), + "path must be absolute: {}", + path.display() + ); + + #[cfg(windows)] + { + if let Ok(Some(launcher)) = uv_trampoline_builder::Launcher::try_from_path(path) { + Ok(dunce::canonicalize(launcher.python_path)?) + } else { + Ok(path.to_path_buf()) + } + } + + #[cfg(unix)] + fs_err::canonicalize(path) +} + /// The `EXTERNALLY-MANAGED` file in a Python installation. 
/// /// See: @@ -919,6 +967,31 @@ impl InterpreterInfo { pub(crate) fn query_cached(executable: &Path, cache: &Cache) -> Result { let absolute = std::path::absolute(executable)?; + // Provide a better error message if the link is broken or the file does not exist. Since + // `canonicalize_executable` does not resolve the file on Windows, we must re-use this logic + // for the subsequent metadata read as we may not have actually resolved the path. + let handle_io_error = |err: io::Error| -> Error { + if err.kind() == io::ErrorKind::NotFound { + // Check if it looks like a venv interpreter where the underlying Python + // installation was removed. + if absolute + .symlink_metadata() + .is_ok_and(|metadata| metadata.is_symlink()) + { + Error::BrokenSymlink(BrokenSymlink { + path: executable.to_path_buf(), + venv: uv_fs::is_virtualenv_executable(executable), + }) + } else { + Error::NotFound(executable.to_path_buf()) + } + } else { + err.into() + } + }; + + let canonical = canonicalize_executable(&absolute).map_err(handle_io_error)?; + let cache_entry = cache.entry( CacheBucket::Interpreter, // Shard interpreter metadata by host architecture, operating system, and version, to @@ -929,38 +1002,18 @@ impl InterpreterInfo { sys_info::os_release().unwrap_or_default(), )), // We use the absolute path for the cache entry to avoid cache collisions for relative - // paths. But we don't to query the executable with symbolic links resolved. - format!("{}.msgpack", cache_digest(&absolute)), + // paths. But we don't want to query the executable with symbolic links resolved because + // that can change reported values, e.g., `sys.executable`. We include the canonical + // path in the cache entry as well, otherwise we can have cache collisions if an + // absolute path refers to different interpreters with matching ctimes, e.g., if you + // have a `.venv/bin/python` pointing to both Python 3.12 and Python 3.13 that were + // modified at the same time. + format!("{}.msgpack", cache_digest(&(&absolute, &canonical))), ); // We check the timestamp of the canonicalized executable to check if an underlying // interpreter has been modified. - let modified = uv_fs::canonicalize_executable(&absolute) - .and_then(Timestamp::from_path) - .map_err(|err| { - if err.kind() == io::ErrorKind::NotFound { - // Check if it looks like a venv interpreter where the underlying Python - // installation was removed. - if absolute - .symlink_metadata() - .is_ok_and(|metadata| metadata.is_symlink()) - { - let venv = executable - .parent() - .and_then(Path::parent) - .map(|path| path.join("pyvenv.cfg").is_file()) - .unwrap_or(false); - Error::BrokenSymlink(BrokenSymlink { - path: executable.to_path_buf(), - venv, - }) - } else { - Error::NotFound(executable.to_path_buf()) - } - } else { - err.into() - } - })?; + let modified = Timestamp::from_path(canonical).map_err(handle_io_error)?; // Read from the cache. 
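// Illustrative: if the base interpreter behind a venv is upgraded in place,
// the canonical executable's timestamp changes, `cached.timestamp` no longer
// matches `modified`, and the interpreter is re-queried rather than served
// from a stale cache entry.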
if cache @@ -972,7 +1025,7 @@ impl InterpreterInfo { Ok(cached) => { if cached.timestamp == modified { trace!( - "Cached interpreter info for Python {}, skipping probing: {}", + "Found cached interpreter info for Python {}, skipping query of: {}", cached.data.markers.python_full_version(), executable.user_display() ); diff --git a/crates/uv-python/src/lib.rs b/crates/uv-python/src/lib.rs index 024cd5cbc..d408bc199 100644 --- a/crates/uv-python/src/lib.rs +++ b/crates/uv-python/src/lib.rs @@ -9,10 +9,15 @@ pub use crate::discovery::{ PythonPreference, PythonRequest, PythonSource, PythonVariant, VersionRequest, find_python_installations, }; +pub use crate::downloads::PlatformRequest; pub use crate::environment::{InvalidEnvironmentKind, PythonEnvironment}; -pub use crate::implementation::ImplementationName; -pub use crate::installation::{PythonInstallation, PythonInstallationKey}; -pub use crate::interpreter::{BrokenSymlink, Error as InterpreterError, Interpreter}; +pub use crate::implementation::{ImplementationName, LenientImplementationName}; +pub use crate::installation::{ + PythonInstallation, PythonInstallationKey, PythonInstallationMinorVersionKey, +}; +pub use crate::interpreter::{ + BrokenSymlink, Error as InterpreterError, Interpreter, canonicalize_executable, +}; pub use crate::pointer_size::PointerSize; pub use crate::prefix::Prefix; pub use crate::python_version::PythonVersion; @@ -114,6 +119,7 @@ mod tests { use indoc::{formatdoc, indoc}; use temp_env::with_vars; use test_log::test; + use uv_configuration::PreviewMode; use uv_static::EnvVars; use uv_cache::Cache; @@ -446,6 +452,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -460,6 +467,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -484,6 +492,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -505,6 +514,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -566,6 +576,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -597,6 +608,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) }); assert!( @@ -633,6 +645,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::default(), &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -664,6 +677,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -685,6 +699,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -710,6 +725,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -735,6 +751,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -757,6 +774,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -790,6 +808,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) 
})??; @@ -823,6 +842,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -844,6 +864,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -865,6 +886,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -898,6 +920,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -934,6 +957,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -964,6 +988,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert!( @@ -998,6 +1023,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1023,6 +1049,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1049,6 +1076,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1073,6 +1101,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )?; @@ -1094,6 +1123,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1116,6 +1146,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1148,6 +1179,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1168,6 +1200,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1194,6 +1227,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -1211,6 +1245,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -1239,6 +1274,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1276,6 +1312,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1303,6 +1340,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1327,6 +1365,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1351,6 +1390,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1375,6 +1415,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1412,6 +1453,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1439,6 +1481,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ 
-1455,6 +1498,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1471,6 +1515,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1492,6 +1537,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1508,6 +1554,7 @@ mod tests { EnvironmentPreference::OnlySystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )?; @@ -1529,6 +1576,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1543,6 +1591,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1556,6 +1605,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1584,6 +1634,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1599,6 +1650,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1628,6 +1680,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1643,6 +1696,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1658,6 +1712,7 @@ mod tests { EnvironmentPreference::OnlyVirtual, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1673,6 +1728,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1696,6 +1752,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1710,6 +1767,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1733,6 +1791,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1752,6 +1811,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }, )??; @@ -1780,6 +1840,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1801,6 +1862,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1830,6 +1892,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1845,6 +1908,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1871,6 +1935,7 @@ mod tests { EnvironmentPreference::ExplicitSystem, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -1895,6 +1960,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -1911,6 +1977,7 @@ 
mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1925,6 +1992,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1950,6 +2018,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1964,6 +2033,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -1989,6 +2059,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2015,6 +2086,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2041,6 +2113,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2067,6 +2140,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2093,6 +2167,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2120,6 +2195,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })?; assert!( @@ -2141,6 +2217,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2155,6 +2232,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2180,6 +2258,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2194,6 +2273,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; assert_eq!( @@ -2231,6 +2311,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2248,6 +2329,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2289,6 +2371,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2306,6 +2389,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2342,6 +2426,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2364,6 +2449,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2386,6 +2472,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) }) .unwrap() @@ -2424,6 +2511,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; @@ -2476,6 +2564,7 @@ mod tests { EnvironmentPreference::Any, PythonPreference::OnlySystem, &context.cache, + PreviewMode::Disabled, ) })??; diff --git a/crates/uv-python/src/managed.rs b/crates/uv-python/src/managed.rs index edf4087e7..ad1dacac6 100644 --- 
a/crates/uv-python/src/managed.rs +++ b/crates/uv-python/src/managed.rs @@ -2,6 +2,8 @@ use core::fmt; use std::cmp::Reverse; use std::ffi::OsStr; use std::io::{self, Write}; +#[cfg(windows)] +use std::os::windows::fs::MetadataExt; use std::path::{Path, PathBuf}; use std::str::FromStr; @@ -10,8 +12,11 @@ use itertools::Itertools; use same_file::is_same_file; use thiserror::Error; use tracing::{debug, warn}; +use uv_configuration::PreviewMode; +#[cfg(windows)] +use windows_sys::Win32::Storage::FileSystem::FILE_ATTRIBUTE_REPARSE_POINT; -use uv_fs::{LockedFile, Simplified, symlink_or_copy_file}; +use uv_fs::{LockedFile, Simplified, replace_symlink, symlink_or_copy_file}; use uv_state::{StateBucket, StateStore}; use uv_static::EnvVars; use uv_trampoline_builder::{Launcher, windows_python_launcher}; @@ -25,7 +30,9 @@ use crate::libc::LibcDetectionError; use crate::platform::Error as PlatformError; use crate::platform::{Arch, Libc, Os}; use crate::python_version::PythonVersion; -use crate::{PythonRequest, PythonVariant, macos_dylib, sysconfig}; +use crate::{ + PythonInstallationMinorVersionKey, PythonRequest, PythonVariant, macos_dylib, sysconfig, +}; #[derive(Error, Debug)] pub enum Error { @@ -51,6 +58,8 @@ pub enum Error { }, #[error("Missing expected Python executable at {}", _0.user_display())] MissingExecutable(PathBuf), + #[error("Missing expected target directory for Python minor version link at {}", _0.user_display())] + MissingPythonMinorVersionLinkTargetDirectory(PathBuf), #[error("Failed to create canonical Python executable at {} from {}", to.user_display(), from.user_display())] CanonicalizeExecutable { from: PathBuf, @@ -65,6 +74,13 @@ pub enum Error { #[source] err: io::Error, }, + #[error("Failed to create Python minor version link directory at {} from {}", to.user_display(), from.user_display())] + PythonMinorVersionLinkDirectory { + from: PathBuf, + to: PathBuf, + #[source] + err: io::Error, + }, #[error("Failed to create directory for Python executable link at {}", to.user_display())] ExecutableDirectory { to: PathBuf, @@ -339,18 +355,14 @@ impl ManagedPythonInstallation { /// The path to this managed installation's Python executable. /// - /// If the installation has multiple execututables i.e., `python`, `python3`, etc., this will + /// If the installation has multiple executables i.e., `python`, `python3`, etc., this will /// return the _canonical_ executable name which the other names link to. On Unix, this is /// `python{major}.{minor}{variant}` and on Windows, this is `python{exe}`. /// /// If windowed is true, `pythonw.exe` is selected over `python.exe` on windows, with no changes /// on non-windows. 
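/// For example (illustrative): a CPython 3.10 installation resolves to
/// `bin/python3.10` under the installation directory on Unix, and to
/// `python.exe` at the installation root on Windows.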
pub fn executable(&self, windowed: bool) -> PathBuf { - let implementation = match self.implementation() { - ImplementationName::CPython => "python", - ImplementationName::PyPy => "pypy", - ImplementationName::GraalPy => "graalpy", - }; + let implementation = self.implementation().executable_name(); let version = match self.implementation() { ImplementationName::CPython => { @@ -383,13 +395,11 @@ impl ManagedPythonInstallation { exe = std::env::consts::EXE_SUFFIX ); - let executable = if cfg!(unix) || *self.implementation() == ImplementationName::GraalPy { - self.python_dir().join("bin").join(name) - } else if cfg!(windows) { - self.python_dir().join(name) - } else { - unimplemented!("Only Windows and Unix systems are supported.") - }; + let executable = executable_path_from_base( + self.python_dir().as_path(), + &name, + &LenientImplementationName::from(*self.implementation()), + ); // Workaround for python-build-standalone v20241016 which is missing the standard // `python.exe` executable in free-threaded distributions on Windows. @@ -442,6 +452,10 @@ impl ManagedPythonInstallation { &self.key } + pub fn minor_version_key(&self) -> &PythonInstallationMinorVersionKey { + PythonInstallationMinorVersionKey::ref_cast(&self.key) + } + pub fn satisfies(&self, request: &PythonRequest) -> bool { match request { PythonRequest::File(path) => self.executable(false) == *path, @@ -503,6 +517,30 @@ impl ManagedPythonInstallation { Ok(()) } + /// Ensure the environment contains the symlink directory (or junction on Windows) + /// pointing to the patch directory for this minor version. + pub fn ensure_minor_version_link(&self, preview: PreviewMode) -> Result<(), Error> { + if let Some(minor_version_link) = PythonMinorVersionLink::from_installation(self, preview) { + minor_version_link.create_directory()?; + } + Ok(()) + } + + /// If the environment contains a symlink directory (or junction on Windows), + /// update it to the latest patch directory for this minor version. + /// + /// Unlike [`ensure_minor_version_link`], this will not create a new symlink directory + /// if one doesn't already exist. + pub fn update_minor_version_link(&self, preview: PreviewMode) -> Result<(), Error> { + if let Some(minor_version_link) = PythonMinorVersionLink::from_installation(self, preview) { + if !minor_version_link.exists() { + return Ok(()); + } + minor_version_link.create_directory()?; + } + Ok(()) + } + /// Ensure the environment is marked as externally managed with the /// standard `EXTERNALLY-MANAGED` file. pub fn ensure_externally_managed(&self) -> Result<(), Error> { @@ -567,54 +605,8 @@ impl ManagedPythonInstallation { Ok(()) } - /// Create a link to the managed Python executable. - /// - /// If the file already exists at the target path, an error will be returned.
- pub fn create_bin_link(&self, target: &Path) -> Result<(), Error> { - let python = self.executable(false); - - let bin = target.parent().ok_or(Error::NoExecutableDirectory)?; - fs_err::create_dir_all(bin).map_err(|err| Error::ExecutableDirectory { - to: bin.to_path_buf(), - err, - })?; - - if cfg!(unix) { - // Note this will never copy on Unix — we use it here to allow compilation on Windows - match symlink_or_copy_file(&python, target) { - Ok(()) => Ok(()), - Err(err) if err.kind() == io::ErrorKind::NotFound => { - Err(Error::MissingExecutable(python.clone())) - } - Err(err) => Err(Error::LinkExecutable { - from: python, - to: target.to_path_buf(), - err, - }), - } - } else if cfg!(windows) { - // TODO(zanieb): Install GUI launchers as well - let launcher = windows_python_launcher(&python, false)?; - - // OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach - // error context anyway - #[allow(clippy::disallowed_types)] - { - std::fs::File::create_new(target) - .and_then(|mut file| file.write_all(launcher.as_ref())) - .map_err(|err| Error::LinkExecutable { - from: python, - to: target.to_path_buf(), - err, - }) - } - } else { - unimplemented!("Only Windows and Unix systems are supported.") - } - } - /// Returns `true` if the path is a link to this installation's binary, e.g., as created by - [`ManagedPythonInstallation::create_bin_link`]. + [`create_bin_link`]. pub fn is_bin_link(&self, path: &Path) -> bool { if cfg!(unix) { is_same_file(path, self.executable(false)).unwrap_or_default() @@ -625,7 +617,11 @@ impl ManagedPythonInstallation { if !matches!(launcher.kind, uv_trampoline_builder::LauncherKind::Python) { return false; } - launcher.python_path == self.executable(false) + // We canonicalize the target path of the launcher in case it includes a minor version + // junction directory. If canonicalization fails, we check against the launcher path + // directly. + dunce::canonicalize(&launcher.python_path).unwrap_or(launcher.python_path) + == self.executable(false) } else { unreachable!("Only Windows and Unix are supported") } @@ -669,6 +665,229 @@ impl ManagedPythonInstallation { } } +/// A representation of a minor version symlink directory (or junction on Windows) +/// linking to the home directory of a Python installation. +#[derive(Clone, Debug)] +pub struct PythonMinorVersionLink { + /// The symlink directory (or junction on Windows). + pub symlink_directory: PathBuf, + /// The full path to the executable including the symlink directory + /// (or junction on Windows). + pub symlink_executable: PathBuf, + /// The target directory for the symlink. This is the home directory for + /// a Python installation. + pub target_directory: PathBuf, +} + +impl PythonMinorVersionLink { + /// Attempt to derive a path from an executable path that substitutes a minor + /// version symlink directory (or junction on Windows) for the patch version + /// directory. + /// + /// The implementation is expected to be CPython and, on Unix, the base Python is + /// expected to be in a `bin` directory. If either condition isn't true, + /// return [`None`]. + /// + /// # Examples + /// + /// ## Unix + /// For a Python 3.10.8 installation in `/path/to/uv/python/cpython-3.10.8-macos-aarch64-none/bin/python3.10`, + /// the symlink directory would be `/path/to/uv/python/cpython-3.10-macos-aarch64-none` and the executable path including the + /// symlink directory would be `/path/to/uv/python/cpython-3.10-macos-aarch64-none/bin/python3.10`.
+ /// + /// ## Windows + /// For a Python 3.10.8 installation in `C:\path\to\uv\python\cpython-3.10.8-windows-x86_64-none\python.exe`, + /// the junction would be `C:\path\to\uv\python\cpython-3.10-windows-x86_64-none` and the executable path including the + /// junction would be `C:\path\to\uv\python\cpython-3.10-windows-x86_64-none\python.exe`. + pub fn from_executable( + executable: &Path, + key: &PythonInstallationKey, + preview: PreviewMode, + ) -> Option { + let implementation = key.implementation(); + if !matches!( + implementation, + LenientImplementationName::Known(ImplementationName::CPython) + ) { + // We don't currently support transparent upgrades for PyPy or GraalPy. + return None; + } + let executable_name = executable + .file_name() + .expect("Executable file name should exist"); + let symlink_directory_name = PythonInstallationMinorVersionKey::ref_cast(key).to_string(); + let parent = executable + .parent() + .expect("Executable should have parent directory"); + + // The home directory of the Python installation + let target_directory = if cfg!(unix) { + if parent + .components() + .next_back() + .is_some_and(|c| c.as_os_str() == "bin") + { + parent.parent()?.to_path_buf() + } else { + return None; + } + } else if cfg!(windows) { + parent.to_path_buf() + } else { + unimplemented!("Only Windows and Unix systems are supported.") + }; + let symlink_directory = target_directory.with_file_name(symlink_directory_name); + // If this would create a circular link, return `None`. + if target_directory == symlink_directory { + return None; + } + // The full executable path including the symlink directory (or junction). + let symlink_executable = executable_path_from_base( + symlink_directory.as_path(), + &executable_name.to_string_lossy(), + implementation, + ); + let minor_version_link = Self { + symlink_directory, + symlink_executable, + target_directory, + }; + // If preview mode is disabled, still return a `MinorVersionSymlink` for + // existing symlinks, allowing continued operations without the `--preview` + // flag after initial symlink directory installation. + if preview.is_disabled() && !minor_version_link.exists() { + return None; + } + Some(minor_version_link) + } + + pub fn from_installation( + installation: &ManagedPythonInstallation, + preview: PreviewMode, + ) -> Option { + PythonMinorVersionLink::from_executable( + installation.executable(false).as_path(), + installation.key(), + preview, + ) + } + + pub fn create_directory(&self) -> Result<(), Error> { + match replace_symlink( + self.target_directory.as_path(), + self.symlink_directory.as_path(), + ) { + Ok(()) => { + debug!( + "Created link {} -> {}", + &self.symlink_directory.user_display(), + &self.target_directory.user_display(), + ); + } + Err(err) if err.kind() == io::ErrorKind::NotFound => { + return Err(Error::MissingPythonMinorVersionLinkTargetDirectory( + self.target_directory.clone(), + )); + } + Err(err) if err.kind() == io::ErrorKind::AlreadyExists => {} + Err(err) => { + return Err(Error::PythonMinorVersionLinkDirectory { + from: self.symlink_directory.clone(), + to: self.target_directory.clone(), + err, + }); + } + } + Ok(()) + } + + pub fn exists(&self) -> bool { + #[cfg(unix)] + { + self.symlink_directory + .symlink_metadata() + .map(|metadata| metadata.file_type().is_symlink()) + .unwrap_or(false) + } + #[cfg(windows)] + { + self.symlink_directory + .symlink_metadata() + .is_ok_and(|metadata| { + // Check that this is a reparse point, which indicates this + // is a symlink or junction. 
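// Illustrative: `FILE_ATTRIBUTE_REPARSE_POINT` is a single bit in the
// attribute mask, so the bitwise AND below is nonzero exactly when the
// directory is a reparse point, which covers both symlinks and junctions.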
+ (metadata.file_attributes() & FILE_ATTRIBUTE_REPARSE_POINT) != 0 + }) + } + } +} + +/// Derive the full path to an executable from the given base path and executable +/// name. On Unix, this is, e.g., `<base>/bin/python3.10`. On Windows, this is, +/// e.g., `<base>\python.exe`. +fn executable_path_from_base( + base: &Path, + executable_name: &str, + implementation: &LenientImplementationName, +) -> PathBuf { + if cfg!(unix) + || matches!( + implementation, + &LenientImplementationName::Known(ImplementationName::GraalPy) + ) + { + base.join("bin").join(executable_name) + } else if cfg!(windows) { + base.join(executable_name) + } else { + unimplemented!("Only Windows and Unix systems are supported.") + } +} + +/// Create a link to a managed Python executable. +/// +/// If the file already exists at the link path, an error will be returned. +pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), Error> { + let link_parent = link.parent().ok_or(Error::NoExecutableDirectory)?; + fs_err::create_dir_all(link_parent).map_err(|err| Error::ExecutableDirectory { + to: link_parent.to_path_buf(), + err, + })?; + + if cfg!(unix) { + // Note this will never copy on Unix — we use it here to allow compilation on Windows + match symlink_or_copy_file(&executable, link) { + Ok(()) => Ok(()), + Err(err) if err.kind() == io::ErrorKind::NotFound => { + Err(Error::MissingExecutable(executable.clone())) + } + Err(err) => Err(Error::LinkExecutable { + from: executable, + to: link.to_path_buf(), + err, + }), + } + } else if cfg!(windows) { + // TODO(zanieb): Install GUI launchers as well + let launcher = windows_python_launcher(&executable, false)?; + + // OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach + // error context anyway + #[allow(clippy::disallowed_types)] + { + std::fs::File::create_new(link) + .and_then(|mut file| file.write_all(launcher.as_ref())) + .map_err(|err| Error::LinkExecutable { + from: executable, + to: link.to_path_buf(), + err, + }) + } + } else { + unimplemented!("Only Windows and Unix systems are supported.") + } +} + // TODO(zanieb): Only used in tests now. /// Generate a platform portion of a key from the environment. pub fn platform_key_from_env() -> Result<String, Error> { diff --git a/crates/uv-python/src/platform.rs b/crates/uv-python/src/platform.rs index 025592c37..ce8620ae2 100644 --- a/crates/uv-python/src/platform.rs +++ b/crates/uv-python/src/platform.rs @@ -43,15 +43,36 @@ impl Ord for Arch { return self.variant.cmp(&other.variant); } - let native = Arch::from_env(); + // For the time being, manually make aarch64 windows disfavored + // on its own host platform, because most packages don't have wheels for + // aarch64 windows, making emulation more useful than native execution! + // + // The reason we do this in "sorting" and not "supports" is so that we don't + // *refuse* to use aarch64 windows pythons if they happen to be installed + // and nothing else is available. + // + // Similarly if someone manually requests an aarch64 windows install, we + // should respect that request (this is the way users should "override" + // this behaviour).
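// Illustrative effect: on an aarch64 Windows host, `preferred` is x86_64, so
// an emulated x86_64 interpreter sorts ahead of a native aarch64 one; on all
// other hosts, the native architecture still sorts first.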
+ let preferred = if cfg!(all(windows, target_arch = "aarch64")) { + Arch { + family: target_lexicon::Architecture::X86_64, + variant: None, + } + } else { + // Prefer native architectures + Arch::from_env() + }; - // Prefer native architectures - match (self.family == native.family, other.family == native.family) { + match ( + self.family == preferred.family, + other.family == preferred.family, + ) { (true, true) => unreachable!(), (true, false) => std::cmp::Ordering::Less, (false, true) => std::cmp::Ordering::Greater, (false, false) => { - // Both non-native, fallback to lexicographic order + // Both non-preferred, fallback to lexicographic order self.family.to_string().cmp(&other.family.to_string()) } } diff --git a/crates/uv-python/src/python_version.rs b/crates/uv-python/src/python_version.rs index 30dfccecd..c5d8f6365 100644 --- a/crates/uv-python/src/python_version.rs +++ b/crates/uv-python/src/python_version.rs @@ -1,3 +1,5 @@ +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::fmt::{Display, Formatter}; use std::ops::Deref; use std::str::FromStr; @@ -5,7 +7,7 @@ use std::str::FromStr; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, StringVersion}; -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct PythonVersion(StringVersion); impl From for PythonVersion { @@ -65,26 +67,16 @@ impl FromStr for PythonVersion { #[cfg(feature = "schemars")] impl schemars::JsonSchema for PythonVersion { - fn schema_name() -> String { - String::from("PythonVersion") + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("PythonVersion") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - pattern: Some(r"^3\.\d+(\.\d+)?$".to_string()), - ..schemars::schema::StringValidation::default() - })), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some( - "A Python version specifier, e.g. `3.11` or `3.12.4`.".to_string(), - ), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": r"^3\.\d+(\.\d+)?$", + "description": "A Python version specifier, e.g. `3.11` or `3.12.4`." + }) } } diff --git a/crates/uv-python/src/sysconfig/generated_mappings.rs b/crates/uv-python/src/sysconfig/generated_mappings.rs index 32a432b54..2611c1ac0 100644 --- a/crates/uv-python/src/sysconfig/generated_mappings.rs +++ b/crates/uv-python/src/sysconfig/generated_mappings.rs @@ -1,7 +1,7 @@ //! DO NOT EDIT //! //! Generated with `cargo run dev generate-sysconfig-metadata` -//! Targets from +//! Targets from //! 
#![allow(clippy::all)] #![cfg_attr(any(), rustfmt::skip)] @@ -15,7 +15,6 @@ use crate::sysconfig::replacements::{ReplacementEntry, ReplacementMode}; pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock>> = LazyLock::new(|| { BTreeMap::from_iter([ ("BLDSHARED".to_string(), vec![ - ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/aarch64-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabi-gcc".to_string() }, to: "cc".to_string() }, ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/arm-linux-gnueabihf-gcc".to_string() }, to: "cc".to_string() }, ReplacementEntry { mode: ReplacementMode::Partial { from: "/usr/bin/mips-linux-gnu-gcc".to_string() }, to: "cc".to_string() }, @@ -28,7 +27,6 @@ pub(crate) static DEFAULT_VARIABLE_UPDATES: LazyLock { /// The path to stop discovery at. stop_discovery_at: Option<&'a Path>, - /// When `no_config` is set, Python version files will be ignored. + /// Ignore Python version files. /// /// Discovery will still run in order to display a log about the ignored file. no_config: bool, + /// Whether `.python-version` or `.python-versions` should be preferred. preference: FilePreference, + /// Whether to ignore local version files, and only search for a global one. + no_local: bool, } impl<'a> DiscoveryOptions<'a> { @@ -62,6 +65,11 @@ impl<'a> DiscoveryOptions<'a> { ..self } } + + #[must_use] + pub fn with_no_local(self, no_local: bool) -> Self { + Self { no_local, ..self } + } } impl PythonVersionFile { @@ -70,33 +78,38 @@ impl PythonVersionFile { working_directory: impl AsRef, options: &DiscoveryOptions<'_>, ) -> Result, std::io::Error> { - let Some(path) = Self::find_nearest(&working_directory, options) else { - if let Some(stop_discovery_at) = options.stop_discovery_at { - if stop_discovery_at == working_directory.as_ref() { - debug!( - "No Python version file found in workspace: {}", - working_directory.as_ref().display() - ); + let allow_local = !options.no_local; + let Some(path) = allow_local.then(|| { + // First, try to find a local version file. + let local = Self::find_nearest(&working_directory, options); + if local.is_none() { + // Log where we searched for the file, if not found + if let Some(stop_discovery_at) = options.stop_discovery_at { + if stop_discovery_at == working_directory.as_ref() { + debug!( + "No Python version file found in workspace: {}", + working_directory.as_ref().display() + ); + } else { + debug!( + "No Python version file found between working directory `{}` and workspace root `{}`", + working_directory.as_ref().display(), + stop_discovery_at.display() + ); + } } else { debug!( - "No Python version file found between working directory `{}` and workspace root `{}`", - working_directory.as_ref().display(), - stop_discovery_at.display() + "No Python version file found in ancestors of working directory: {}", + working_directory.as_ref().display() ); } - } else { - debug!( - "No Python version file found in ancestors of working directory: {}", - working_directory.as_ref().display() - ); } - // Not found in directory or its ancestors. Looking in user-level config. - return Ok(match user_uv_config_dir() { - Some(user_dir) => Self::discover_user_config(user_dir, options) - .await? 
- .or(None), - None => None, - }); + local + }).flatten().or_else(|| { + // Search for a global config + Self::find_global(options) + }) else { + return Ok(None); }; if options.no_config { @@ -111,20 +124,9 @@ impl PythonVersionFile { Self::try_from_path(path).await } - pub async fn discover_user_config( - user_config_working_directory: impl AsRef, - options: &DiscoveryOptions<'_>, - ) -> Result, std::io::Error> { - if !options.no_config { - if let Some(path) = - Self::find_in_directory(user_config_working_directory.as_ref(), options) - .into_iter() - .find(|path| path.is_file()) - { - return Self::try_from_path(path).await; - } - } - Ok(None) + fn find_global(options: &DiscoveryOptions<'_>) -> Option { + let user_config_dir = user_uv_config_dir()?; + Self::find_in_directory(&user_config_dir, options) } fn find_nearest(path: impl AsRef, options: &DiscoveryOptions<'_>) -> Option { diff --git a/crates/uv-python/src/virtualenv.rs b/crates/uv-python/src/virtualenv.rs index 7d72188fc..8b51a5e1b 100644 --- a/crates/uv-python/src/virtualenv.rs +++ b/crates/uv-python/src/virtualenv.rs @@ -32,7 +32,6 @@ pub struct VirtualEnvironment { /// A parsed `pyvenv.cfg` #[derive(Debug, Clone)] -#[allow(clippy::struct_excessive_bools)] pub struct PyVenvConfiguration { /// Was the virtual environment created with the `virtualenv` package? pub(crate) virtualenv: bool, @@ -131,14 +130,14 @@ pub(crate) fn virtualenv_from_working_dir() -> Result, Error> { for dir in current_dir.ancestors() { // If we're _within_ a virtualenv, return it. - if dir.join("pyvenv.cfg").is_file() { + if uv_fs::is_virtualenv_base(dir) { return Ok(Some(dir.to_path_buf())); } // Otherwise, search for a `.venv` directory. let dot_venv = dir.join(".venv"); if dot_venv.is_dir() { - if !dot_venv.join("pyvenv.cfg").is_file() { + if !uv_fs::is_virtualenv_base(&dot_venv) { return Err(Error::MissingPyVenvCfg(dot_venv)); } return Ok(Some(dot_venv)); diff --git a/crates/uv-redacted/src/lib.rs b/crates/uv-redacted/src/lib.rs index cd023ccbf..5c9a8e278 100644 --- a/crates/uv-redacted/src/lib.rs +++ b/crates/uv-redacted/src/lib.rs @@ -177,7 +177,9 @@ impl FromStr for DisplaySafeUrl { } fn is_ssh_git_username(url: &Url) -> bool { - matches!(url.scheme(), "ssh" | "git+ssh") && url.username() == "git" && url.password().is_none() + matches!(url.scheme(), "ssh" | "git+ssh" | "git+https") + && url.username() == "git" + && url.password().is_none() } fn display_with_redacted_credentials( diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap index ad5e2a0e6..e13ab75b7 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__line-endings-poetry-with-hashes.txt.snap @@ -1,6 +1,7 @@ --- source: crates/uv-requirements-txt/src/lib.rs expression: actual +snapshot_kind: text --- RequirementsTxt { requirements: [ @@ -23,7 +24,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -54,7 +55,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( 
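The discovery rework above collapses `discover_user_config` into a private `find_global` and gates the local lookup behind the new `no_local` flag. A simplified, self-contained sketch of the resulting lookup order; the real code also honors `stop_discovery_at`, the `.python-versions` variant, and emits debug logs, and `user_config_dir` here is a hypothetical stand-in for `user_uv_config_dir`:

```rust
use std::path::{Path, PathBuf};

/// Walk ancestors looking for a `.python-version` file.
fn find_nearest(dir: &Path) -> Option<PathBuf> {
    dir.ancestors()
        .map(|dir| dir.join(".python-version"))
        .find(|path| path.is_file())
}

/// Hypothetical stand-in for `user_uv_config_dir()`.
fn user_config_dir() -> Option<PathBuf> {
    std::env::var_os("XDG_CONFIG_HOME").map(|base| PathBuf::from(base).join("uv"))
}

/// Local version files win unless `no_local` is set; otherwise fall back to
/// a single global file in the user-level configuration directory.
fn resolve_version_file(cwd: &Path, no_local: bool) -> Option<PathBuf> {
    let local = (!no_local).then(|| find_nearest(cwd)).flatten();
    local.or_else(|| {
        let global = user_config_dir()?.join(".python-version");
        global.is_file().then_some(global)
    })
}
```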
"/poetry-with-hashes.txt", @@ -85,7 +86,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0' and sys_platform == 'win32', + marker: python_full_version >= '3.8' and python_full_version < '4' and sys_platform == 'win32', origin: Some( File( "/poetry-with-hashes.txt", @@ -116,7 +117,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -148,7 +149,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", diff --git a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap index ad5e2a0e6..e13ab75b7 100644 --- a/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap +++ b/crates/uv-requirements-txt/src/snapshots/uv_requirements_txt__test__parse-poetry-with-hashes.txt.snap @@ -1,6 +1,7 @@ --- source: crates/uv-requirements-txt/src/lib.rs expression: actual +snapshot_kind: text --- RequirementsTxt { requirements: [ @@ -23,7 +24,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -54,7 +55,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -85,7 +86,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0' and sys_platform == 'win32', + marker: python_full_version >= '3.8' and python_full_version < '4' and sys_platform == 'win32', origin: Some( File( "/poetry-with-hashes.txt", @@ -116,7 +117,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", @@ -148,7 +149,7 @@ RequirementsTxt { ), ), ), - marker: python_full_version >= '3.8' and python_full_version < '4.0', + marker: python_full_version >= '3.8' and python_full_version < '4', origin: Some( File( "/poetry-with-hashes.txt", diff --git a/crates/uv-requirements/src/source_tree.rs b/crates/uv-requirements/src/source_tree.rs index a540e4642..39fbe453b 100644 --- a/crates/uv-requirements/src/source_tree.rs +++ b/crates/uv-requirements/src/source_tree.rs @@ -1,13 +1,13 @@ -use std::path::{Path, PathBuf}; +use std::borrow::Cow; +use std::path::Path; use std::sync::Arc; -use std::{borrow::Cow, collections::BTreeMap}; use anyhow::{Context, Result}; use futures::TryStreamExt; use futures::stream::FuturesOrdered; use url::Url; -use uv_configuration::{DependencyGroups, ExtrasSpecification}; +use uv_configuration::ExtrasSpecification; use uv_distribution::{DistributionDatabase, FlatRequiresDist, Reporter, RequiresDist}; use uv_distribution_types::Requirement; use uv_distribution_types::{ @@ -37,8 +37,6 @@ pub struct SourceTreeResolution { pub struct SourceTreeResolver<'a, Context: BuildContext> { /// The extras to include when resolving 
requirements. extras: &'a ExtrasSpecification, - /// The groups to include when resolving requirements. - groups: &'a BTreeMap, /// The hash policy to enforce. hasher: &'a HashStrategy, /// The in-memory index for resolving dependencies. @@ -51,14 +49,12 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { /// Instantiate a new [`SourceTreeResolver`] for a given set of `source_trees`. pub fn new( extras: &'a ExtrasSpecification, - groups: &'a BTreeMap, hasher: &'a HashStrategy, index: &'a InMemoryIndex, database: DistributionDatabase<'a, Context>, ) -> Self { Self { extras, - groups, hasher, index, database, @@ -101,46 +97,17 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> { let mut requirements = Vec::new(); - // Resolve any groups associated with this path - let default_groups = DependencyGroups::default(); - let groups = self.groups.get(path).unwrap_or(&default_groups); - // Flatten any transitive extras and include dependencies // (unless something like --only-group was passed) - if groups.prod() { - requirements.extend( - FlatRequiresDist::from_requirements(metadata.requires_dist, &metadata.name) - .into_iter() - .map(|requirement| Requirement { - origin: Some(origin.clone()), - marker: requirement.marker.simplify_extras(&extras), - ..requirement - }), - ); - } - - // Apply dependency-groups - for (group_name, group) in &metadata.dependency_groups { - if groups.contains(group_name) { - requirements.extend(group.iter().cloned().map(|group| Requirement { - origin: Some(RequirementOrigin::Group( - path.to_path_buf(), - metadata.name.clone(), - group_name.clone(), - )), - ..group - })); - } - } - // Complain if dependency groups are named that don't appear. - for name in groups.explicit_names() { - if !metadata.dependency_groups.contains_key(name) { - return Err(anyhow::anyhow!( - "The dependency group '{name}' was not found in the project: {}", - path.user_display() - )); - } - } + requirements.extend( + FlatRequiresDist::from_requirements(metadata.requires_dist, &metadata.name) + .into_iter() + .map(|requirement| Requirement { + origin: Some(origin.clone()), + marker: requirement.marker.simplify_extras(&extras), + ..requirement + }), + ); let requirements = requirements.into_boxed_slice(); let project = metadata.name; diff --git a/crates/uv-requirements/src/specification.rs b/crates/uv-requirements/src/specification.rs index a0b122de8..4c5741392 100644 --- a/crates/uv-requirements/src/specification.rs +++ b/crates/uv-requirements/src/specification.rs @@ -290,52 +290,18 @@ impl RequirementsSpecification { if !groups.is_empty() { let mut group_specs = BTreeMap::new(); for (path, groups) in groups { - // Conceptually pip `--group` flags just add the group referred to by the file. - // In uv semantics this would be like `--only-group`, however if you do this: - // - // uv pip install -r pyproject.toml --group pyproject.toml:foo - // - // We don't want to discard the package listed by `-r` in the way `--only-group` - // would. So we check to see if any other source wants to add this path, and use - // that to determine if we're doing `--group` or `--only-group` semantics. - // - // Note that it's fine if a file gets referred to multiple times by - // different-looking paths (like `./pyproject.toml` vs `pyproject.toml`). We're - // specifically trying to disambiguate in situations where the `--group` *happens* - // to match with an unrelated argument, and `--only-group` would be overzealous! 
- let source_exists_without_group = requirement_sources - .iter() - .any(|source| source.source_trees.contains(&path)); - let (group, only_group) = if source_exists_without_group { - (groups, Vec::new()) - } else { - (Vec::new(), groups) - }; let group_spec = DependencyGroups::from_args( false, false, false, - group, + Vec::new(), Vec::new(), false, - only_group, + groups, false, ); - - // If we're doing `--only-group` semantics it's because only `--group` flags referred - // to this file, and so we need to make sure to add it to the list of sources! - if !source_exists_without_group { - let source = Self::from_source( - &RequirementsSource::PyprojectToml(path.clone()), - client_builder, - ) - .await?; - requirement_sources.push(source); - } - group_specs.insert(path, group_spec); } - spec.groups = group_specs; } diff --git a/crates/uv-resolver/src/error.rs b/crates/uv-resolver/src/error.rs index adbdc3cc7..0916f54ac 100644 --- a/crates/uv-resolver/src/error.rs +++ b/crates/uv-resolver/src/error.rs @@ -3,6 +3,7 @@ use std::fmt::Formatter; use std::sync::Arc; use indexmap::IndexSet; +use itertools::Itertools; use owo_colors::OwoColorize; use pubgrub::{ DefaultStringReporter, DerivationTree, Derived, External, Range, Ranges, Reporter, Term, @@ -17,6 +18,8 @@ use uv_normalize::{ExtraName, InvalidNameError, PackageName}; use uv_pep440::{LocalVersionSlice, LowerBound, Version, VersionSpecifier}; use uv_pep508::{MarkerEnvironment, MarkerExpression, MarkerTree, MarkerValueVersion}; use uv_platform_tags::Tags; +use uv_pypi_types::ParsedUrl; +use uv_redacted::DisplaySafeUrl; use uv_static::EnvVars; use crate::candidate_selector::CandidateSelector; @@ -56,11 +59,14 @@ pub enum ResolveError { } else { format!(" in {env}") }, - urls.join("\n- "), + urls.iter() + .map(|url| format!("{}{}", DisplaySafeUrl::from(url.clone()), if url.is_editable() { " (editable)" } else { "" })) + .collect::>() + .join("\n- ") )] ConflictingUrls { package_name: PackageName, - urls: Vec, + urls: Vec, env: ResolverEnvironment, }, @@ -71,11 +77,14 @@ pub enum ResolveError { } else { format!(" in {env}") }, - indexes.join("\n- "), + indexes.iter() + .map(std::string::ToString::to_string) + .collect::>() + .join("\n- ") )] ConflictingIndexesForEnvironment { package_name: PackageName, - indexes: Vec, + indexes: Vec, env: ResolverEnvironment, }, @@ -148,7 +157,7 @@ impl From> for ResolveError { } } -pub(crate) type ErrorTree = DerivationTree, UnavailableReason>; +pub type ErrorTree = DerivationTree, UnavailableReason>; /// A wrapper around [`pubgrub::error::NoSolutionError`] that displays a resolution failure report. pub struct NoSolutionError { @@ -359,6 +368,11 @@ impl NoSolutionError { NoSolutionHeader::new(self.env.clone()) } + /// Get the conflict derivation tree for external analysis + pub fn derivation_tree(&self) -> &ErrorTree { + &self.error + } + /// Hint at limiting the resolver environment if universal resolution failed for a target /// that is not the current platform or not the current Python version. fn hint_disjoint_targets(&self, f: &mut Formatter) -> std::fmt::Result { @@ -396,6 +410,15 @@ impl NoSolutionError { } Ok(()) } + + /// Get the packages that are involved in this error. + pub fn packages(&self) -> impl Iterator { + self.error + .packages() + .into_iter() + .filter_map(|p| p.name()) + .unique() + } } impl std::fmt::Debug for NoSolutionError { @@ -1213,6 +1236,69 @@ impl SentinelRange<'_> { } } +/// A prefix match, e.g., `==2.4.*`, which is desugared to a range like `>=2.4.dev0,<2.5.dev0`. 
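The new `derivation_tree` and `packages` accessors on `NoSolutionError` (together with the `ErrorTree` alias made public above) let downstream code inspect a failed resolution instead of only formatting it. A hedged sketch of how a consumer might use them; `report_conflicts` is a hypothetical caller:

```rust
use uv_resolver::{ErrorTree, NoSolutionError};

/// List the packages implicated in a resolution failure.
fn report_conflicts(err: &NoSolutionError) {
    for name in err.packages() {
        eprintln!("involved in conflict: {name}");
    }
    // The raw pubgrub derivation tree remains available for custom analysis.
    let _tree: &ErrorTree = err.derivation_tree();
}
```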
+#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct PrefixMatch<'a> { + version: &'a Version, +} + +impl<'a> PrefixMatch<'a> { + /// Determine whether a given range is equivalent to a prefix match (e.g., `==2.4.*`). + /// + /// Prefix matches are desugared to (e.g.) `>=2.4.dev0,<2.5.dev0`, but we want to render them + /// as `==2.4.*` in error messages. + pub(crate) fn from_range(lower: &'a Bound, upper: &'a Bound) -> Option { + let Bound::Included(lower) = lower else { + return None; + }; + let Bound::Excluded(upper) = upper else { + return None; + }; + if lower.is_pre() || lower.is_post() || lower.is_local() { + return None; + } + if upper.is_pre() || upper.is_post() || upper.is_local() { + return None; + } + if lower.dev() != Some(0) { + return None; + } + if upper.dev() != Some(0) { + return None; + } + if lower.release().len() != upper.release().len() { + return None; + } + + // All segments should be the same, except the last one, which should be incremented. + let num_segments = lower.release().len(); + for (i, (lower, upper)) in lower + .release() + .iter() + .zip(upper.release().iter()) + .enumerate() + { + if i == num_segments - 1 { + if lower + 1 != *upper { + return None; + } + } else { + if lower != upper { + return None; + } + } + } + + Some(PrefixMatch { version: lower }) + } +} + +impl std::fmt::Display for PrefixMatch<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "=={}.*", self.version.only_release()) + } +} + #[derive(Debug)] pub struct NoSolutionHeader { /// The [`ResolverEnvironment`] that caused the failure. diff --git a/crates/uv-resolver/src/exclude_newer.rs b/crates/uv-resolver/src/exclude_newer.rs index 40ec009f8..65fa55cfe 100644 --- a/crates/uv-resolver/src/exclude_newer.rs +++ b/crates/uv-resolver/src/exclude_newer.rs @@ -1,3 +1,5 @@ +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::str::FromStr; use jiff::{Timestamp, ToSpan, tz::TimeZone}; @@ -67,25 +69,15 @@ impl std::fmt::Display for ExcludeNewer { #[cfg(feature = "schemars")] impl schemars::JsonSchema for ExcludeNewer { - fn schema_name() -> String { - "ExcludeNewer".to_string() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("ExcludeNewer") } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - string: Some(Box::new(schemars::schema::StringValidation { - pattern: Some( - r"^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2}))?$".to_string(), - ), - ..schemars::schema::StringValidation::default() - })), - metadata: Some(Box::new(schemars::schema::Metadata { - description: Some("Exclude distributions uploaded after the given timestamp.\n\nAccepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same format (e.g., `2006-12-02`).".to_string()), - ..schemars::schema::Metadata::default() - })), - ..schemars::schema::SchemaObject::default() - } - .into() + fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + schemars::json_schema!({ + "type": "string", + "pattern": r"^\d{4}-\d{2}-\d{2}(T\d{2}:\d{2}:\d{2}(Z|[+-]\d{2}:\d{2}))?$", + "description": "Exclude distributions uploaded after the given timestamp.\n\nAccepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same format (e.g., `2006-12-02`).", + }) } } diff --git a/crates/uv-resolver/src/fork_indexes.rs b/crates/uv-resolver/src/fork_indexes.rs index 
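`PrefixMatch::from_range` undoes the PEP 440 desugaring of `==2.4.*` into `>=2.4.dev0,<2.5.dev0` so error messages can show the form the user wrote. The core of the check, reduced to release segments in a standalone sketch (`is_prefix_pair` is hypothetical; the real code additionally rejects pre/post/local versions and requires `.dev0` on both bounds):

```rust
/// Two equal-length release sequences form a prefix match when every segment
/// matches except the last, which must be incremented by exactly one.
fn is_prefix_pair(lower: &[u64], upper: &[u64]) -> bool {
    let Some(last) = lower.len().checked_sub(1) else {
        return false;
    };
    lower.len() == upper.len()
        && lower[..last] == upper[..last]
        && lower[last] + 1 == upper[last]
}

fn main() {
    assert!(is_prefix_pair(&[2, 4], &[2, 5])); // >=2.4.dev0,<2.5.dev0 == 2.4.*
    assert!(!is_prefix_pair(&[2, 4], &[3, 0])); // not expressible as ==X.Y.*
}
```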
5b39fb626..7283b5cbc 100644 --- a/crates/uv-resolver/src/fork_indexes.rs +++ b/crates/uv-resolver/src/fork_indexes.rs @@ -24,7 +24,7 @@ impl ForkIndexes { ) -> Result<(), ResolveError> { if let Some(previous) = self.0.insert(package_name.clone(), index.clone()) { if &previous != index { - let mut conflicts = vec![previous.url.to_string(), index.url.to_string()]; + let mut conflicts = vec![previous.url, index.url.clone()]; conflicts.sort(); return Err(ResolveError::ConflictingIndexesForEnvironment { package_name: package_name.clone(), diff --git a/crates/uv-resolver/src/fork_urls.rs b/crates/uv-resolver/src/fork_urls.rs index dc1b067c4..dd69f7bf7 100644 --- a/crates/uv-resolver/src/fork_urls.rs +++ b/crates/uv-resolver/src/fork_urls.rs @@ -2,7 +2,6 @@ use std::collections::hash_map::Entry; use rustc_hash::FxHashMap; -use uv_distribution_types::Verbatim; use uv_normalize::PackageName; use uv_pypi_types::VerbatimParsedUrl; @@ -34,10 +33,8 @@ impl ForkUrls { match self.0.entry(package_name.clone()) { Entry::Occupied(previous) => { if previous.get() != url { - let mut conflicting_url = vec![ - previous.get().verbatim.verbatim().to_string(), - url.verbatim.verbatim().to_string(), - ]; + let mut conflicting_url = + vec![previous.get().parsed_url.clone(), url.parsed_url.clone()]; conflicting_url.sort(); return Err(ResolveError::ConflictingUrls { package_name: package_name.clone(), diff --git a/crates/uv-resolver/src/lib.rs b/crates/uv-resolver/src/lib.rs index 3285f9a6a..e91df3a7e 100644 --- a/crates/uv-resolver/src/lib.rs +++ b/crates/uv-resolver/src/lib.rs @@ -1,5 +1,5 @@ pub use dependency_mode::DependencyMode; -pub use error::{NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange}; +pub use error::{ErrorTree, NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange}; pub use exclude_newer::ExcludeNewer; pub use exclusions::Exclusions; pub use flat_index::{FlatDistributions, FlatIndex}; @@ -14,7 +14,6 @@ pub use options::{Flexibility, Options, OptionsBuilder}; pub use preferences::{Preference, PreferenceError, Preferences}; pub use prerelease::PrereleaseMode; pub use python_requirement::PythonRequirement; -pub use requires_python::{RequiresPython, RequiresPythonRange}; pub use resolution::{ AnnotationStyle, ConflictingDistributionError, DisplayResolutionGraph, ResolverOutput, }; @@ -55,10 +54,9 @@ mod options; mod pins; mod preferences; mod prerelease; -mod pubgrub; +pub mod pubgrub; mod python_requirement; mod redirect; -mod requires_python; mod resolution; mod resolution_mode; mod resolver; diff --git a/crates/uv-resolver/src/lock/export/pylock_toml.rs b/crates/uv-resolver/src/lock/export/pylock_toml.rs index 4f3e885ab..d2c2383a5 100644 --- a/crates/uv-resolver/src/lock/export/pylock_toml.rs +++ b/crates/uv-resolver/src/lock/export/pylock_toml.rs @@ -23,8 +23,8 @@ use uv_distribution_filename::{ use uv_distribution_types::{ BuiltDist, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, Edge, FileLocation, GitSourceDist, IndexUrl, Name, Node, PathBuiltDist, PathSourceDist, - RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, RemoteSource, Resolution, - ResolvedDist, SourceDist, ToUrlError, UrlString, + RegistryBuiltDist, RegistryBuiltWheel, RegistrySourceDist, RemoteSource, RequiresPython, + Resolution, ResolvedDist, SourceDist, ToUrlError, UrlString, }; use uv_fs::{PortablePathBuf, relative_to}; use uv_git::{RepositoryReference, ResolvedRepositoryReference}; @@ -40,7 +40,7 @@ use uv_small_str::SmallString; use crate::lock::export::ExportableRequirements; 
use crate::lock::{Source, WheelTagHint, each_element_on_its_line_array}; use crate::resolution::ResolutionGraphNode; -use crate::{Installable, LockError, RequiresPython, ResolverOutput}; +use crate::{Installable, LockError, ResolverOutput}; #[derive(Debug, thiserror::Error)] pub enum PylockTomlErrorKind { diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index faacae736..beeadc912 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -29,8 +29,8 @@ use uv_distribution_types::{ BuiltDist, DependencyMetadata, DirectUrlBuiltDist, DirectUrlSourceDist, DirectorySourceDist, Dist, DistributionMetadata, FileLocation, GitSourceDist, IndexLocations, IndexMetadata, IndexUrl, Name, PathBuiltDist, PathSourceDist, RegistryBuiltDist, RegistryBuiltWheel, - RegistrySourceDist, RemoteSource, Requirement, RequirementSource, ResolvedDist, StaticMetadata, - ToUrlError, UrlString, + RegistrySourceDist, RemoteSource, Requirement, RequirementSource, RequiresPython, ResolvedDist, + SimplifiedMarkerTree, StaticMetadata, ToUrlError, UrlString, }; use uv_fs::{PortablePath, PortablePathBuf, relative_to}; use uv_git::{RepositoryReference, ResolvedRepositoryReference}; @@ -57,12 +57,10 @@ pub use crate::lock::export::{PylockToml, PylockTomlErrorKind}; pub use crate::lock::installable::Installable; pub use crate::lock::map::PackageMap; pub use crate::lock::tree::TreeDisplay; -use crate::requires_python::SimplifiedMarkerTree; use crate::resolution::{AnnotatedDist, ResolutionGraphNode}; use crate::universal_marker::{ConflictMarker, UniversalMarker}; use crate::{ - ExcludeNewer, InMemoryIndex, MetadataResponse, PrereleaseMode, RequiresPython, ResolutionMode, - ResolverOutput, + ExcludeNewer, InMemoryIndex, MetadataResponse, PrereleaseMode, ResolutionMode, ResolverOutput, }; mod export; @@ -770,6 +768,36 @@ impl Lock { } } + /// Checks whether the new requires-python specification is disjoint with + /// the fork markers in this lock file. + /// + /// If they are disjoint, then the union of the fork markers along with the + /// given requires-python specification (converted to a marker tree) are + /// returned. + /// + /// When disjoint, the fork markers in the lock file should be dropped and + /// not used. + pub fn requires_python_coverage( + &self, + new_requires_python: &RequiresPython, + ) -> Result<(), (MarkerTree, MarkerTree)> { + let fork_markers_union = if self.fork_markers().is_empty() { + self.requires_python.to_marker_tree() + } else { + let mut fork_markers_union = MarkerTree::FALSE; + for fork_marker in self.fork_markers() { + fork_markers_union.or(fork_marker.pep508()); + } + fork_markers_union + }; + let new_requires_python = new_requires_python.to_marker_tree(); + if fork_markers_union.is_disjoint(new_requires_python) { + Err((fork_markers_union, new_requires_python)) + } else { + Ok(()) + } + } + /// Returns the TOML representation of this lockfile. pub fn to_toml(&self) -> Result { // Catch a lockfile where the union of fork markers doesn't cover the supported @@ -1450,9 +1478,11 @@ impl Lock { if let Source::Registry(index) = &package.id.source { match index { RegistrySource::Url(url) => { + // Normalize URL before validating. 
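`requires_python_coverage` above guards against reusing stale fork markers: it unions the per-fork markers (falling back to the locked `requires-python` when there are none) and rejects the lockfile if that union is disjoint from the new `requires-python`. The underlying marker algebra, sketched with `uv_pep508` and assuming `MarkerTree` parses via `FromStr`:

```rust
use std::str::FromStr;
use uv_pep508::MarkerTree;

fn main() {
    // Union of the lockfile's fork markers.
    let mut union = MarkerTree::FALSE;
    for fork in [
        "python_full_version >= '3.9' and sys_platform == 'linux'",
        "python_full_version >= '3.9' and sys_platform != 'linux'",
    ] {
        union.or(MarkerTree::from_str(fork).unwrap());
    }
    // A widened requires-python still overlaps the union, so the fork
    // markers remain usable; a disjoint one would invalidate them.
    let new_requires = MarkerTree::from_str("python_full_version >= '3.8'").unwrap();
    assert!(!union.is_disjoint(new_requires));
}
```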
+ let url = url.without_trailing_slash(); if remotes .as_ref() - .is_some_and(|remotes| !remotes.contains(url)) + .is_some_and(|remotes| !remotes.contains(&url)) { let name = &package.id.name; let version = &package @@ -1460,7 +1490,11 @@ impl Lock { .version .as_ref() .expect("version for registry source"); - return Ok(SatisfiesResult::MissingRemoteIndex(name, version, url)); + return Ok(SatisfiesResult::MissingRemoteIndex( + name, + version, + url.into_owned(), + )); } } RegistrySource::Path(path) => { @@ -1765,7 +1799,7 @@ pub enum SatisfiesResult<'lock> { /// The lockfile is missing a workspace member. MissingRoot(PackageName), /// The lockfile referenced a remote index that was not provided - MissingRemoteIndex(&'lock PackageName, &'lock Version, &'lock UrlString), + MissingRemoteIndex(&'lock PackageName, &'lock Version, UrlString), /// The lockfile referenced a local index that was not provided MissingLocalIndex(&'lock PackageName, &'lock Version, &'lock Path), /// A package in the lockfile contains different `requires-dist` metadata than expected. @@ -2343,7 +2377,13 @@ impl Package { let sdist = match &self.id.source { Source::Path(path) => { // A direct path source can also be a wheel, so validate the extension. - let DistExtension::Source(ext) = DistExtension::from_path(path)? else { + let DistExtension::Source(ext) = DistExtension::from_path(path).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })? + else { return Ok(None); }; let install_path = absolute_path(workspace_root, path)?; @@ -2416,7 +2456,14 @@ impl Package { } Source::Direct(url, direct) => { // A direct URL source can also be a wheel, so validate the extension. - let DistExtension::Source(ext) = DistExtension::from_path(url.as_ref())? else { + let DistExtension::Source(ext) = + DistExtension::from_path(url.base_str()).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })? + else { return Ok(None); }; let location = url.to_url().map_err(LockErrorKind::InvalidUrl)?; @@ -2455,7 +2502,12 @@ impl Package { .ok_or_else(|| LockErrorKind::MissingFilename { id: self.id.clone(), })?; - let ext = SourceDistExtension::from_path(filename.as_ref())?; + let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })?; let file = Box::new(uv_distribution_types::File { dist_info_metadata: false, filename: SmallString::from(filename), @@ -2495,19 +2547,41 @@ impl Package { .as_ref() .expect("version for registry source"); - let file_path = sdist.path().ok_or_else(|| LockErrorKind::MissingPath { - name: name.clone(), - version: version.clone(), - })?; - let file_url = - DisplaySafeUrl::from_file_path(workspace_root.join(path).join(file_path)) - .map_err(|()| LockErrorKind::PathToUrl)?; + let file_url = match sdist { + SourceDist::Url { url: file_url, .. } => { + FileLocation::AbsoluteUrl(file_url.clone()) + } + SourceDist::Path { + path: file_path, .. + } => { + let file_path = workspace_root.join(path).join(file_path); + let file_url = + DisplaySafeUrl::from_file_path(&file_path).map_err(|()| { + LockErrorKind::PathToUrl { + path: file_path.into_boxed_path(), + } + })?; + FileLocation::AbsoluteUrl(UrlString::from(file_url)) + } + SourceDist::Metadata { .. 
} => { + return Err(LockErrorKind::MissingPath { + name: name.clone(), + version: version.clone(), + } + .into()); + } + }; let filename = sdist .filename() .ok_or_else(|| LockErrorKind::MissingFilename { id: self.id.clone(), })?; - let ext = SourceDistExtension::from_path(filename.as_ref())?; + let ext = SourceDistExtension::from_path(filename.as_ref()).map_err(|err| { + LockErrorKind::MissingExtension { + id: self.id.clone(), + err, + } + })?; let file = Box::new(uv_distribution_types::File { dist_info_metadata: false, filename: SmallString::from(filename), @@ -2517,9 +2591,10 @@ impl Package { requires_python: None, size: sdist.size(), upload_time_utc_ms: sdist.upload_time().map(Timestamp::as_millisecond), - url: FileLocation::AbsoluteUrl(UrlString::from(file_url)), + url: file_url, yanked: None, }); + let index = IndexUrl::from( VerbatimUrl::from_absolute_path(workspace_root.join(path)) .map_err(LockErrorKind::RegistryVerbatimUrl)?, @@ -3199,7 +3274,9 @@ impl Source { Ok(Source::Registry(source)) } IndexUrl::Path(url) => { - let path = url.to_file_path().map_err(|()| LockErrorKind::UrlToPath)?; + let path = url + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url: url.to_url() })?; let path = relative_to(&path, root) .or_else(|_| std::path::absolute(&path)) .map_err(LockErrorKind::IndexRelativePath)?; @@ -3632,14 +3709,6 @@ impl SourceDist { } } - fn path(&self) -> Option<&Path> { - match &self { - SourceDist::Metadata { .. } => None, - SourceDist::Url { .. } => None, - SourceDist::Path { path, .. } => Some(path), - } - } - pub(crate) fn hash(&self) -> Option<&Hash> { match &self { SourceDist::Metadata { metadata } => metadata.hash.as_ref(), @@ -3759,34 +3828,60 @@ impl SourceDist { })) } IndexUrl::Path(path) => { - let index_path = path.to_file_path().map_err(|()| LockErrorKind::UrlToPath)?; - let reg_dist_path = reg_dist + let index_path = path + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?; + let url = reg_dist .file .url .to_url() - .map_err(LockErrorKind::InvalidUrl)? - .to_file_path() - .map_err(|()| LockErrorKind::UrlToPath)?; - let path = relative_to(®_dist_path, index_path) - .or_else(|_| std::path::absolute(®_dist_path)) - .map_err(LockErrorKind::DistributionRelativePath)? - .into_boxed_path(); - let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from); - let size = reg_dist.file.size; - let upload_time = reg_dist - .file - .upload_time_utc_ms - .map(Timestamp::from_millisecond) - .transpose() - .map_err(LockErrorKind::InvalidTimestamp)?; - Ok(Some(SourceDist::Path { - path, - metadata: SourceDistMetadata { - hash, - size, - upload_time, - }, - })) + .map_err(LockErrorKind::InvalidUrl)?; + + if url.scheme() == "file" { + let reg_dist_path = url + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url })?; + let path = relative_to(®_dist_path, index_path) + .or_else(|_| std::path::absolute(®_dist_path)) + .map_err(LockErrorKind::DistributionRelativePath)? 
+ .into_boxed_path(); + let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from); + let size = reg_dist.file.size; + let upload_time = reg_dist + .file + .upload_time_utc_ms + .map(Timestamp::from_millisecond) + .transpose() + .map_err(LockErrorKind::InvalidTimestamp)?; + Ok(Some(SourceDist::Path { + path, + metadata: SourceDistMetadata { + hash, + size, + upload_time, + }, + })) + } else { + let url = normalize_file_location(®_dist.file.url) + .map_err(LockErrorKind::InvalidUrl) + .map_err(LockError::from)?; + let hash = reg_dist.file.hashes.iter().max().cloned().map(Hash::from); + let size = reg_dist.file.size; + let upload_time = reg_dist + .file + .upload_time_utc_ms + .map(Timestamp::from_millisecond) + .transpose() + .map_err(LockErrorKind::InvalidTimestamp)?; + Ok(Some(SourceDist::Url { + url, + metadata: SourceDistMetadata { + hash, + size, + upload_time, + }, + })) + } } } } @@ -4089,25 +4184,46 @@ impl Wheel { }) } IndexUrl::Path(path) => { - let index_path = path.to_file_path().map_err(|()| LockErrorKind::UrlToPath)?; - let wheel_path = wheel - .file - .url - .to_url() - .map_err(LockErrorKind::InvalidUrl)? + let index_path = path .to_file_path() - .map_err(|()| LockErrorKind::UrlToPath)?; - let path = relative_to(&wheel_path, index_path) - .or_else(|_| std::path::absolute(&wheel_path)) - .map_err(LockErrorKind::DistributionRelativePath)? - .into_boxed_path(); - Ok(Wheel { - url: WheelWireSource::Path { path }, - hash: None, - size: None, - upload_time: None, - filename, - }) + .map_err(|()| LockErrorKind::UrlToPath { url: path.to_url() })?; + let wheel_url = wheel.file.url.to_url().map_err(LockErrorKind::InvalidUrl)?; + + if wheel_url.scheme() == "file" { + let wheel_path = wheel_url + .to_file_path() + .map_err(|()| LockErrorKind::UrlToPath { url: wheel_url })?; + let path = relative_to(&wheel_path, index_path) + .or_else(|_| std::path::absolute(&wheel_path)) + .map_err(LockErrorKind::DistributionRelativePath)? + .into_boxed_path(); + Ok(Wheel { + url: WheelWireSource::Path { path }, + hash: None, + size: None, + upload_time: None, + filename, + }) + } else { + let url = normalize_file_location(&wheel.file.url) + .map_err(LockErrorKind::InvalidUrl) + .map_err(LockError::from)?; + let hash = wheel.file.hashes.iter().max().cloned().map(Hash::from); + let size = wheel.file.size; + let upload_time = wheel + .file + .upload_time_utc_ms + .map(Timestamp::from_millisecond) + .transpose() + .map_err(LockErrorKind::InvalidTimestamp)?; + Ok(Wheel { + url: WheelWireSource::Url { url }, + hash, + size, + filename, + upload_time, + }) + } } } } @@ -4145,8 +4261,10 @@ impl Wheel { match source { RegistrySource::Url(url) => { - let file_url = match &self.url { - WheelWireSource::Url { url } => url, + let file_location = match &self.url { + WheelWireSource::Url { url: file_url } => { + FileLocation::AbsoluteUrl(file_url.clone()) + } WheelWireSource::Path { .. } | WheelWireSource::Filename { .. } => { return Err(LockErrorKind::MissingUrl { name: filename.name, @@ -4162,7 +4280,7 @@ impl Wheel { requires_python: None, size: self.size, upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), - url: FileLocation::AbsoluteUrl(file_url.clone()), + url: file_location, yanked: None, }); let index = IndexUrl::from(VerbatimUrl::from_url( @@ -4175,9 +4293,21 @@ impl Wheel { }) } RegistrySource::Path(index_path) => { - let file_path = match &self.url { - WheelWireSource::Path { path } => path, - WheelWireSource::Url { .. } | WheelWireSource::Filename { .. 
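Both the source-distribution and wheel paths in this hunk now branch on the URL scheme for entries served by a path-based index: only `file://` URLs are relativized against the index root, while remote URLs are preserved as URLs instead of failing with `UrlToPath`. The decision itself, isolated in a sketch using the `url` crate (`storage_form` is a hypothetical name):

```rust
use url::Url;

/// How a distribution listed by a local (path-based) index is stored.
fn storage_form(dist_url: &Url) -> &'static str {
    if dist_url.scheme() == "file" {
        "path relative to the index root"
    } else {
        "absolute URL"
    }
}

fn main() {
    let local = Url::parse("file:///srv/index/pkg-1.0.tar.gz").unwrap();
    let remote = Url::parse("https://cdn.example/pkg-1.0.tar.gz").unwrap();
    assert_eq!(storage_form(&local), "path relative to the index root");
    assert_eq!(storage_form(&remote), "absolute URL");
}
```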
} => { + let file_location = match &self.url { + WheelWireSource::Url { url: file_url } => { + FileLocation::AbsoluteUrl(file_url.clone()) + } + WheelWireSource::Path { path: file_path } => { + let file_path = root.join(index_path).join(file_path); + let file_url = + DisplaySafeUrl::from_file_path(&file_path).map_err(|()| { + LockErrorKind::PathToUrl { + path: file_path.into_boxed_path(), + } + })?; + FileLocation::AbsoluteUrl(UrlString::from(file_url)) + } + WheelWireSource::Filename { .. } => { return Err(LockErrorKind::MissingPath { name: filename.name, version: filename.version, @@ -4185,9 +4315,6 @@ impl Wheel { .into()); } }; - let file_url = - DisplaySafeUrl::from_file_path(root.join(index_path).join(file_path)) - .map_err(|()| LockErrorKind::PathToUrl)?; let file = Box::new(uv_distribution_types::File { dist_info_metadata: false, filename: SmallString::from(filename.to_string()), @@ -4195,7 +4322,7 @@ impl Wheel { requires_python: None, size: self.size, upload_time_utc_ms: self.upload_time.map(Timestamp::as_millisecond), - url: FileLocation::AbsoluteUrl(UrlString::from(file_url)), + url: file_location, yanked: None, }); let index = IndexUrl::from( @@ -4569,7 +4696,7 @@ impl From for Hashes { /// Convert a [`FileLocation`] into a normalized [`UrlString`]. fn normalize_file_location(location: &FileLocation) -> Result { match location { - FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment()), + FileLocation::AbsoluteUrl(absolute) => Ok(absolute.without_fragment().into_owned()), FileLocation::RelativeUrl(_, _) => Ok(normalize_url(location.to_url()?)), } } @@ -5194,8 +5321,13 @@ enum LockErrorKind { ), /// An error that occurs when the extension can't be determined /// for a given wheel or source distribution. - #[error("Failed to parse file extension; expected one of: {0}")] - MissingExtension(#[from] ExtensionError), + #[error("Failed to parse file extension for `{id}`; expected one of: {err}", id = id.cyan())] + MissingExtension { + /// The filename that was expected to have an extension. + id: PackageId, + /// The list of valid extensions that were expected. + err: ExtensionError, + }, /// Failed to parse a Git source URL. #[error("Failed to parse Git URL")] InvalidGitSourceUrl( @@ -5393,11 +5525,11 @@ enum LockErrorKind { VerbatimUrlError, ), /// An error that occurs when converting a path to a URL. - #[error("Failed to convert path to URL")] - PathToUrl, + #[error("Failed to convert path to URL: {path}", path = path.display().cyan())] + PathToUrl { path: Box }, /// An error that occurs when converting a URL to a path - #[error("Failed to convert URL to path")] - UrlToPath, + #[error("Failed to convert URL to path: {url}", url = url.cyan())] + UrlToPath { url: DisplaySafeUrl }, /// An error that occurs when multiple packages with the same /// name were found when identifying the root packages. #[error("Found multiple packages matching `{name}`", name = name.cyan())] diff --git a/crates/uv-resolver/src/marker.rs b/crates/uv-resolver/src/marker.rs index 1bb938a33..b63d51401 100644 --- a/crates/uv-resolver/src/marker.rs +++ b/crates/uv-resolver/src/marker.rs @@ -5,7 +5,7 @@ use std::ops::Bound; use uv_pep440::{LowerBound, UpperBound, Version}; use uv_pep508::{CanonicalMarkerValueVersion, MarkerTree, MarkerTreeKind}; -use crate::requires_python::RequiresPythonRange; +use uv_distribution_types::RequiresPythonRange; /// Returns the bounding Python versions that can satisfy the [`MarkerTree`], if it's constrained. 
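The `LockErrorKind` changes above all follow one pattern: carry the offending value in the variant so the message can name it (`PathToUrl { path }`, `UrlToPath { url }`, `MissingExtension { id, err }`) instead of emitting a bare "Failed to convert path to URL". A minimal sketch of the pattern with `thiserror`, with field types simplified and a hypothetical `ConvertError` enum:

```rust
use std::path::PathBuf;

#[derive(Debug, thiserror::Error)]
enum ConvertError {
    /// The failing path is part of the variant, not just the message.
    #[error("Failed to convert path to URL: {}", path.display())]
    PathToUrl { path: PathBuf },
    #[error("Failed to convert URL to path: {url}")]
    UrlToPath { url: url::Url },
}

fn main() {
    let err = ConvertError::PathToUrl { path: PathBuf::from("relative/pkg.whl") };
    assert!(err.to_string().contains("relative/pkg.whl"));
}
```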
pub(crate) fn requires_python(tree: MarkerTree) -> Option<RequiresPythonRange> { diff --git a/crates/uv-resolver/src/pubgrub/mod.rs b/crates/uv-resolver/src/pubgrub/mod.rs index f4802a2ca..bd58fbc72 100644 --- a/crates/uv-resolver/src/pubgrub/mod.rs +++ b/crates/uv-resolver/src/pubgrub/mod.rs @@ -1,6 +1,6 @@ pub(crate) use crate::pubgrub::dependencies::PubGrubDependency; pub(crate) use crate::pubgrub::distribution::PubGrubDistribution; -pub(crate) use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython}; +pub use crate::pubgrub::package::{PubGrubPackage, PubGrubPackageInner, PubGrubPython}; pub(crate) use crate::pubgrub::priority::{PubGrubPriorities, PubGrubPriority, PubGrubTiebreaker}; pub(crate) use crate::pubgrub::report::PubGrubReportFormatter; diff --git a/crates/uv-resolver/src/pubgrub/package.rs b/crates/uv-resolver/src/pubgrub/package.rs index 8c40f8080..2e67a715a 100644 --- a/crates/uv-resolver/src/pubgrub/package.rs +++ b/crates/uv-resolver/src/pubgrub/package.rs @@ -9,7 +9,7 @@ use crate::python_requirement::PythonRequirement; /// [`Arc`] wrapper around [`PubGrubPackageInner`] to make cloning (inside PubGrub) cheap. #[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub(crate) struct PubGrubPackage(Arc<PubGrubPackageInner>); +pub struct PubGrubPackage(Arc<PubGrubPackageInner>); impl Deref for PubGrubPackage { type Target = PubGrubPackageInner; @@ -39,7 +39,7 @@ impl From<PubGrubPackageInner> for PubGrubPackage { /// package (e.g., `black[colorama]`), and mark it as a dependency of the real package (e.g., /// `black`). We then discard the virtual packages at the end of the resolution process. #[derive(Debug, Clone, Eq, Hash, PartialEq, PartialOrd, Ord)] -pub(crate) enum PubGrubPackageInner { +pub enum PubGrubPackageInner { /// The root package, which is used to start the resolution process. Root(Option<PackageName>), /// A Python version. @@ -295,7 +295,7 @@ impl PubGrubPackage { } #[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Hash, Ord)] -pub(crate) enum PubGrubPython { +pub enum PubGrubPython { /// The Python version installed in the current environment. Installed, /// The Python version for which dependencies are being resolved.
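Making the `pubgrub` module and its package types public (here and in the `lib.rs` hunk above) lets external tooling pattern-match resolver packages. A sketch under the assumption that the `Root` and `Python` variants shown here carry the payloads above, with `describe` as a hypothetical helper:

```rust
use uv_resolver::pubgrub::{PubGrubPackage, PubGrubPackageInner};

fn describe(package: &PubGrubPackage) -> &'static str {
    // `PubGrubPackage` derefs to `PubGrubPackageInner`.
    match &**package {
        PubGrubPackageInner::Root(_) => "the resolution root",
        PubGrubPackageInner::Python(_) => "a Python version constraint",
        _ => "a package (or a virtual extra/group expansion)",
    }
}
```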
diff --git a/crates/uv-resolver/src/pubgrub/report.rs b/crates/uv-resolver/src/pubgrub/report.rs index b7b83a19b..5c62f0b1f 100644 --- a/crates/uv-resolver/src/pubgrub/report.rs +++ b/crates/uv-resolver/src/pubgrub/report.rs @@ -11,14 +11,14 @@ use rustc_hash::FxHashMap; use uv_configuration::{IndexStrategy, NoBinary, NoBuild}; use uv_distribution_types::{ IncompatibleDist, IncompatibleSource, IncompatibleWheel, Index, IndexCapabilities, - IndexLocations, IndexMetadata, IndexUrl, + IndexLocations, IndexMetadata, IndexUrl, RequiresPython, }; use uv_normalize::PackageName; use uv_pep440::{Version, VersionSpecifiers}; use uv_platform_tags::{AbiTag, IncompatibleTag, LanguageTag, PlatformTag, Tags}; use crate::candidate_selector::CandidateSelector; -use crate::error::ErrorTree; +use crate::error::{ErrorTree, PrefixMatch}; use crate::fork_indexes::ForkIndexes; use crate::fork_urls::ForkUrls; use crate::prerelease::AllowPrerelease; @@ -27,9 +27,7 @@ use crate::python_requirement::{PythonRequirement, PythonRequirementSource}; use crate::resolver::{ MetadataUnavailable, UnavailablePackage, UnavailableReason, UnavailableVersion, }; -use crate::{ - Flexibility, InMemoryIndex, Options, RequiresPython, ResolverEnvironment, VersionsResponse, -}; +use crate::{Flexibility, InMemoryIndex, Options, ResolverEnvironment, VersionsResponse}; #[derive(Debug)] pub(crate) struct PubGrubReportFormatter<'a> { @@ -946,17 +944,30 @@ impl PubGrubReportFormatter<'_> { hints: &mut IndexSet, ) { let any_prerelease = set.iter().any(|(start, end)| { + // Ignore, e.g., `>=2.4.dev0,<2.5.dev0`, which is the desugared form of `==2.4.*`. + if PrefixMatch::from_range(start, end).is_some() { + return false; + } + let is_pre1 = match start { Bound::Included(version) => version.any_prerelease(), Bound::Excluded(version) => version.any_prerelease(), Bound::Unbounded => false, }; + if is_pre1 { + return true; + } + let is_pre2 = match end { Bound::Included(version) => version.any_prerelease(), Bound::Excluded(version) => version.any_prerelease(), Bound::Unbounded => false, }; - is_pre1 || is_pre2 + if is_pre2 { + return true; + } + + false }); if any_prerelease { @@ -1930,11 +1941,11 @@ impl std::fmt::Display for PackageRange<'_> { PackageRangeKind::Available => write!(f, "are available:")?, } } - for segment in &segments { + for (lower, upper) in &segments { if segments.len() > 1 { write!(f, "\n ")?; } - match segment { + match (lower, upper) { (Bound::Unbounded, Bound::Unbounded) => match self.kind { PackageRangeKind::Dependency => write!(f, "{package}")?, PackageRangeKind::Compatibility => write!(f, "all versions of {package}")?, @@ -1950,7 +1961,13 @@ impl std::fmt::Display for PackageRange<'_> { write!(f, "{package}>={v},<={b}")?; } } - (Bound::Included(v), Bound::Excluded(b)) => write!(f, "{package}>={v},<{b}")?, + (Bound::Included(v), Bound::Excluded(b)) => { + if let Some(prefix) = PrefixMatch::from_range(lower, upper) { + write!(f, "{package}{prefix}")?; + } else { + write!(f, "{package}>={v},<{b}")?; + } + } (Bound::Excluded(v), Bound::Unbounded) => write!(f, "{package}>{v}")?, (Bound::Excluded(v), Bound::Included(b)) => write!(f, "{package}>{v},<={b}")?, (Bound::Excluded(v), Bound::Excluded(b)) => write!(f, "{package}>{v},<{b}")?, diff --git a/crates/uv-resolver/src/python_requirement.rs b/crates/uv-resolver/src/python_requirement.rs index 178b77866..0dce9b4f7 100644 --- a/crates/uv-resolver/src/python_requirement.rs +++ b/crates/uv-resolver/src/python_requirement.rs @@ -1,11 +1,10 @@ use std::collections::Bound; +use 
uv_distribution_types::{RequiresPython, RequiresPythonRange}; use uv_pep440::Version; use uv_pep508::{MarkerEnvironment, MarkerTree}; use uv_python::{Interpreter, PythonVersion}; -use crate::{RequiresPython, RequiresPythonRange}; - #[derive(Debug, Clone, Eq, PartialEq)] pub struct PythonRequirement { source: PythonRequirementSource, diff --git a/crates/uv-resolver/src/resolution/display.rs b/crates/uv-resolver/src/resolution/display.rs index 2f70f00f6..318fb4e54 100644 --- a/crates/uv-resolver/src/resolution/display.rs +++ b/crates/uv-resolver/src/resolution/display.rs @@ -14,7 +14,6 @@ use crate::{ResolverEnvironment, ResolverOutput}; /// A [`std::fmt::Display`] implementation for the resolution graph. #[derive(Debug)] -#[allow(clippy::struct_excessive_bools)] pub struct DisplayResolutionGraph<'a> { /// The underlying graph. resolution: &'a ResolverOutput, diff --git a/crates/uv-resolver/src/resolution/output.rs b/crates/uv-resolver/src/resolution/output.rs index 5df5ae6c3..928b9c605 100644 --- a/crates/uv-resolver/src/resolution/output.rs +++ b/crates/uv-resolver/src/resolution/output.rs @@ -12,8 +12,8 @@ use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; use uv_configuration::{Constraints, Overrides}; use uv_distribution::Metadata; use uv_distribution_types::{ - Dist, DistributionMetadata, Edge, IndexUrl, Name, Node, Requirement, ResolutionDiagnostic, - ResolvedDist, VersionId, VersionOrUrlRef, + Dist, DistributionMetadata, Edge, IndexUrl, Name, Node, Requirement, RequiresPython, + ResolutionDiagnostic, ResolvedDist, VersionId, VersionOrUrlRef, }; use uv_git::GitResolver; use uv_normalize::{ExtraName, GroupName, PackageName}; @@ -30,8 +30,7 @@ use crate::resolution_mode::ResolutionStrategy; use crate::resolver::{Resolution, ResolutionDependencyEdge, ResolutionPackage}; use crate::universal_marker::{ConflictMarker, UniversalMarker}; use crate::{ - InMemoryIndex, MetadataResponse, Options, PythonRequirement, RequiresPython, ResolveError, - VersionsResponse, + InMemoryIndex, MetadataResponse, Options, PythonRequirement, ResolveError, VersionsResponse, }; /// The output of a successful resolution. diff --git a/crates/uv-resolver/src/resolution/requirements_txt.rs b/crates/uv-resolver/src/resolution/requirements_txt.rs index 5ad6480c2..bcdef207b 100644 --- a/crates/uv-resolver/src/resolution/requirements_txt.rs +++ b/crates/uv-resolver/src/resolution/requirements_txt.rs @@ -4,16 +4,16 @@ use std::path::Path; use itertools::Itertools; -use uv_distribution_types::{DistributionMetadata, Name, ResolvedDist, Verbatim, VersionOrUrlRef}; +use uv_distribution_types::{ + DistributionMetadata, Name, RequiresPython, ResolvedDist, SimplifiedMarkerTree, Verbatim, + VersionOrUrlRef, +}; use uv_normalize::{ExtraName, PackageName}; use uv_pep440::Version; use uv_pep508::{MarkerTree, Scheme, split_scheme}; use uv_pypi_types::HashDigest; -use crate::{ - requires_python::{RequiresPython, SimplifiedMarkerTree}, - resolution::AnnotatedDist, -}; +use crate::resolution::AnnotatedDist; #[derive(Debug, Clone)] /// A pinned package with its resolved distribution and all the extras that were pinned for it. diff --git a/crates/uv-resolver/src/resolver/availability.rs b/crates/uv-resolver/src/resolver/availability.rs index d2e9296b9..64721b4b6 100644 --- a/crates/uv-resolver/src/resolver/availability.rs +++ b/crates/uv-resolver/src/resolver/availability.rs @@ -7,7 +7,7 @@ use uv_platform_tags::{AbiTag, Tags}; /// The reason why a package or a version cannot be used. 
#[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum UnavailableReason { +pub enum UnavailableReason { /// The entire package cannot be used. Package(UnavailablePackage), /// A single version cannot be used. @@ -29,7 +29,7 @@ impl Display for UnavailableReason { /// Most variant are from [`MetadataResponse`] without the error source, since we don't format /// the source and we want to merge unavailable messages across versions. #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum UnavailableVersion { +pub enum UnavailableVersion { /// Version is incompatible because it has no usable distributions IncompatibleDist(IncompatibleDist), /// The wheel metadata was found, but could not be parsed. @@ -123,7 +123,7 @@ impl From<&MetadataUnavailable> for UnavailableVersion { /// The package is unavailable and cannot be used. #[derive(Debug, Clone, Eq, PartialEq)] -pub(crate) enum UnavailablePackage { +pub enum UnavailablePackage { /// Index lookups were disabled (i.e., `--no-index`) and the package was not found in a flat index (i.e. from `--find-links`). NoIndex, /// Network requests were disabled (i.e., `--offline`), and the package was not found in the cache. diff --git a/crates/uv-resolver/src/resolver/environment.rs b/crates/uv-resolver/src/resolver/environment.rs index 354941886..6e816f991 100644 --- a/crates/uv-resolver/src/resolver/environment.rs +++ b/crates/uv-resolver/src/resolver/environment.rs @@ -1,14 +1,14 @@ use std::sync::Arc; use tracing::trace; +use uv_distribution_types::{RequiresPython, RequiresPythonRange}; use uv_pep440::VersionSpecifiers; use uv_pep508::{MarkerEnvironment, MarkerTree}; use uv_pypi_types::{ConflictItem, ConflictItemRef, ResolverMarkerEnvironment}; use crate::pubgrub::{PubGrubDependency, PubGrubPackage}; -use crate::requires_python::RequiresPythonRange; use crate::resolver::ForkState; use crate::universal_marker::{ConflictMarker, UniversalMarker}; -use crate::{PythonRequirement, RequiresPython, ResolveError}; +use crate::{PythonRequirement, ResolveError}; /// Represents one or more marker environments for a resolution. /// @@ -628,7 +628,7 @@ mod tests { use uv_pep440::{LowerBound, UpperBound, Version}; use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder}; - use crate::requires_python::{RequiresPython, RequiresPythonRange}; + use uv_distribution_types::{RequiresPython, RequiresPythonRange}; use super::*; diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index 1384ce4f7..ed1cd48af 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -1814,7 +1814,7 @@ impl ResolverState; pub type WheelMetadataResult = Result; diff --git a/crates/uv-resolver/src/resolver/system.rs b/crates/uv-resolver/src/resolver/system.rs index 806b1c01c..a815697da 100644 --- a/crates/uv-resolver/src/resolver/system.rs +++ b/crates/uv-resolver/src/resolver/system.rs @@ -23,11 +23,17 @@ impl SystemDependency { /// For example, given `https://download.pytorch.org/whl/cu124`, returns CUDA 12.4. 
pub(super) fn from_index(index: &DisplaySafeUrl) -> Option { let backend = TorchBackend::from_index(index)?; - let cuda_version = backend.cuda_version()?; - Some(Self { - name: PackageName::from_str("cuda").unwrap(), - version: cuda_version, - }) + if let Some(cuda_version) = backend.cuda_version() { + Some(Self { + name: PackageName::from_str("cuda").unwrap(), + version: cuda_version, + }) + } else { + backend.rocm_version().map(|rocm_version| Self { + name: PackageName::from_str("rocm").unwrap(), + version: rocm_version, + }) + } } } @@ -80,4 +86,10 @@ mod tests { let url = DisplaySafeUrl::parse("https://download.pytorch.org/whl/cpu").unwrap(); assert_eq!(SystemDependency::from_index(&url), None); } + + #[test] + fn pytorch_xpu() { + let url = DisplaySafeUrl::parse("https://download.pytorch.org/whl/xpu").unwrap(); + assert_eq!(SystemDependency::from_index(&url), None); + } } diff --git a/crates/uv-resolver/src/resolver/urls.rs b/crates/uv-resolver/src/resolver/urls.rs index a41f33371..73d190b4a 100644 --- a/crates/uv-resolver/src/resolver/urls.rs +++ b/crates/uv-resolver/src/resolver/urls.rs @@ -4,7 +4,6 @@ use same_file::is_same_file; use tracing::debug; use uv_cache_key::CanonicalUrl; -use uv_distribution_types::Verbatim; use uv_git::GitResolver; use uv_normalize::PackageName; use uv_pep508::{MarkerTree, VerbatimUrl}; @@ -170,8 +169,8 @@ impl Urls { let [allowed_url] = matching_urls.as_slice() else { let mut conflicting_urls: Vec<_> = matching_urls .into_iter() - .map(|parsed_url| parsed_url.verbatim.verbatim().to_string()) - .chain(std::iter::once(verbatim_url.verbatim().to_string())) + .map(|parsed_url| parsed_url.parsed_url.clone()) + .chain(std::iter::once(parsed_url.clone())) .collect(); conflicting_urls.sort(); return Err(ResolveError::ConflictingUrls { diff --git a/crates/uv-resolver/src/version_map.rs b/crates/uv-resolver/src/version_map.rs index 44e70e73b..63132ad0d 100644 --- a/crates/uv-resolver/src/version_map.rs +++ b/crates/uv-resolver/src/version_map.rs @@ -11,7 +11,8 @@ use uv_configuration::BuildOptions; use uv_distribution_filename::{DistFilename, WheelFilename}; use uv_distribution_types::{ HashComparison, IncompatibleSource, IncompatibleWheel, IndexUrl, PrioritizedDist, - RegistryBuiltWheel, RegistrySourceDist, SourceDistCompatibility, WheelCompatibility, + RegistryBuiltWheel, RegistrySourceDist, RequiresPython, SourceDistCompatibility, + WheelCompatibility, }; use uv_normalize::PackageName; use uv_pep440::Version; @@ -21,7 +22,7 @@ use uv_types::HashStrategy; use uv_warnings::warn_user_once; use crate::flat_index::FlatDistributions; -use crate::{ExcludeNewer, RequiresPython, yanks::AllowedYanks}; +use crate::{ExcludeNewer, yanks::AllowedYanks}; /// A map from versions to distributions. #[derive(Debug)] @@ -344,7 +345,6 @@ struct VersionMapEager { /// avoiding another conversion step into a fully filled out `VersionMap` can /// provide substantial savings in some cases. #[derive(Debug)] -#[allow(clippy::struct_excessive_bools)] struct VersionMapLazy { /// A map from version to possibly-initialized distribution. 
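`SystemDependency::from_index` above now recognizes ROCm indexes alongside CUDA ones, while CPU and XPU indexes (per the tests) yield no system dependency. The mapping reduced to path prefixes in a hedged sketch, with the version parsing that the real code performs left out; `system_dependency` is a hypothetical name:

```rust
/// Map a PyTorch index path to the implied system dependency, if any.
fn system_dependency(index_path: &str) -> Option<(&'static str, &str)> {
    if let Some(version) = index_path.strip_prefix("/whl/cu") {
        Some(("cuda", version)) // e.g., "124" encodes CUDA 12.4
    } else if let Some(version) = index_path.strip_prefix("/whl/rocm") {
        Some(("rocm", version))
    } else {
        None // e.g., /whl/cpu and /whl/xpu imply no system dependency
    }
}

fn main() {
    assert_eq!(system_dependency("/whl/rocm6.3"), Some(("rocm", "6.3")));
    assert_eq!(system_dependency("/whl/xpu"), None);
}
```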
map: BTreeMap, diff --git a/crates/uv-scripts/Cargo.toml b/crates/uv-scripts/Cargo.toml index 993633918..124eb1fea 100644 --- a/crates/uv-scripts/Cargo.toml +++ b/crates/uv-scripts/Cargo.toml @@ -16,11 +16,13 @@ uv-pep508 = { workspace = true } uv-pypi-types = { workspace = true } uv-redacted = { workspace = true } uv-settings = { workspace = true } +uv-warnings = { workspace = true } uv-workspace = { workspace = true } fs-err = { workspace = true, features = ["tokio"] } indoc = { workspace = true } memchr = { workspace = true } +regex = { workspace = true } serde = { workspace = true, features = ["derive"] } thiserror = { workspace = true } toml = { workspace = true } diff --git a/crates/uv-scripts/src/lib.rs b/crates/uv-scripts/src/lib.rs index 1023b4141..b80cdc219 100644 --- a/crates/uv-scripts/src/lib.rs +++ b/crates/uv-scripts/src/lib.rs @@ -14,6 +14,7 @@ use uv_pep508::PackageName; use uv_pypi_types::VerbatimParsedUrl; use uv_redacted::DisplaySafeUrl; use uv_settings::{GlobalOptions, ResolverInstallerOptions}; +use uv_warnings::warn_user; use uv_workspace::pyproject::Sources; static FINDER: LazyLock = LazyLock::new(|| Finder::new(b"# /// script")); @@ -238,11 +239,25 @@ impl Pep723Script { let metadata = serialize_metadata(&default_metadata); let script = if let Some(existing_contents) = existing_contents { + let (mut shebang, contents) = extract_shebang(&existing_contents)?; + if !shebang.is_empty() { + shebang.push_str("\n#\n"); + // If the shebang doesn't contain `uv`, it's probably something like + // `#! /usr/bin/env python`, which isn't going to respect the inline metadata. + // Issue a warning for users who might not know that. + // TODO: There are a lot of mistakes we could consider detecting here, like + // `uv run` without `--script` when the file doesn't end in `.py`. + if !regex::Regex::new(r"\buv\b").unwrap().is_match(&shebang) { + warn_user!( + "If you execute {} directly, it might ignore its inline metadata.\nConsider replacing its shebang with: {}", + file.to_string_lossy().cyan(), + "#!/usr/bin/env -S uv run --script".cyan(), + ); + } + } indoc::formatdoc! {r" - {metadata} - {content} - ", - content = String::from_utf8(existing_contents).map_err(|err| Pep723Error::Utf8(err.utf8_error()))?} + {shebang}{metadata} + {contents}" } } else { indoc::formatdoc! 
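The shebang handling in `uv-scripts` above preserves an existing shebang when inline metadata is prepended, and warns when that shebang doesn't mention `uv`, since executing such a script directly would bypass the metadata. The heuristic is just the `\buv\b` word-boundary match from the patch, isolated here:

```rust
/// True when executing the script via its shebang would ignore inline metadata.
fn shebang_ignores_metadata(shebang: &str) -> bool {
    !regex::Regex::new(r"\buv\b").unwrap().is_match(shebang)
}

fn main() {
    assert!(shebang_ignores_metadata("#!/usr/bin/env python"));
    assert!(!shebang_ignores_metadata("#!/usr/bin/env -S uv run --script"));
}
```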
{r#" {metadata} diff --git a/crates/uv-settings/src/settings.rs b/crates/uv-settings/src/settings.rs index ff6d39995..d80ccce2f 100644 --- a/crates/uv-settings/src/settings.rs +++ b/crates/uv-settings/src/settings.rs @@ -41,6 +41,7 @@ pub(crate) struct Tools { #[derive(Debug, Clone, Default, Deserialize, CombineOptions, OptionsMetadata)] #[serde(from = "OptionsWire", rename_all = "kebab-case")] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "schemars", schemars(!from))] pub struct Options { #[serde(flatten)] pub globals: GlobalOptions, @@ -140,6 +141,9 @@ pub struct Options { #[cfg_attr(feature = "schemars", schemars(skip))] pub default_groups: Option, + #[cfg_attr(feature = "schemars", schemars(skip))] + pub dependency_groups: Option, + #[cfg_attr(feature = "schemars", schemars(skip))] pub managed: Option, @@ -1870,6 +1874,7 @@ pub struct OptionsWire { managed: Option, r#package: Option, default_groups: Option, + dependency_groups: Option, dev_dependencies: Option, // Build backend @@ -1934,6 +1939,7 @@ impl From for Options { workspace, sources, default_groups, + dependency_groups, dev_dependencies, managed, package, @@ -2010,6 +2016,7 @@ impl From for Options { sources, dev_dependencies, default_groups, + dependency_groups, managed, package, } diff --git a/crates/uv-small-str/src/lib.rs b/crates/uv-small-str/src/lib.rs index 7395c090a..1524f1b99 100644 --- a/crates/uv-small-str/src/lib.rs +++ b/crates/uv-small-str/src/lib.rs @@ -147,15 +147,15 @@ impl PartialOrd for rkyv::string::ArchivedString { /// An [`schemars::JsonSchema`] implementation for [`SmallString`]. #[cfg(feature = "schemars")] impl schemars::JsonSchema for SmallString { - fn is_referenceable() -> bool { - String::is_referenceable() + fn inline_schema() -> bool { + true } - fn schema_name() -> String { + fn schema_name() -> Cow<'static, str> { String::schema_name() } - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - String::json_schema(_gen) + fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + String::json_schema(generator) } } diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs index aff56df45..4ac2976d9 100644 --- a/crates/uv-static/src/env_vars.rs +++ b/crates/uv-static/src/env_vars.rs @@ -359,10 +359,6 @@ impl EnvVars { #[attr_hidden] pub const UV_INTERNAL__SHOW_DERIVATION_TREE: &'static str = "UV_INTERNAL__SHOW_DERIVATION_TREE"; - /// Used to set a temporary directory for some tests. - #[attr_hidden] - pub const UV_INTERNAL__TEST_DIR: &'static str = "UV_INTERNAL__TEST_DIR"; - /// Path to system-level configuration directory on Unix systems. pub const XDG_CONFIG_DIRS: &'static str = "XDG_CONFIG_DIRS"; @@ -667,6 +663,10 @@ impl EnvVars { #[attr_hidden] pub const UV_TEST_INDEX_URL: &'static str = "UV_TEST_INDEX_URL"; + /// Used to set the GitHub fast-path url for tests. + #[attr_hidden] + pub const UV_GITHUB_FAST_PATH_URL: &'static str = "UV_GITHUB_FAST_PATH_URL"; + /// Hide progress messages with non-deterministic order in tests. #[attr_hidden] pub const UV_TEST_NO_CLI_PROGRESS: &'static str = "UV_TEST_NO_CLI_PROGRESS"; @@ -718,10 +718,14 @@ impl EnvVars { /// This is a quasi-standard variable, described, e.g., in `ncurses(3x)`. pub const COLUMNS: &'static str = "COLUMNS"; - /// The CUDA driver version to assume when inferring the PyTorch backend. + /// The CUDA driver version to assume when inferring the PyTorch backend (e.g., `550.144.03`). 
#[attr_hidden] pub const UV_CUDA_DRIVER_VERSION: &'static str = "UV_CUDA_DRIVER_VERSION"; + /// The AMD GPU architecture to assume when inferring the PyTorch backend (e.g., `gfx1100`). + #[attr_hidden] + pub const UV_AMD_GPU_ARCHITECTURE: &'static str = "UV_AMD_GPU_ARCHITECTURE"; + /// Equivalent to the `--torch-backend` command-line argument (e.g., `cpu`, `cu126`, or `auto`). pub const UV_TORCH_BACKEND: &'static str = "UV_TORCH_BACKEND"; diff --git a/crates/uv-tool/Cargo.toml b/crates/uv-tool/Cargo.toml index d01a3209d..210c17c00 100644 --- a/crates/uv-tool/Cargo.toml +++ b/crates/uv-tool/Cargo.toml @@ -17,6 +17,7 @@ workspace = true [dependencies] uv-cache = { workspace = true } +uv-configuration = { workspace = true } uv-dirs = { workspace = true } uv-distribution-types = { workspace = true } uv-fs = { workspace = true } diff --git a/crates/uv-tool/src/lib.rs b/crates/uv-tool/src/lib.rs index f85075ea6..ee80a2854 100644 --- a/crates/uv-tool/src/lib.rs +++ b/crates/uv-tool/src/lib.rs @@ -1,6 +1,7 @@ use core::fmt; use fs_err as fs; +use uv_configuration::PreviewMode; use uv_dirs::user_executable_directory; use uv_pep440::Version; use uv_pep508::{InvalidNameError, PackageName}; @@ -257,6 +258,7 @@ impl InstalledTools { &self, name: &PackageName, interpreter: Interpreter, + preview: PreviewMode, ) -> Result { let environment_path = self.tool_dir(name); @@ -286,6 +288,8 @@ impl InstalledTools { false, false, false, + false, + preview, )?; Ok(venv) diff --git a/crates/uv-tool/src/tool.rs b/crates/uv-tool/src/tool.rs index df8571c94..cce3a2f58 100644 --- a/crates/uv-tool/src/tool.rs +++ b/crates/uv-tool/src/tool.rs @@ -7,6 +7,7 @@ use toml_edit::{Array, Item, Table, Value, value}; use uv_distribution_types::Requirement; use uv_fs::{PortablePath, Simplified}; use uv_pypi_types::VerbatimParsedUrl; +use uv_python::PythonRequest; use uv_settings::ToolOptions; /// A tool entry. @@ -22,7 +23,7 @@ pub struct Tool { /// The build constraints requested by the user during installation. build_constraints: Vec, /// The Python requested by the user during installation. - python: Option, + python: Option, /// A mapping of entry point names to their metadata. entrypoints: Vec, /// The [`ToolOptions`] used to install this tool. 
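Before the `uv-tool` receipt changes continue below, a note on the two accelerator variables defined above: `UV_CUDA_DRIVER_VERSION` and `UV_AMD_GPU_ARCHITECTURE` both short-circuit hardware probing, with the CUDA check running first. A minimal standalone sketch of that precedence, using only `std` (the function name is illustrative, not uv's API; the real logic lives in `uv-torch`'s `accelerator.rs` further down):

```rust
use std::env;

/// Sketch: mirror the order in which `Accelerator::detect` checks its overrides.
/// An explicit environment variable wins over any probing of `/sys`,
/// `nvidia-smi`, or `rocm_agent_enumerator`.
fn detect_accelerator_sketch() -> Option<String> {
    if let Ok(driver) = env::var("UV_CUDA_DRIVER_VERSION") {
        return Some(format!("CUDA {driver}")); // e.g., `550.144.03`
    }
    if let Ok(arch) = env::var("UV_AMD_GPU_ARCHITECTURE") {
        return Some(format!("AMD {arch}")); // e.g., `gfx1100`
    }
    None // fall through to probing the system
}
```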
@@ -40,7 +41,7 @@ struct ToolWire { overrides: Vec, #[serde(default)] build_constraint_dependencies: Vec, - python: Option<String>, + python: Option<PythonRequest>, entrypoints: Vec, #[serde(default)] options: ToolOptions, @@ -164,7 +165,7 @@ impl Tool { constraints: Vec, overrides: Vec, build_constraints: Vec, - python: Option<String>, + python: Option<PythonRequest>, entrypoints: impl Iterator, options: ToolOptions, ) -> Self { @@ -280,7 +281,13 @@ impl Tool { } if let Some(ref python) = self.python { - table.insert("python", value(python)); + table.insert( + "python", + value(serde::Serialize::serialize( + &python, + toml_edit::ser::ValueSerializer::new(), + )?), + ); } table.insert("entrypoints", { @@ -327,7 +334,7 @@ impl Tool { &self.build_constraints } - pub fn python(&self) -> &Option<String> { + pub fn python(&self) -> &Option<PythonRequest> { &self.python } diff --git a/crates/uv-torch/src/accelerator.rs b/crates/uv-torch/src/accelerator.rs index 8ec55ac2a..3165bd4c5 100644 --- a/crates/uv-torch/src/accelerator.rs +++ b/crates/uv-torch/src/accelerator.rs @@ -13,17 +13,30 @@ pub enum AcceleratorError { Version(#[from] uv_pep440::VersionParseError), #[error(transparent)] Utf8(#[from] std::string::FromUtf8Error), + #[error("Unknown AMD GPU architecture: {0}")] + UnknownAmdGpuArchitecture(String), } #[derive(Debug, Clone, Eq, PartialEq)] pub enum Accelerator { + /// The CUDA driver version (e.g., `550.144.03`). + /// + /// This is in contrast to the CUDA toolkit version (e.g., `12.8.0`). Cuda { driver_version: Version }, + /// The AMD GPU architecture (e.g., `gfx906`). + /// + /// This is in contrast to the user-space ROCm version (e.g., `6.4.0-47`) or the kernel-mode + /// driver version (e.g., `6.12.12`). + Amd { + gpu_architecture: AmdGpuArchitecture, + }, } impl std::fmt::Display for Accelerator { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { Self::Cuda { driver_version } => write!(f, "CUDA {driver_version}"), + Self::Amd { gpu_architecture } => write!(f, "AMD {gpu_architecture}"), } } } @@ -33,9 +46,11 @@ impl Accelerator { /// /// Query, in order: /// 1. The `UV_CUDA_DRIVER_VERSION` environment variable. + /// 2. The `UV_AMD_GPU_ARCHITECTURE` environment variable. /// 3. `/sys/module/nvidia/version`, which contains the driver version (e.g., `550.144.03`). /// 4. `/proc/driver/nvidia/version`, which contains the driver version among other information. /// 5. `nvidia-smi --query-gpu=driver_version --format=csv,noheader`. + /// 6. `rocm_agent_enumerator`, which lists the AMD GPU architectures. pub fn detect() -> Result<Option<Self>, AcceleratorError> { // Read from `UV_CUDA_DRIVER_VERSION`. if let Ok(driver_version) = std::env::var(EnvVars::UV_CUDA_DRIVER_VERSION) { @@ -44,6 +59,15 @@ impl Accelerator { return Ok(Some(Self::Cuda { driver_version })); } + // Read from `UV_AMD_GPU_ARCHITECTURE`. + if let Ok(gpu_architecture) = std::env::var(EnvVars::UV_AMD_GPU_ARCHITECTURE) { + let gpu_architecture = AmdGpuArchitecture::from_str(&gpu_architecture)?; + debug!( + "Detected AMD GPU architecture from `UV_AMD_GPU_ARCHITECTURE`: {gpu_architecture}" + ); + return Ok(Some(Self::Amd { gpu_architecture })); + } + // Read from `/sys/module/nvidia/version`. match fs_err::read_to_string("/sys/module/nvidia/version") { Ok(content) => { @@ -100,7 +124,34 @@ impl Accelerator { ); } - debug!("Failed to detect CUDA driver version"); + // Query `rocm_agent_enumerator` to detect the AMD GPU architecture.
+ // + // See: https://rocm.docs.amd.com/projects/rocminfo/en/latest/how-to/use-rocm-agent-enumerator.html + if let Ok(output) = std::process::Command::new("rocm_agent_enumerator").output() { + if output.status.success() { + let stdout = String::from_utf8(output.stdout)?; + if let Some(gpu_architecture) = stdout + .lines() + .map(str::trim) + .filter_map(|line| AmdGpuArchitecture::from_str(line).ok()) + .min() + { + debug!( + "Detected AMD GPU architecture from `rocm_agent_enumerator`: {gpu_architecture}" + ); + return Ok(Some(Self::Amd { gpu_architecture })); + } + } else { + debug!( + "Failed to query AMD GPU architecture with `rocm_agent_enumerator` with status `{}`: {}", + output.status, + String::from_utf8_lossy(&output.stderr) + ); + } + } + + debug!("Failed to detect GPU driver version"); + Ok(None) } } @@ -129,6 +180,63 @@ fn parse_proc_driver_nvidia_version(content: &str) -> Result, Ac Ok(Some(driver_version)) } +/// A GPU architecture for AMD GPUs. +/// +/// See: +#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)] +pub enum AmdGpuArchitecture { + Gfx900, + Gfx906, + Gfx908, + Gfx90a, + Gfx942, + Gfx1030, + Gfx1100, + Gfx1101, + Gfx1102, + Gfx1200, + Gfx1201, +} + +impl FromStr for AmdGpuArchitecture { + type Err = AcceleratorError; + + fn from_str(s: &str) -> Result { + match s { + "gfx900" => Ok(Self::Gfx900), + "gfx906" => Ok(Self::Gfx906), + "gfx908" => Ok(Self::Gfx908), + "gfx90a" => Ok(Self::Gfx90a), + "gfx942" => Ok(Self::Gfx942), + "gfx1030" => Ok(Self::Gfx1030), + "gfx1100" => Ok(Self::Gfx1100), + "gfx1101" => Ok(Self::Gfx1101), + "gfx1102" => Ok(Self::Gfx1102), + "gfx1200" => Ok(Self::Gfx1200), + "gfx1201" => Ok(Self::Gfx1201), + _ => Err(AcceleratorError::UnknownAmdGpuArchitecture(s.to_string())), + } + } +} + +impl std::fmt::Display for AmdGpuArchitecture { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Gfx900 => write!(f, "gfx900"), + Self::Gfx906 => write!(f, "gfx906"), + Self::Gfx908 => write!(f, "gfx908"), + Self::Gfx90a => write!(f, "gfx90a"), + Self::Gfx942 => write!(f, "gfx942"), + Self::Gfx1030 => write!(f, "gfx1030"), + Self::Gfx1100 => write!(f, "gfx1100"), + Self::Gfx1101 => write!(f, "gfx1101"), + Self::Gfx1102 => write!(f, "gfx1102"), + Self::Gfx1200 => write!(f, "gfx1200"), + Self::Gfx1201 => write!(f, "gfx1201"), + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/uv-torch/src/backend.rs b/crates/uv-torch/src/backend.rs index 0df5bd844..0f2b72077 100644 --- a/crates/uv-torch/src/backend.rs +++ b/crates/uv-torch/src/backend.rs @@ -35,7 +35,6 @@ //! OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE //! OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //! ``` -//! use std::str::FromStr; use std::sync::LazyLock; @@ -48,7 +47,7 @@ use uv_normalize::PackageName; use uv_pep440::Version; use uv_platform_tags::Os; -use crate::{Accelerator, AcceleratorError}; +use crate::{Accelerator, AcceleratorError, AmdGpuArchitecture}; /// The strategy to use when determining the appropriate PyTorch index. #[derive(Debug, Copy, Clone, Eq, PartialEq, serde::Deserialize, serde::Serialize)] @@ -108,13 +107,84 @@ pub enum TorchMode { Cu90, /// Use the PyTorch index for CUDA 8.0. Cu80, + /// Use the PyTorch index for ROCm 6.3. + #[serde(rename = "rocm6.3")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.3"))] + Rocm63, + /// Use the PyTorch index for ROCm 6.2.4. 
+ #[serde(rename = "rocm6.2.4")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.2.4"))] + Rocm624, + /// Use the PyTorch index for ROCm 6.2. + #[serde(rename = "rocm6.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.2"))] + Rocm62, + /// Use the PyTorch index for ROCm 6.1. + #[serde(rename = "rocm6.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.1"))] + Rocm61, + /// Use the PyTorch index for ROCm 6.0. + #[serde(rename = "rocm6.0")] + #[cfg_attr(feature = "clap", clap(name = "rocm6.0"))] + Rocm60, + /// Use the PyTorch index for ROCm 5.7. + #[serde(rename = "rocm5.7")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.7"))] + Rocm57, + /// Use the PyTorch index for ROCm 5.6. + #[serde(rename = "rocm5.6")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.6"))] + Rocm56, + /// Use the PyTorch index for ROCm 5.5. + #[serde(rename = "rocm5.5")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.5"))] + Rocm55, + /// Use the PyTorch index for ROCm 5.4.2. + #[serde(rename = "rocm5.4.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.4.2"))] + Rocm542, + /// Use the PyTorch index for ROCm 5.4. + #[serde(rename = "rocm5.4")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.4"))] + Rocm54, + /// Use the PyTorch index for ROCm 5.3. + #[serde(rename = "rocm5.3")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.3"))] + Rocm53, + /// Use the PyTorch index for ROCm 5.2. + #[serde(rename = "rocm5.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.2"))] + Rocm52, + /// Use the PyTorch index for ROCm 5.1.1. + #[serde(rename = "rocm5.1.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm5.1.1"))] + Rocm511, + /// Use the PyTorch index for ROCm 4.2. + #[serde(rename = "rocm4.2")] + #[cfg_attr(feature = "clap", clap(name = "rocm4.2"))] + Rocm42, + /// Use the PyTorch index for ROCm 4.1. + #[serde(rename = "rocm4.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm4.1"))] + Rocm41, + /// Use the PyTorch index for ROCm 4.0.1. + #[serde(rename = "rocm4.0.1")] + #[cfg_attr(feature = "clap", clap(name = "rocm4.0.1"))] + Rocm401, + /// Use the PyTorch index for Intel XPU. + Xpu, } /// The strategy to use when determining the appropriate PyTorch index. #[derive(Debug, Clone, Eq, PartialEq)] pub enum TorchStrategy { - /// Select the appropriate PyTorch index based on the operating system and CUDA driver version. - Auto { os: Os, driver_version: Version }, + /// Select the appropriate PyTorch index based on the operating system and CUDA driver version (e.g., `550.144.03`). + Cuda { os: Os, driver_version: Version }, + /// Select the appropriate PyTorch index based on the operating system and AMD GPU architecture (e.g., `gfx1100`). + Amd { + os: Os, + gpu_architecture: AmdGpuArchitecture, + }, /// Use the specified PyTorch index. Backend(TorchBackend), } @@ -123,16 +193,17 @@ impl TorchStrategy { /// Determine the [`TorchStrategy`] from the given [`TorchMode`], [`Os`], and [`Accelerator`]. pub fn from_mode(mode: TorchMode, os: &Os) -> Result { match mode { - TorchMode::Auto => { - if let Some(Accelerator::Cuda { driver_version }) = Accelerator::detect()? { - Ok(Self::Auto { - os: os.clone(), - driver_version: driver_version.clone(), - }) - } else { - Ok(Self::Backend(TorchBackend::Cpu)) - } - } + TorchMode::Auto => match Accelerator::detect()? 
{ + Some(Accelerator::Cuda { driver_version }) => Ok(Self::Cuda { + os: os.clone(), + driver_version: driver_version.clone(), + }), + Some(Accelerator::Amd { gpu_architecture }) => Ok(Self::Amd { + os: os.clone(), + gpu_architecture, + }), + None => Ok(Self::Backend(TorchBackend::Cpu)), + }, TorchMode::Cpu => Ok(Self::Backend(TorchBackend::Cpu)), TorchMode::Cu128 => Ok(Self::Backend(TorchBackend::Cu128)), TorchMode::Cu126 => Ok(Self::Backend(TorchBackend::Cu126)), @@ -158,6 +229,23 @@ impl TorchStrategy { TorchMode::Cu91 => Ok(Self::Backend(TorchBackend::Cu91)), TorchMode::Cu90 => Ok(Self::Backend(TorchBackend::Cu90)), TorchMode::Cu80 => Ok(Self::Backend(TorchBackend::Cu80)), + TorchMode::Rocm63 => Ok(Self::Backend(TorchBackend::Rocm63)), + TorchMode::Rocm624 => Ok(Self::Backend(TorchBackend::Rocm624)), + TorchMode::Rocm62 => Ok(Self::Backend(TorchBackend::Rocm62)), + TorchMode::Rocm61 => Ok(Self::Backend(TorchBackend::Rocm61)), + TorchMode::Rocm60 => Ok(Self::Backend(TorchBackend::Rocm60)), + TorchMode::Rocm57 => Ok(Self::Backend(TorchBackend::Rocm57)), + TorchMode::Rocm56 => Ok(Self::Backend(TorchBackend::Rocm56)), + TorchMode::Rocm55 => Ok(Self::Backend(TorchBackend::Rocm55)), + TorchMode::Rocm542 => Ok(Self::Backend(TorchBackend::Rocm542)), + TorchMode::Rocm54 => Ok(Self::Backend(TorchBackend::Rocm54)), + TorchMode::Rocm53 => Ok(Self::Backend(TorchBackend::Rocm53)), + TorchMode::Rocm52 => Ok(Self::Backend(TorchBackend::Rocm52)), + TorchMode::Rocm511 => Ok(Self::Backend(TorchBackend::Rocm511)), + TorchMode::Rocm42 => Ok(Self::Backend(TorchBackend::Rocm42)), + TorchMode::Rocm41 => Ok(Self::Backend(TorchBackend::Rocm41)), + TorchMode::Rocm401 => Ok(Self::Backend(TorchBackend::Rocm401)), + TorchMode::Xpu => Ok(Self::Backend(TorchBackend::Xpu)), } } @@ -177,31 +265,35 @@ impl TorchStrategy { | "torchtext" | "torchvision" | "pytorch-triton" + | "pytorch-triton-rocm" + | "pytorch-triton-xpu" ) } /// Return the appropriate index URLs for the given [`TorchStrategy`]. pub fn index_urls(&self) -> impl Iterator { match self { - TorchStrategy::Auto { os, driver_version } => { + TorchStrategy::Cuda { os, driver_version } => { // If this is a GPU-enabled package, and CUDA drivers are installed, use PyTorch's CUDA // indexes. // // See: https://github.com/pmeier/light-the-torch/blob/33397cbe45d07b51ad8ee76b004571a4c236e37f/light_the_torch/_patch.py#L36-L49 match os { - Os::Manylinux { .. } | Os::Musllinux { .. } => Either::Left(Either::Left( - LINUX_DRIVERS - .iter() - .filter_map(move |(backend, version)| { - if driver_version >= version { - Some(backend.index_url()) - } else { - None - } - }) - .chain(std::iter::once(TorchBackend::Cpu.index_url())), - )), - Os::Windows => Either::Left(Either::Right( + Os::Manylinux { .. } | Os::Musllinux { .. } => { + Either::Left(Either::Left(Either::Left( + LINUX_CUDA_DRIVERS + .iter() + .filter_map(move |(backend, version)| { + if driver_version >= version { + Some(backend.index_url()) + } else { + None + } + }) + .chain(std::iter::once(TorchBackend::Cpu.index_url())), + ))) + } + Os::Windows => Either::Left(Either::Left(Either::Right( WINDOWS_CUDA_VERSIONS .iter() .filter_map(move |(backend, version)| { @@ -212,7 +304,7 @@ impl TorchStrategy { } }) .chain(std::iter::once(TorchBackend::Cpu.index_url())), - )), + ))), Os::Macos { .. } | Os::FreeBsd { .. } | Os::NetBsd { .. } @@ -222,11 +314,42 @@ impl TorchStrategy { | Os::Haiku { .. } | Os::Android { .. } | Os::Pyodide { .. 
} => { - Either::Right(std::iter::once(TorchBackend::Cpu.index_url())) + Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url()))) } } } - TorchStrategy::Backend(backend) => Either::Right(std::iter::once(backend.index_url())), + TorchStrategy::Amd { + os, + gpu_architecture, + } => match os { + Os::Manylinux { .. } | Os::Musllinux { .. } => Either::Left(Either::Right( + LINUX_AMD_GPU_DRIVERS + .iter() + .filter_map(move |(backend, architecture)| { + if gpu_architecture == architecture { + Some(backend.index_url()) + } else { + None + } + }) + .chain(std::iter::once(TorchBackend::Cpu.index_url())), + )), + Os::Windows + | Os::Macos { .. } + | Os::FreeBsd { .. } + | Os::NetBsd { .. } + | Os::OpenBsd { .. } + | Os::Dragonfly { .. } + | Os::Illumos { .. } + | Os::Haiku { .. } + | Os::Android { .. } + | Os::Pyodide { .. } => { + Either::Right(Either::Left(std::iter::once(TorchBackend::Cpu.index_url()))) + } + }, + TorchStrategy::Backend(backend) => { + Either::Right(Either::Right(std::iter::once(backend.index_url()))) + } } } } @@ -259,6 +382,23 @@ pub enum TorchBackend { Cu91, Cu90, Cu80, + Rocm63, + Rocm624, + Rocm62, + Rocm61, + Rocm60, + Rocm57, + Rocm56, + Rocm55, + Rocm542, + Rocm54, + Rocm53, + Rocm52, + Rocm511, + Rocm42, + Rocm41, + Rocm401, + Xpu, } impl TorchBackend { @@ -290,6 +430,23 @@ impl TorchBackend { Self::Cu91 => &CU91_INDEX_URL, Self::Cu90 => &CU90_INDEX_URL, Self::Cu80 => &CU80_INDEX_URL, + Self::Rocm63 => &ROCM63_INDEX_URL, + Self::Rocm624 => &ROCM624_INDEX_URL, + Self::Rocm62 => &ROCM62_INDEX_URL, + Self::Rocm61 => &ROCM61_INDEX_URL, + Self::Rocm60 => &ROCM60_INDEX_URL, + Self::Rocm57 => &ROCM57_INDEX_URL, + Self::Rocm56 => &ROCM56_INDEX_URL, + Self::Rocm55 => &ROCM55_INDEX_URL, + Self::Rocm542 => &ROCM542_INDEX_URL, + Self::Rocm54 => &ROCM54_INDEX_URL, + Self::Rocm53 => &ROCM53_INDEX_URL, + Self::Rocm52 => &ROCM52_INDEX_URL, + Self::Rocm511 => &ROCM511_INDEX_URL, + Self::Rocm42 => &ROCM42_INDEX_URL, + Self::Rocm41 => &ROCM41_INDEX_URL, + Self::Rocm401 => &ROCM401_INDEX_URL, + Self::Xpu => &XPU_INDEX_URL, } } @@ -336,6 +493,71 @@ impl TorchBackend { TorchBackend::Cu91 => Some(Version::new([9, 1])), TorchBackend::Cu90 => Some(Version::new([9, 0])), TorchBackend::Cu80 => Some(Version::new([8, 0])), + TorchBackend::Rocm63 => None, + TorchBackend::Rocm624 => None, + TorchBackend::Rocm62 => None, + TorchBackend::Rocm61 => None, + TorchBackend::Rocm60 => None, + TorchBackend::Rocm57 => None, + TorchBackend::Rocm56 => None, + TorchBackend::Rocm55 => None, + TorchBackend::Rocm542 => None, + TorchBackend::Rocm54 => None, + TorchBackend::Rocm53 => None, + TorchBackend::Rocm52 => None, + TorchBackend::Rocm511 => None, + TorchBackend::Rocm42 => None, + TorchBackend::Rocm41 => None, + TorchBackend::Rocm401 => None, + TorchBackend::Xpu => None, + } + } + + /// Returns the ROCM [`Version`] for the given [`TorchBackend`]. 
+ pub fn rocm_version(&self) -> Option { + match self { + TorchBackend::Cpu => None, + TorchBackend::Cu128 => None, + TorchBackend::Cu126 => None, + TorchBackend::Cu125 => None, + TorchBackend::Cu124 => None, + TorchBackend::Cu123 => None, + TorchBackend::Cu122 => None, + TorchBackend::Cu121 => None, + TorchBackend::Cu120 => None, + TorchBackend::Cu118 => None, + TorchBackend::Cu117 => None, + TorchBackend::Cu116 => None, + TorchBackend::Cu115 => None, + TorchBackend::Cu114 => None, + TorchBackend::Cu113 => None, + TorchBackend::Cu112 => None, + TorchBackend::Cu111 => None, + TorchBackend::Cu110 => None, + TorchBackend::Cu102 => None, + TorchBackend::Cu101 => None, + TorchBackend::Cu100 => None, + TorchBackend::Cu92 => None, + TorchBackend::Cu91 => None, + TorchBackend::Cu90 => None, + TorchBackend::Cu80 => None, + TorchBackend::Rocm63 => Some(Version::new([6, 3])), + TorchBackend::Rocm624 => Some(Version::new([6, 2, 4])), + TorchBackend::Rocm62 => Some(Version::new([6, 2])), + TorchBackend::Rocm61 => Some(Version::new([6, 1])), + TorchBackend::Rocm60 => Some(Version::new([6, 0])), + TorchBackend::Rocm57 => Some(Version::new([5, 7])), + TorchBackend::Rocm56 => Some(Version::new([5, 6])), + TorchBackend::Rocm55 => Some(Version::new([5, 5])), + TorchBackend::Rocm542 => Some(Version::new([5, 4, 2])), + TorchBackend::Rocm54 => Some(Version::new([5, 4])), + TorchBackend::Rocm53 => Some(Version::new([5, 3])), + TorchBackend::Rocm52 => Some(Version::new([5, 2])), + TorchBackend::Rocm511 => Some(Version::new([5, 1, 1])), + TorchBackend::Rocm42 => Some(Version::new([4, 2])), + TorchBackend::Rocm41 => Some(Version::new([4, 1])), + TorchBackend::Rocm401 => Some(Version::new([4, 0, 1])), + TorchBackend::Xpu => None, } } } @@ -370,6 +592,23 @@ impl FromStr for TorchBackend { "cu91" => Ok(TorchBackend::Cu91), "cu90" => Ok(TorchBackend::Cu90), "cu80" => Ok(TorchBackend::Cu80), + "rocm6.3" => Ok(TorchBackend::Rocm63), + "rocm6.2.4" => Ok(TorchBackend::Rocm624), + "rocm6.2" => Ok(TorchBackend::Rocm62), + "rocm6.1" => Ok(TorchBackend::Rocm61), + "rocm6.0" => Ok(TorchBackend::Rocm60), + "rocm5.7" => Ok(TorchBackend::Rocm57), + "rocm5.6" => Ok(TorchBackend::Rocm56), + "rocm5.5" => Ok(TorchBackend::Rocm55), + "rocm5.4.2" => Ok(TorchBackend::Rocm542), + "rocm5.4" => Ok(TorchBackend::Rocm54), + "rocm5.3" => Ok(TorchBackend::Rocm53), + "rocm5.2" => Ok(TorchBackend::Rocm52), + "rocm5.1.1" => Ok(TorchBackend::Rocm511), + "rocm4.2" => Ok(TorchBackend::Rocm42), + "rocm4.1" => Ok(TorchBackend::Rocm41), + "rocm4.0.1" => Ok(TorchBackend::Rocm401), + "xpu" => Ok(TorchBackend::Xpu), _ => Err(format!("Unknown PyTorch backend: {s}")), } } @@ -378,7 +617,7 @@ impl FromStr for TorchBackend { /// Linux CUDA driver versions and the corresponding CUDA versions. /// /// See: -static LINUX_DRIVERS: LazyLock<[(TorchBackend, Version); 24]> = LazyLock::new(|| { +static LINUX_CUDA_DRIVERS: LazyLock<[(TorchBackend, Version); 24]> = LazyLock::new(|| { [ // Table 2 from // https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html @@ -451,6 +690,73 @@ static WINDOWS_CUDA_VERSIONS: LazyLock<[(TorchBackend, Version); 24]> = LazyLock ] }); +/// Linux AMD GPU architectures and the corresponding PyTorch backends. +/// +/// These were inferred by running the following snippet for each ROCm version: +/// +/// ```python +/// import torch +/// +/// print(torch.cuda.get_arch_list()) +/// ``` +/// +/// AMD also provides a compatibility matrix: ; +/// however, this list includes a broader array of GPUs than those in the matrix. 
+static LINUX_AMD_GPU_DRIVERS: LazyLock<[(TorchBackend, AmdGpuArchitecture); 44]> = + LazyLock::new(|| { + [ + // ROCm 6.3 + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx942), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1101), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1102), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1200), + (TorchBackend::Rocm63, AmdGpuArchitecture::Gfx1201), + // ROCm 6.2.4 + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx942), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1101), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1102), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1200), + (TorchBackend::Rocm624, AmdGpuArchitecture::Gfx1201), + // ROCm 6.2 + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm62, AmdGpuArchitecture::Gfx942), + // ROCm 6.1 + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx942), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm61, AmdGpuArchitecture::Gfx1101), + // ROCm 6.0 + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx900), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx906), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx908), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx90a), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx1030), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx1100), + (TorchBackend::Rocm60, AmdGpuArchitecture::Gfx942), + ] + }); + static CPU_INDEX_URL: LazyLock = LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/cpu").unwrap()); static CU128_INDEX_URL: LazyLock = @@ -501,3 +807,37 @@ static CU90_INDEX_URL: LazyLock = LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/cu90").unwrap()); static CU80_INDEX_URL: LazyLock = LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/cu80").unwrap()); +static ROCM63_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.3").unwrap()); +static ROCM624_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.2.4").unwrap()); +static ROCM62_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.2").unwrap()); +static ROCM61_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.1").unwrap()); +static 
ROCM60_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm6.0").unwrap()); +static ROCM57_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.7").unwrap()); +static ROCM56_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.6").unwrap()); +static ROCM55_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.5").unwrap()); +static ROCM542_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.4.2").unwrap()); +static ROCM54_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.4").unwrap()); +static ROCM53_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.3").unwrap()); +static ROCM52_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.2").unwrap()); +static ROCM511_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm5.1.1").unwrap()); +static ROCM42_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm4.2").unwrap()); +static ROCM41_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm4.1").unwrap()); +static ROCM401_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/rocm4.0.1").unwrap()); +static XPU_INDEX_URL: LazyLock = + LazyLock::new(|| IndexUrl::from_str("https://download.pytorch.org/whl/xpu").unwrap()); diff --git a/crates/uv-trampoline-builder/src/lib.rs b/crates/uv-trampoline-builder/src/lib.rs index 15b435ec5..2e1cde872 100644 --- a/crates/uv-trampoline-builder/src/lib.rs +++ b/crates/uv-trampoline-builder/src/lib.rs @@ -521,7 +521,7 @@ if __name__ == "__main__": } #[test] - #[ignore] + #[ignore = "This test will spawn a GUI and wait until you close the window."] fn gui_launcher() -> Result<()> { // Create Temp Dirs let temp_dir = assert_fs::TempDir::new()?; diff --git a/crates/uv-trampoline/Cargo.lock b/crates/uv-trampoline/Cargo.lock index 89a7c5979..37edf9ede 100644 --- a/crates/uv-trampoline/Cargo.lock +++ b/crates/uv-trampoline/Cargo.lock @@ -100,9 +100,9 @@ dependencies = [ [[package]] name = "windows" -version = "0.61.1" +version = "0.61.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" dependencies = [ "windows-collections", "windows-core", @@ -122,9 +122,9 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.61.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", @@ -135,12 +135,13 @@ dependencies = [ [[package]] name = "windows-future" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ "windows-core", "windows-link", + "windows-threading", ] [[package]] @@ -167,9 
+168,9 @@ dependencies = [ [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-numerics" @@ -183,18 +184,27 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" dependencies = [ "windows-link", ] diff --git a/crates/uv-trampoline/src/bounce.rs b/crates/uv-trampoline/src/bounce.rs index 1e90f035d..8d658bdab 100644 --- a/crates/uv-trampoline/src/bounce.rs +++ b/crates/uv-trampoline/src/bounce.rs @@ -78,7 +78,34 @@ fn make_child_cmdline() -> CString { // Only execute the trampoline again if it's a script, otherwise, just invoke Python. match kind { - TrampolineKind::Python => {} + TrampolineKind::Python => { + // SAFETY: `std::env::set_var` is safe to call on Windows, and + // this code only ever runs on Windows. + unsafe { + // Setting this env var will cause `getpath.py` to set + // `executable` to the path to this trampoline. This is + // the approach taken by CPython for Python Launchers + // (in `launcher.c`). This allows virtual environments to + // be correctly detected when using trampolines. + std::env::set_var("__PYVENV_LAUNCHER__", &executable_name); + + // If this is not a virtual environment and `PYTHONHOME` has + // not been set, then set `PYTHONHOME` to the parent directory of + // the executable. This ensures that the correct installation + // directories are added to `sys.path` when running with a junction + // trampoline. + let python_home_set = + std::env::var("PYTHONHOME").is_ok_and(|home| !home.is_empty()); + if !is_virtualenv(python_exe.as_path()) && !python_home_set { + std::env::set_var( + "PYTHONHOME", + python_exe + .parent() + .expect("Python executable should have a parent directory"), + ); + } + } + } TrampolineKind::Script => { // Use the full executable name because CMD only passes the name of the executable (but not the path) // when e.g. invoking `black` instead of `/Scripts/black` and Python then fails @@ -118,6 +145,20 @@ fn push_quoted_path(path: &Path, command: &mut Vec) { command.extend(br#"""#); } +/// Checks if the given executable is part of a virtual environment +/// +/// Checks if a `pyvenv.cfg` file exists in grandparent directory of the given executable. +/// PEP 405 specifies a more robust procedure (checking both the parent and grandparent +/// directory and then scanning for a `home` key), but in practice we have found this to +/// be unnecessary. 
+fn is_virtualenv(executable: &Path) -> bool { + executable + .parent() + .and_then(Path::parent) + .map(|path| path.join("pyvenv.cfg").is_file()) + .unwrap_or(false) +} + /// Reads the executable binary from the back to find: /// /// * The path to the Python executable @@ -240,10 +281,18 @@ fn read_trampoline_metadata(executable_name: &Path) -> (TrampolineKind, PathBuf) parent_dir.join(path) }; - // NOTICE: dunce adds 5kb~ - let path = dunce::canonicalize(path.as_path()).unwrap_or_else(|_| { - error_and_exit("Failed to canonicalize script path"); - }); + let path = if !path.is_absolute() || matches!(kind, TrampolineKind::Script) { + // NOTICE: dunce adds 5kb~ + // TODO(john): In order to avoid resolving junctions and symlinks for relative paths and + // scripts, we can consider reverting https://github.com/astral-sh/uv/pull/5750/files#diff-969979506be03e89476feade2edebb4689a9c261f325988d3c7efc5e51de26d1L273-L277. + dunce::canonicalize(path.as_path()).unwrap_or_else(|_| { + error_and_exit("Failed to canonicalize script path"); + }) + } else { + // For Python trampolines with absolute paths, we skip `dunce::canonicalize` to + // avoid resolving junctions. + path + }; (kind, path) } diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe index 3b7f76564..5f2d6115e 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-console.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe index 74080d4db..3a5a2e348 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-aarch64-gui.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe b/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe index 3fd1e0aff..bdc225e4d 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-i686-console.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe b/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe index 4221696a1..d6753380d 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-i686-gui.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe index 5b8fa6acc..b93c242e7 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-console.exe differ diff --git a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe index 8cb19cf8f..c81d8e4e5 100755 Binary files a/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe and b/crates/uv-trampoline/trampolines/uv-trampoline-x86_64-gui.exe differ diff --git a/crates/uv-types/Cargo.toml b/crates/uv-types/Cargo.toml index 0973a5218..f29af4ca4 100644 --- a/crates/uv-types/Cargo.toml +++ b/crates/uv-types/Cargo.toml @@ -31,6 +31,7 @@ uv-redacted = { workspace = true } uv-workspace = { workspace = true } anyhow = { workspace = true } +dashmap = { workspace = true } rustc-hash = { workspace = true } thiserror 
= { workspace = true } diff --git a/crates/uv-types/src/builds.rs b/crates/uv-types/src/builds.rs index ea5e0b6a3..e8c622057 100644 --- a/crates/uv-types/src/builds.rs +++ b/crates/uv-types/src/builds.rs @@ -1,3 +1,9 @@ +use std::path::Path; +use std::sync::Arc; + +use dashmap::DashMap; + +use uv_configuration::{BuildKind, SourceStrategy}; use uv_pep508::PackageName; use uv_python::PythonEnvironment; @@ -37,3 +43,42 @@ impl BuildIsolation<'_> { } } } + +/// A key for the build cache, which includes the interpreter, source root, subdirectory, source +/// strategy, and build kind. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BuildKey { + pub base_python: Box<Path>, + pub source_root: Box<Path>, + pub subdirectory: Option<Box<Path>>, + pub source_strategy: SourceStrategy, + pub build_kind: BuildKind, +} + +/// An arena of in-process builds. +#[derive(Debug)] +pub struct BuildArena<T>(Arc<DashMap<BuildKey, T>>); + +impl<T> Default for BuildArena<T> { + fn default() -> Self { + Self(Arc::new(DashMap::new())) + } +} + +impl<T> Clone for BuildArena<T> { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl<T> BuildArena<T> { + /// Insert a build entry into the arena. + pub fn insert(&self, key: BuildKey, value: T) { + self.0.insert(key, value); + } + + /// Remove a build entry from the arena. + pub fn remove(&self, key: &BuildKey) -> Option<T> { + self.0.remove(key).map(|entry| entry.1) + } +} diff --git a/crates/uv-types/src/traits.rs b/crates/uv-types/src/traits.rs index 6f724b27a..a95367fef 100644 --- a/crates/uv-types/src/traits.rs +++ b/crates/uv-types/src/traits.rs @@ -18,6 +18,8 @@ use uv_pep508::PackageName; use uv_python::{Interpreter, PythonEnvironment}; use uv_workspace::WorkspaceCache; +use crate::BuildArena; + /// Avoids cyclic crate dependencies between resolver, installer and builder. /// /// To resolve the dependencies of a package, we may need to build one or more source @@ -67,6 +69,9 @@ pub trait BuildContext { /// Return a reference to the Git resolver. fn git(&self) -> &GitResolver; + /// Return a reference to the build arena. + fn build_arena(&self) -> &BuildArena; + /// Return a reference to the discovered registry capabilities.
fn capabilities(&self) -> &IndexCapabilities; @@ -180,13 +185,13 @@ pub trait InstalledPackagesProvider: Clone + Send + Sync + 'static { pub struct EmptyInstalledPackages; impl InstalledPackagesProvider for EmptyInstalledPackages { - fn get_packages(&self, _name: &PackageName) -> Vec<&InstalledDist> { - Vec::new() - } - fn iter(&self) -> impl Iterator { std::iter::empty() } + + fn get_packages(&self, _name: &PackageName) -> Vec<&InstalledDist> { + Vec::new() + } } /// [`anyhow::Error`]-like wrapper type for [`BuildDispatch`] method return values, that also makes diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index a029b8196..9b9ccd9bd 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.7.13" +version = "0.7.19" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv-virtualenv/Cargo.toml b/crates/uv-virtualenv/Cargo.toml index e9610176b..cb0ae1b9d 100644 --- a/crates/uv-virtualenv/Cargo.toml +++ b/crates/uv-virtualenv/Cargo.toml @@ -20,6 +20,7 @@ doctest = false workspace = true [dependencies] +uv-configuration = { workspace = true } uv-fs = { workspace = true } uv-pypi-types = { workspace = true } uv-python = { workspace = true } diff --git a/crates/uv-virtualenv/src/lib.rs b/crates/uv-virtualenv/src/lib.rs index 8c4e1feab..277ab6a8c 100644 --- a/crates/uv-virtualenv/src/lib.rs +++ b/crates/uv-virtualenv/src/lib.rs @@ -3,6 +3,7 @@ use std::path::Path; use thiserror::Error; +use uv_configuration::PreviewMode; use uv_python::{Interpreter, PythonEnvironment}; mod virtualenv; @@ -15,6 +16,8 @@ pub enum Error { "Could not find a suitable Python executable for the virtual environment based on the interpreter: {0}" )] NotFound(String), + #[error(transparent)] + Python(#[from] uv_python::managed::Error), } /// The value to use for the shell prompt when inside a virtual environment. @@ -50,6 +53,8 @@ pub fn create_venv( allow_existing: bool, relocatable: bool, seed: bool, + upgradeable: bool, + preview: PreviewMode, ) -> Result { // Create the virtualenv at the given location. let virtualenv = virtualenv::create( @@ -60,6 +65,8 @@ pub fn create_venv( allow_existing, relocatable, seed, + upgradeable, + preview, )?; // Create the corresponding `PythonEnvironment`. diff --git a/crates/uv-virtualenv/src/virtualenv.rs b/crates/uv-virtualenv/src/virtualenv.rs index a641e5541..bad380c4c 100644 --- a/crates/uv-virtualenv/src/virtualenv.rs +++ b/crates/uv-virtualenv/src/virtualenv.rs @@ -10,8 +10,10 @@ use fs_err::File; use itertools::Itertools; use tracing::debug; +use uv_configuration::PreviewMode; use uv_fs::{CWD, Simplified, cachedir}; use uv_pypi_types::Scheme; +use uv_python::managed::{PythonMinorVersionLink, create_link_to_executable}; use uv_python::{Interpreter, VirtualEnvironment}; use uv_shell::escape_posix_for_single_quotes; use uv_version::version; @@ -53,6 +55,8 @@ pub(crate) fn create( allow_existing: bool, relocatable: bool, seed: bool, + upgradeable: bool, + preview: PreviewMode, ) -> Result { // Determine the base Python executable; that is, the Python executable that should be // considered the "base" for the virtual environment. 
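`create_venv` (and the underlying `virtualenv::create`) now takes two extra trailing arguments, so every call site must be updated in lockstep. A sketch of an updated caller; the leading arguments and their order are assumed from the surrounding context, and the variable bindings are placeholders:

```rust
// Sketch: only the last two arguments (`upgradeable`, `preview`) are new.
let venv = uv_virtualenv::create_venv(
    &location,
    interpreter,
    prompt,
    false, // system_site_packages
    false, // allow_existing
    false, // relocatable
    false, // seed
    true,  // upgradeable: route `home` through a minor-version symlink/junction
    preview,
)?;
```

Passing `upgradeable = false` preserves the old behavior of linking directly against the interpreter's real path.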
@@ -81,7 +85,7 @@ pub(crate) fn create( } else if metadata.is_dir() { if allow_existing { debug!("Allowing existing directory"); - } else if location.join("pyvenv.cfg").is_file() { + } else if uv_fs::is_virtualenv_base(location) { debug!("Removing existing directory"); // On Windows, if the current executable is in the directory, guard against @@ -143,13 +147,51 @@ pub(crate) fn create( // Create a `.gitignore` file to ignore all files in the venv. fs::write(location.join(".gitignore"), "*")?; + let mut using_minor_version_link = false; + let executable_target = if upgradeable && interpreter.is_standalone() { + if let Some(minor_version_link) = PythonMinorVersionLink::from_executable( + base_python.as_path(), + &interpreter.key(), + preview, + ) { + if !minor_version_link.exists() { + base_python.clone() + } else { + let debug_symlink_term = if cfg!(windows) { + "junction" + } else { + "symlink directory" + }; + debug!( + "Using {} {} instead of base Python path: {}", + debug_symlink_term, + &minor_version_link.symlink_directory.display(), + &base_python.display() + ); + using_minor_version_link = true; + minor_version_link.symlink_executable.clone() + } + } else { + base_python.clone() + } + } else { + base_python.clone() + }; + // Per PEP 405, the Python `home` is the parent directory of the interpreter. - let python_home = base_python.parent().ok_or_else(|| { - io::Error::new( - io::ErrorKind::NotFound, - "The Python interpreter needs to have a parent directory", - ) - })?; + // In preview mode, for standalone interpreters, this `home` value will include a + // symlink directory on Unix or junction on Windows to enable transparent Python patch + // upgrades. + let python_home = executable_target + .parent() + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::NotFound, + "The Python interpreter needs to have a parent directory", + ) + })? + .to_path_buf(); + let python_home = python_home.as_path(); // Different names for the python interpreter fs::create_dir_all(&scripts)?; @@ -157,7 +199,7 @@ pub(crate) fn create( #[cfg(unix)] { - uv_fs::replace_symlink(&base_python, &executable)?; + uv_fs::replace_symlink(&executable_target, &executable)?; uv_fs::replace_symlink( "python", scripts.join(format!("python{}", interpreter.python_major())), @@ -184,91 +226,102 @@ pub(crate) fn create( } } - // No symlinking on Windows, at least not on a regular non-dev non-admin Windows install. + // On Windows, we use trampolines that point to an executable target. For standalone + // interpreters, this target path includes a minor version junction to enable + // transparent upgrades. 
if cfg!(windows) { - copy_launcher_windows( - WindowsExecutable::Python, - interpreter, - &base_python, - &scripts, - python_home, - )?; - - if interpreter.markers().implementation_name() == "graalpy" { - copy_launcher_windows( - WindowsExecutable::GraalPy, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PythonMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; + if using_minor_version_link { + let target = scripts.join(WindowsExecutable::Python.exe(interpreter)); + create_link_to_executable(target.as_path(), executable_target.clone()) + .map_err(Error::Python)?; + let targetw = scripts.join(WindowsExecutable::Pythonw.exe(interpreter)); + create_link_to_executable(targetw.as_path(), executable_target) + .map_err(Error::Python)?; } else { copy_launcher_windows( - WindowsExecutable::Pythonw, + WindowsExecutable::Python, interpreter, &base_python, &scripts, python_home, )?; - } - if interpreter.markers().implementation_name() == "pypy" { - copy_launcher_windows( - WindowsExecutable::PythonMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PythonMajorMinor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPy, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajorMinor, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyw, - interpreter, - &base_python, - &scripts, - python_home, - )?; - copy_launcher_windows( - WindowsExecutable::PyPyMajorMinorw, - interpreter, - &base_python, - &scripts, - python_home, - )?; + if interpreter.markers().implementation_name() == "graalpy" { + copy_launcher_windows( + WindowsExecutable::GraalPy, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } else { + copy_launcher_windows( + WindowsExecutable::Pythonw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } + + if interpreter.markers().implementation_name() == "pypy" { + copy_launcher_windows( + WindowsExecutable::PythonMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PythonMajorMinor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPy, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajorMinor, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + copy_launcher_windows( + WindowsExecutable::PyPyMajorMinorw, + interpreter, + &base_python, + &scripts, + python_home, + )?; + } } } diff --git a/crates/uv-warnings/src/lib.rs b/crates/uv-warnings/src/lib.rs index 9ed4c646e..2b664be8d 100644 --- a/crates/uv-warnings/src/lib.rs +++ b/crates/uv-warnings/src/lib.rs @@ -13,27 +13,27 @@ pub static ENABLED: AtomicBool = AtomicBool::new(false); /// Enable user-facing warnings. 
pub fn enable() { - ENABLED.store(true, std::sync::atomic::Ordering::SeqCst); + ENABLED.store(true, std::sync::atomic::Ordering::Relaxed); } /// Disable user-facing warnings. pub fn disable() { - ENABLED.store(false, std::sync::atomic::Ordering::SeqCst); + ENABLED.store(false, std::sync::atomic::Ordering::Relaxed); } /// Warn a user, if warnings are enabled. #[macro_export] macro_rules! warn_user { - ($($arg:tt)*) => { + ($($arg:tt)*) => {{ use $crate::anstream::eprintln; use $crate::owo_colors::OwoColorize; - if $crate::ENABLED.load(std::sync::atomic::Ordering::SeqCst) { + if $crate::ENABLED.load(std::sync::atomic::Ordering::Relaxed) { let message = format!("{}", format_args!($($arg)*)); let formatted = message.bold(); eprintln!("{}{} {formatted}", "warning".yellow().bold(), ":".bold()); } - }; + }}; } pub static WARNINGS: LazyLock>> = LazyLock::new(Mutex::default); @@ -42,11 +42,11 @@ pub static WARNINGS: LazyLock>> = LazyLock::new(Mutex::d /// message. #[macro_export] macro_rules! warn_user_once { - ($($arg:tt)*) => { + ($($arg:tt)*) => {{ use $crate::anstream::eprintln; use $crate::owo_colors::OwoColorize; - if $crate::ENABLED.load(std::sync::atomic::Ordering::SeqCst) { + if $crate::ENABLED.load(std::sync::atomic::Ordering::Relaxed) { if let Ok(mut states) = $crate::WARNINGS.lock() { let message = format!("{}", format_args!($($arg)*)); if states.insert(message.clone()) { @@ -54,5 +54,5 @@ macro_rules! warn_user_once { } } } - }; + }}; } diff --git a/crates/uv-workspace/Cargo.toml b/crates/uv-workspace/Cargo.toml index a8d672aab..36059f10f 100644 --- a/crates/uv-workspace/Cargo.toml +++ b/crates/uv-workspace/Cargo.toml @@ -18,6 +18,7 @@ workspace = true [dependencies] uv-build-backend = { workspace = true, features = ["schemars"] } uv-cache-key = { workspace = true } +uv-configuration = { workspace = true } uv-distribution-types = { workspace = true } uv-fs = { workspace = true, features = ["tokio", "schemars"] } uv-git-types = { workspace = true } diff --git a/crates/uv-workspace/src/dependency_groups.rs b/crates/uv-workspace/src/dependency_groups.rs index e6964544a..8503ae3ad 100644 --- a/crates/uv-workspace/src/dependency_groups.rs +++ b/crates/uv-workspace/src/dependency_groups.rs @@ -1,32 +1,106 @@ -use std::collections::BTreeMap; use std::collections::btree_map::Entry; use std::str::FromStr; +use std::{collections::BTreeMap, path::Path}; use thiserror::Error; use tracing::error; +use uv_distribution_types::RequiresPython; +use uv_fs::Simplified; use uv_normalize::{DEV_DEPENDENCIES, GroupName}; +use uv_pep440::VersionSpecifiers; use uv_pep508::Pep508Error; use uv_pypi_types::{DependencyGroupSpecifier, VerbatimParsedUrl}; +use crate::pyproject::{DependencyGroupSettings, PyProjectToml, ToolUvDependencyGroups}; + /// PEP 735 dependency groups, with any `include-group` entries resolved. #[derive(Debug, Default, Clone)] -pub struct FlatDependencyGroups( - BTreeMap>>, -); +pub struct FlatDependencyGroups(BTreeMap); + +#[derive(Debug, Default, Clone)] +pub struct FlatDependencyGroup { + pub requirements: Vec>, + pub requires_python: Option, +} impl FlatDependencyGroups { + /// Gather and flatten all the dependency-groups defined in the given pyproject.toml + /// + /// The path is only used in diagnostics. 
+ pub fn from_pyproject_toml( + path: &Path, + pyproject_toml: &PyProjectToml, + ) -> Result { + // First, collect `tool.uv.dev_dependencies` + let dev_dependencies = pyproject_toml + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.dev_dependencies.as_ref()); + + // Then, collect `dependency-groups` + let dependency_groups = pyproject_toml + .dependency_groups + .iter() + .flatten() + .collect::>(); + + // Get additional settings + let empty_settings = ToolUvDependencyGroups::default(); + let group_settings = pyproject_toml + .tool + .as_ref() + .and_then(|tool| tool.uv.as_ref()) + .and_then(|uv| uv.dependency_groups.as_ref()) + .unwrap_or(&empty_settings); + + // Flatten the dependency groups. + let mut dependency_groups = FlatDependencyGroups::from_dependency_groups( + &dependency_groups, + group_settings.inner(), + ) + .map_err(|err| DependencyGroupError { + package: pyproject_toml + .project + .as_ref() + .map(|project| project.name.to_string()) + .unwrap_or_default(), + path: path.user_display().to_string(), + error: err.with_dev_dependencies(dev_dependencies), + })?; + + // Add the `dev` group, if the legacy `dev-dependencies` is defined. + // + // NOTE: the fact that we do this out here means that nothing can inherit from + // the legacy dev-dependencies group (or define a group requires-python for it). + // This is intentional, we want groups to be defined in a standard interoperable + // way, and letting things include-group a group that isn't defined would be a + // mess for other python tools. + if let Some(dev_dependencies) = dev_dependencies { + dependency_groups + .entry(DEV_DEPENDENCIES.clone()) + .or_insert_with(FlatDependencyGroup::default) + .requirements + .extend(dev_dependencies.clone()); + } + + Ok(dependency_groups) + } + /// Resolve the dependency groups (which may contain references to other groups) into concrete /// lists of requirements. - pub fn from_dependency_groups( + fn from_dependency_groups( groups: &BTreeMap<&GroupName, &Vec>, - ) -> Result { + settings: &BTreeMap, + ) -> Result { fn resolve_group<'data>( - resolved: &mut BTreeMap>>, + resolved: &mut BTreeMap, groups: &'data BTreeMap<&GroupName, &Vec>, + settings: &BTreeMap, name: &'data GroupName, parents: &mut Vec<&'data GroupName>, - ) -> Result<(), DependencyGroupError> { + ) -> Result<(), DependencyGroupErrorInner> { let Some(specifiers) = groups.get(name) else { // Missing group let parent_name = parents @@ -34,7 +108,7 @@ impl FlatDependencyGroups { .last() .copied() .expect("parent when group is missing"); - return Err(DependencyGroupError::GroupNotFound( + return Err(DependencyGroupErrorInner::GroupNotFound( name.clone(), parent_name.clone(), )); @@ -42,7 +116,7 @@ impl FlatDependencyGroups { // "Dependency Group Includes MUST NOT include cycles, and tools SHOULD report an error if they detect a cycle." 
if parents.contains(&name) { - return Err(DependencyGroupError::DependencyGroupCycle(Cycle( + return Err(DependencyGroupErrorInner::DependencyGroupCycle(Cycle( parents.iter().copied().cloned().collect(), ))); } @@ -54,13 +128,14 @@ impl FlatDependencyGroups { parents.push(name); let mut requirements = Vec::with_capacity(specifiers.len()); + let mut requires_python_intersection = VersionSpecifiers::empty(); for specifier in *specifiers { match specifier { DependencyGroupSpecifier::Requirement(requirement) => { match uv_pep508::Requirement::<VerbatimParsedUrl>::from_str(requirement) { Ok(requirement) => requirements.push(requirement), Err(err) => { - return Err(DependencyGroupError::GroupParseError( + return Err(DependencyGroupErrorInner::GroupParseError( name.clone(), requirement.clone(), Box::new(err), @@ -69,72 +144,107 @@ impl FlatDependencyGroups { } } DependencyGroupSpecifier::IncludeGroup { include_group } => { - resolve_group(resolved, groups, include_group, parents)?; - requirements - .extend(resolved.get(include_group).into_iter().flatten().cloned()); + resolve_group(resolved, groups, settings, include_group, parents)?; + if let Some(included) = resolved.get(include_group) { + requirements.extend(included.requirements.iter().cloned()); + + // Intersect the requires-python for this group with the included group's + requires_python_intersection = requires_python_intersection + .into_iter() + .chain(included.requires_python.clone().into_iter().flatten()) + .collect(); + } } DependencyGroupSpecifier::Object(map) => { - return Err(DependencyGroupError::DependencyObjectSpecifierNotSupported( - name.clone(), - map.clone(), - )); + return Err( + DependencyGroupErrorInner::DependencyObjectSpecifierNotSupported( + name.clone(), + map.clone(), + ), + ); } } } + + let empty_settings = DependencyGroupSettings::default(); + let DependencyGroupSettings { requires_python } = + settings.get(name).unwrap_or(&empty_settings); + if let Some(requires_python) = requires_python { + // Intersect the requires-python for this group to get the final requires-python + // that will be used by interpreter discovery and checking. + requires_python_intersection = requires_python_intersection + .into_iter() + .chain(requires_python.clone()) + .collect(); + + // Add the group requires-python as a marker to each requirement. + // We don't use `requires_python_intersection` because each `include-group` + // should already have its markers applied to these. + for requirement in &mut requirements { + let extra_markers = + RequiresPython::from_specifiers(requires_python).to_marker_tree(); + requirement.marker.and(extra_markers); + } + } + parents.pop(); - resolved.insert(name.clone(), requirements); + resolved.insert( + name.clone(), + FlatDependencyGroup { + requirements, + requires_python: if requires_python_intersection.is_empty() { + None + } else { + Some(requires_python_intersection) + }, + }, + ); Ok(()) } + // Validate the settings + for (group_name, ..) in settings { + if !groups.contains_key(group_name) { + return Err(DependencyGroupErrorInner::SettingsGroupNotFound( + group_name.clone(), + )); + } + } + let mut resolved = BTreeMap::new(); for name in groups.keys() { let mut parents = Vec::new(); - resolve_group(&mut resolved, groups, name, &mut parents)?; + resolve_group(&mut resolved, groups, settings, name, &mut parents)?; } Ok(Self(resolved)) } /// Return the requirements for a given group, if any.
/// Return the requirements for a given group, if any. - pub fn get( - &self, - group: &GroupName, - ) -> Option<&Vec<uv_pep508::Requirement<VerbatimParsedUrl>>> { + pub fn get(&self, group: &GroupName) -> Option<&FlatDependencyGroup> { self.0.get(group) } /// Return the entry for a given group, if any. - pub fn entry( - &mut self, - group: GroupName, - ) -> Entry<GroupName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>> { + pub fn entry(&mut self, group: GroupName) -> Entry<GroupName, FlatDependencyGroup> { self.0.entry(group) } /// Consume the [`FlatDependencyGroups`] and return the inner map. - pub fn into_inner(self) -> BTreeMap<GroupName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>> { + pub fn into_inner(self) -> BTreeMap<GroupName, FlatDependencyGroup> { self.0 } } -impl FromIterator<(GroupName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>)> - for FlatDependencyGroups -{ - fn from_iter< - T: IntoIterator<Item = (GroupName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>)>, - >( - iter: T, - ) -> Self { +impl FromIterator<(GroupName, FlatDependencyGroup)> for FlatDependencyGroups { + fn from_iter<T: IntoIterator<Item = (GroupName, FlatDependencyGroup)>>(iter: T) -> Self { Self(iter.into_iter().collect()) } } impl IntoIterator for FlatDependencyGroups { - type Item = (GroupName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>); - type IntoIter = std::collections::btree_map::IntoIter< - GroupName, - Vec<uv_pep508::Requirement<VerbatimParsedUrl>>, - >; + type Item = (GroupName, FlatDependencyGroup); + type IntoIter = std::collections::btree_map::IntoIter<GroupName, FlatDependencyGroup>; fn into_iter(self) -> Self::IntoIter { self.0.into_iter() @@ -142,7 +252,24 @@ impl IntoIterator for FlatDependencyGroups { } #[derive(Debug, Error)] -pub enum DependencyGroupError { +#[error("{} has malformed dependency groups", if path.is_empty() && package.is_empty() { + "Project".to_string() +} else if path.is_empty() { + format!("Project `{package}`") +} else if package.is_empty() { + format!("`{path}`") +} else { + format!("Project `{package} @ {path}`") +})] +pub struct DependencyGroupError { + package: String, + path: String, + #[source] + error: DependencyGroupErrorInner, +} + +#[derive(Debug, Error)] +pub enum DependencyGroupErrorInner { #[error("Failed to parse entry in group `{0}`: `{1}`")] GroupParseError( GroupName, @@ -159,9 +286,15 @@ pub enum DependencyGroupError { DependencyGroupCycle(Cycle), #[error("Group `{0}` contains an unknown dependency object specifier: {1:?}")] DependencyObjectSpecifierNotSupported(GroupName, BTreeMap), + #[error("Failed to find group `{0}` specified in `[tool.uv.dependency-groups]`")] + SettingsGroupNotFound(GroupName), + #[error( + "`[tool.uv.dependency-groups]` specifies the `dev` group, but only `tool.uv.dev-dependencies` was found. To reference the `dev` group, remove the `tool.uv.dev-dependencies` section and add any development dependencies to the `dev` entry in the `[dependency-groups]` table instead." + )] + SettingsDevGroupInclude, } -impl DependencyGroupError { +impl DependencyGroupErrorInner { /// Enrich a [`DependencyGroupErrorInner`] with the `tool.uv.dev-dependencies` metadata, if applicable.
#[must_use] pub fn with_dev_dependencies( @@ -169,10 +302,15 @@ impl DependencyGroupError { dev_dependencies: Option<&Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>, ) -> Self { match self { - DependencyGroupError::GroupNotFound(group, parent) + Self::GroupNotFound(group, parent) if dev_dependencies.is_some() && group == *DEV_DEPENDENCIES => { - DependencyGroupError::DevGroupInclude(parent) + Self::DevGroupInclude(parent) + } + Self::SettingsGroupNotFound(group) + if dev_dependencies.is_some() && group == *DEV_DEPENDENCIES => + { + Self::SettingsDevGroupInclude } _ => self, } diff --git a/crates/uv-workspace/src/lib.rs b/crates/uv-workspace/src/lib.rs index 83be6bd88..0e1b3974c 100644 --- a/crates/uv-workspace/src/lib.rs +++ b/crates/uv-workspace/src/lib.rs @@ -1,6 +1,6 @@ pub use workspace::{ - DiscoveryOptions, MemberDiscovery, ProjectWorkspace, VirtualProject, Workspace, WorkspaceCache, - WorkspaceError, WorkspaceMember, + DiscoveryOptions, MemberDiscovery, ProjectWorkspace, RequiresPythonSources, VirtualProject, + Workspace, WorkspaceCache, WorkspaceError, WorkspaceMember, }; pub mod dependency_groups; diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index 2b0e44c16..124a62881 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -6,6 +6,8 @@ //! //! Then lowers them into a dependency specification. +#[cfg(feature = "schemars")] +use std::borrow::Cow; use std::collections::BTreeMap; use std::fmt::Formatter; use std::ops::Deref; @@ -23,6 +25,7 @@ use uv_fs::{PortablePathBuf, relative_to}; use uv_git_types::GitReference; use uv_macros::OptionsMetadata; use uv_normalize::{DefaultGroups, ExtraName, GroupName, PackageName}; +use uv_options_metadata::{OptionSet, OptionsMetadata, Visit}; use uv_pep440::{Version, VersionSpecifiers}; use uv_pep508::MarkerTree; use uv_pypi_types::{ @@ -353,6 +356,24 @@ pub struct ToolUv { )] pub default_groups: Option<DefaultGroups>, + /// Additional settings for `dependency-groups`. + /// + /// Currently this can only be used to add `requires-python` constraints + /// to dependency groups (typically to inform uv that your dev tooling + /// has a higher Python requirement than your actual project). + /// + /// This cannot be used to define dependency groups; use the top-level + /// `[dependency-groups]` table for that. + #[option( + default = "{}", + value_type = "dict", + example = r#" + [tool.uv.dependency-groups] + my-group = {requires-python = ">=3.12"} + "# + )] + pub dependency_groups: Option<ToolUvDependencyGroups>, + /// The project's development dependencies. /// /// Development dependencies will be installed by default in `uv run` and `uv sync`, but will @@ -591,7 +612,7 @@ pub struct ToolUv { /// Note that those settings only apply when using the `uv_build` backend; other build backends /// (such as hatchling) have their own configuration. #[option_group] - pub build_backend: Option<BuildBackendSettings>, + pub build_backend: Option<BuildBackendSettingsSchema>, } #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -653,6 +674,77 @@ impl<'de> serde::de::Deserialize<'de> for ToolUvSources { } } +#[derive(Default, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Serialize))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub struct ToolUvDependencyGroups(BTreeMap<GroupName, DependencyGroupSettings>); + +impl ToolUvDependencyGroups { + /// Returns the underlying `BTreeMap` of group names to settings. + pub fn inner(&self) -> &BTreeMap<GroupName, DependencyGroupSettings> { + &self.0 + } + + /// Convert the [`ToolUvDependencyGroups`] into its inner `BTreeMap`.
+ #[must_use] + pub fn into_inner(self) -> BTreeMap<GroupName, DependencyGroupSettings> { + self.0 + } +} + +/// Ensure that all keys in the TOML table are unique. +impl<'de> serde::de::Deserialize<'de> for ToolUvDependencyGroups { + fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + struct SourcesVisitor; + + impl<'de> serde::de::Visitor<'de> for SourcesVisitor { + type Value = ToolUvDependencyGroups; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a map with unique keys") + } + + fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error> + where + M: serde::de::MapAccess<'de>, + { + let mut groups = BTreeMap::new(); + while let Some((key, value)) = + access.next_entry::<GroupName, DependencyGroupSettings>()? + { + match groups.entry(key) { + std::collections::btree_map::Entry::Occupied(entry) => { + return Err(serde::de::Error::custom(format!( + "duplicate settings for dependency group `{}`", + entry.key() + ))); + } + std::collections::btree_map::Entry::Vacant(entry) => { + entry.insert(value); + } + } + } + Ok(ToolUvDependencyGroups(groups)) + } + } + + deserializer.deserialize_map(SourcesVisitor) + } +} + +#[derive(Deserialize, Default, Debug, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Serialize))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[serde(rename_all = "kebab-case")] +pub struct DependencyGroupSettings { + /// Version of Python to require when installing this group. + #[cfg_attr(feature = "schemars", schemars(with = "Option<String>"))] + pub requires_python: Option<VersionSpecifiers>, +} +
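For reference, the table this deserializer accepts maps group names to a settings object whose only current key is `requires-python`. A sketch with a simplified stand-in struct (plain `String`s instead of `GroupName`/`VersionSpecifiers`), assuming the `toml` and `serde` crates:

```rust
use std::collections::BTreeMap;

use serde::Deserialize;

// Hypothetical, simplified mirror of `DependencyGroupSettings`.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct GroupSettings {
    requires_python: Option<String>,
}

fn main() {
    // The body of a `[tool.uv.dependency-groups]` table.
    let table = r#"
        my-group = { requires-python = ">=3.12" }
        docs = { requires-python = ">=3.11" }
    "#;
    let settings: BTreeMap<String, GroupSettings> = toml::from_str(table).unwrap();
    assert_eq!(
        settings["my-group"].requires_python.as_deref(),
        Some(">=3.12")
    );
}
```

TOML itself already rejects duplicate keys, so the `Occupied` arm above mainly guards non-TOML serde formats and keeps the error message specific to dependency groups.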
 #[derive(Deserialize, OptionsMetadata, Default, Debug, Clone, PartialEq, Eq)] #[cfg_attr(test, derive(Serialize))] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] @@ -724,12 +816,12 @@ impl<'de> serde::Deserialize<'de> for SerdePattern { #[cfg(feature = "schemars")] impl schemars::JsonSchema for SerdePattern { - fn schema_name() -> String { - <String as schemars::JsonSchema>::schema_name() + fn schema_name() -> Cow<'static, str> { + Cow::Borrowed("SerdePattern") } - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - <String as schemars::JsonSchema>::json_schema(r#gen) + fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema { + <String as schemars::JsonSchema>::json_schema(generator) } } @@ -1594,3 +1686,44 @@ pub enum DependencyType { /// A dependency in `dependency-groups.{0}`. Group(GroupName), } + +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(test, derive(Serialize))] +pub struct BuildBackendSettingsSchema; + +impl<'de> Deserialize<'de> for BuildBackendSettingsSchema { + fn deserialize<D>(_deserializer: D) -> Result<Self, D::Error> + where + D: Deserializer<'de>, + { + Ok(BuildBackendSettingsSchema) + } +} + +#[cfg(feature = "schemars")] +impl schemars::JsonSchema for BuildBackendSettingsSchema { + fn schema_name() -> Cow<'static, str> { + BuildBackendSettings::schema_name() + } + + fn json_schema(generator: &mut schemars::SchemaGenerator) -> schemars::Schema { + BuildBackendSettings::json_schema(generator) + } +} + +impl OptionsMetadata for BuildBackendSettingsSchema { + fn record(visit: &mut dyn Visit) { + BuildBackendSettings::record(visit); + } + + fn documentation() -> Option<&'static str> { + BuildBackendSettings::documentation() + } + + fn metadata() -> OptionSet + where + Self: Sized + 'static, + { + BuildBackendSettings::metadata() + } +} diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index 3caaa8f8c..1349d739c 100644 --- a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -8,6 +8,7 @@ use glob::{GlobError, PatternError, glob}; use rustc_hash::{FxHashMap, FxHashSet}; use tracing::{debug, trace, warn}; +use uv_configuration::DependencyGroupsWithDefaults; use uv_distribution_types::{Index, Requirement, RequirementSource}; use uv_fs::{CWD, Simplified}; use uv_normalize::{DEV_DEPENDENCIES, GroupName, PackageName}; @@ -17,7 +18,7 @@ use uv_pypi_types::{Conflicts, SupportedEnvironments, VerbatimParsedUrl}; use uv_static::EnvVars; use uv_warnings::warn_user_once; -use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroups}; +use crate::dependency_groups::{DependencyGroupError, FlatDependencyGroup, FlatDependencyGroups}; use crate::pyproject::{ Project, PyProjectToml, PyprojectTomlError, Sources, ToolUvSources, ToolUvWorkspace, }; @@ -95,6 +96,8 @@ pub struct DiscoveryOptions { pub members: MemberDiscovery, } +pub type RequiresPythonSources = BTreeMap<(PackageName, Option<GroupName>), VersionSpecifiers>; + /// A workspace, consisting of a root directory and members. See [`ProjectWorkspace`]. #[derive(Debug, Clone)] #[cfg_attr(test, derive(serde::Serialize))] pub struct Workspace { @@ -413,15 +416,44 @@ impl Workspace { }
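The new `RequiresPythonSources` map (built by `requires_python` just below) keys each constraint by `(package, group)`, with a `None` group marking the member's own `[project] requires-python`; group entries are recorded only when the group is enabled. A simplified sketch of the shape, with plain `String`s standing in for `PackageName`, `GroupName`, and `VersionSpecifiers`:

```rust
use std::collections::BTreeMap;

type Sources = BTreeMap<(String, Option<String>), String>;

fn main() {
    let mut requires: Sources = BTreeMap::new();

    // `[project] requires-python` for member `app`: keyed with `None`.
    requires.insert(("app".into(), None), ">=3.9".into());

    // `[tool.uv.dependency-groups] docs = { requires-python = ">=3.12" }`,
    // recorded only because the `docs` group is enabled.
    requires.insert(("app".into(), Some("docs".into())), ">=3.12".into());

    for ((package, group), specifiers) in &requires {
        println!("{package} / {group:?}: {specifiers}");
    }
}
```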
/// Returns an iterator over the `requires-python` values for each member of the workspace. - pub fn requires_python(&self) -> impl Iterator<Item = (&PackageName, &VersionSpecifiers)> { - self.packages().iter().filter_map(|(name, member)| { - member + pub fn requires_python( + &self, + groups: &DependencyGroupsWithDefaults, + ) -> Result<RequiresPythonSources, DependencyGroupError> { + let mut requires = RequiresPythonSources::new(); + for (name, member) in self.packages() { + // Get the top-level requires-python for this package, which is always active + // + // Arguably we could check groups.prod() to disable this, since the requires-python + // of the project is *technically* not relevant if you're doing `--only-group`, but + // that would be a big surprising change so let's *not* do that until someone asks! + let top_requires = member .pyproject_toml() .project .as_ref() .and_then(|project| project.requires_python.as_ref()) - .map(|requires_python| (name, requires_python)) - }) + .map(|requires_python| ((name.to_owned(), None), requires_python.clone())); + requires.extend(top_requires); + + // Get the requires-python for each enabled group on this package + // We need to do full flattening here because include-group can transfer requires-python + let dependency_groups = + FlatDependencyGroups::from_pyproject_toml(member.root(), &member.pyproject_toml)?; + let group_requires = + dependency_groups + .into_iter() + .filter_map(move |(group_name, flat_group)| { + if groups.contains(&group_name) { + flat_group.requires_python.map(|requires_python| { + ((name.to_owned(), Some(group_name)), requires_python) + }) + } else { + None + } + }); + requires.extend(group_requires); + } + Ok(requires) } /// Returns any requirements that are exclusive to the workspace root, i.e., not included in @@ -439,12 +471,9 @@ impl Workspace { /// corresponding `pyproject.toml`. /// /// Otherwise, returns an empty list. - pub fn dependency_groups( + pub fn workspace_dependency_groups( &self, - ) -> Result< - BTreeMap<GroupName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>, - DependencyGroupError, - > { + ) -> Result<BTreeMap<GroupName, FlatDependencyGroup>, DependencyGroupError> { if self .packages .values() @@ -455,35 +484,10 @@ impl Workspace { Ok(BTreeMap::default()) } else { // Otherwise, return the dependency groups in the non-project workspace root. - // First, collect `tool.uv.dev_dependencies` - let dev_dependencies = self - .pyproject_toml - .tool - .as_ref() - .and_then(|tool| tool.uv.as_ref()) - .and_then(|uv| uv.dev_dependencies.as_ref()); - - // Then, collect `dependency-groups` - let dependency_groups = self - .pyproject_toml - .dependency_groups - .iter() - .flatten() - .collect::<BTreeMap<_, _>>(); - - // Flatten the dependency groups. - let mut dependency_groups = - FlatDependencyGroups::from_dependency_groups(&dependency_groups) - .map_err(|err| err.with_dev_dependencies(dev_dependencies))?; - - // Add the `dev` group, if `dev-dependencies` is defined. - if let Some(dev_dependencies) = dev_dependencies { - dependency_groups - .entry(DEV_DEPENDENCIES.clone()) - .or_insert_with(Vec::new) - .extend(dev_dependencies.clone()); - } - + let dependency_groups = FlatDependencyGroups::from_pyproject_toml( + &self.install_path, + &self.pyproject_toml, + )?; Ok(dependency_groups.into_inner()) } } @@ -1430,6 +1434,33 @@ impl VirtualProject { path: &Path, options: &DiscoveryOptions, cache: &WorkspaceCache, + ) -> Result<Self, WorkspaceError> { + Self::discover_impl(path, options, cache, false).await + } + + /// Equivalent to [`VirtualProject::discover`] but considers it acceptable for + /// both `[project]` and `[tool.uv.workspace]` to be missing. + /// + /// If they are, we act as if an empty `[tool.uv.workspace]` was found. + pub async fn discover_defaulted( + path: &Path, + options: &DiscoveryOptions, + cache: &WorkspaceCache, + ) -> Result<Self, WorkspaceError> { + Self::discover_impl(path, options, cache, true).await + } + + /// Find the current project or virtual workspace root, given the current directory. + /// + /// Similar to calling [`ProjectWorkspace::discover`] with a fallback to [`Workspace::discover`], + /// but avoids rereading the `pyproject.toml` (and relying on error-handling as control flow). + /// + /// This method requires an absolute path and panics otherwise.
+ async fn discover_impl( + path: &Path, + options: &DiscoveryOptions, + cache: &WorkspaceCache, + default_missing_workspace: bool, ) -> Result { assert!( path.is_absolute(), @@ -1493,6 +1524,24 @@ impl VirtualProject { ) .await?; + Ok(Self::NonProject(workspace)) + } else if default_missing_workspace { + // Otherwise it's a pyproject.toml that maybe contains dependency-groups + // that we want to treat like a project/workspace to handle those uniformly + let project_path = std::path::absolute(project_root) + .map_err(WorkspaceError::Normalize)? + .clone(); + + let workspace = Workspace::collect_members( + project_path, + ToolUvWorkspace::default(), + pyproject_toml, + None, + options, + cache, + ) + .await?; + Ok(Self::NonProject(workspace)) } else { Err(WorkspaceError::MissingProject(pyproject_path)) @@ -1818,6 +1867,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -1913,6 +1963,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2123,6 +2174,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2230,6 +2282,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2350,6 +2403,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, @@ -2444,6 +2498,7 @@ mod tests { "managed": null, "package": null, "default-groups": null, + "dependency-groups": null, "dev-dependencies": null, "override-dependencies": null, "constraint-dependencies": null, diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index b3accc211..0a352d2b1 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.7.13" +version = "0.7.19" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } @@ -70,10 +70,12 @@ clap = { workspace = true, features = ["derive", "string", "wrap_help"] } console = { workspace = true } ctrlc = { workspace = true } dotenvy = { workspace = true } +dunce = { workspace = true } flate2 = { workspace = true, default-features = false } fs-err = { workspace = true, features = ["tokio"] } futures = { workspace = true } http = { workspace = true } +indexmap = { workspace = true } indicatif = { workspace = true } indoc = { workspace = true } itertools = { workspace = true } @@ -112,7 +114,6 @@ assert_cmd = { version = "2.0.16" } assert_fs = { version = "1.1.2" } base64 = { workspace = true } byteorder = { version = "1.5.0" } -etcetera = { workspace = true } filetime = { version = "0.2.25" } flate2 = { workspace = true, default-features = false } ignore = { version = "0.4.23" } diff --git a/crates/uv/src/commands/build_frontend.rs b/crates/uv/src/commands/build_frontend.rs index c601541da..2cef9a406 100644 --- a/crates/uv/src/commands/build_frontend.rs +++ b/crates/uv/src/commands/build_frontend.rs @@ -3,7 +3,6 @@ use std::fmt::Write as _; use std::io::Write as _; use std::path::{Path, PathBuf}; use 
std::str::FromStr; -use std::sync::Arc; use std::{fmt, io}; use anyhow::{Context, Result}; @@ -16,13 +15,16 @@ use uv_cache::{Cache, CacheBucket}; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ BuildKind, BuildOptions, BuildOutput, Concurrency, ConfigSettings, Constraints, - HashCheckingMode, IndexStrategy, KeyringProviderType, PreviewMode, SourceStrategy, + DependencyGroupsWithDefaults, HashCheckingMode, IndexStrategy, KeyringProviderType, + PreviewMode, SourceStrategy, }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_filename::{ DistFilename, SourceDistExtension, SourceDistFilename, WheelFilename, }; -use uv_distribution_types::{DependencyMetadata, Index, IndexLocations, SourceDist}; +use uv_distribution_types::{ + DependencyMetadata, Index, IndexLocations, RequiresPython, SourceDist, +}; use uv_fs::{Simplified, relative_to}; use uv_install_wheel::LinkMode; use uv_normalize::PackageName; @@ -33,7 +35,7 @@ use uv_python::{ VersionRequest, }; use uv_requirements::RequirementsSource; -use uv_resolver::{ExcludeNewer, FlatIndex, RequiresPython}; +use uv_resolver::{ExcludeNewer, FlatIndex}; use uv_settings::PythonInstallMirrors; use uv_types::{AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, HashStrategy}; use uv_workspace::{DiscoveryOptions, Workspace, WorkspaceCache, WorkspaceError}; @@ -185,15 +187,6 @@ async fn build_impl( printer: Printer, preview: PreviewMode, ) -> Result { - if list && preview.is_disabled() { - // We need the direct build for list and that is preview only. - writeln!( - printer.stderr(), - "The `--list` option is only available in preview mode; add the `--preview` flag to use `--list`" - )?; - return Ok(BuildResult::Failure); - } - // Extract the resolver settings. let ResolverSettings { index_locations, @@ -471,7 +464,8 @@ async fn build_package( // (3) `Requires-Python` in `pyproject.toml` if interpreter_request.is_none() { if let Ok(workspace) = workspace { - interpreter_request = find_requires_python(workspace)? + let groups = DependencyGroupsWithDefaults::none(); + interpreter_request = find_requires_python(workspace, &groups)? .as_ref() .map(RequiresPython::specifiers) .map(|specifiers| { @@ -495,20 +489,12 @@ async fn build_package( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Read build constraints. 
let build_constraints = @@ -610,10 +596,7 @@ async fn build_package( } BuildAction::List - } else if preview.is_enabled() - && !force_pep517 - && check_direct_build(source.path(), source.path().user_display()) - { + } else if !force_pep517 && check_direct_build(source.path(), source.path().user_display()) { BuildAction::DirectBuild } else { BuildAction::Pep517 diff --git a/crates/uv/src/commands/pip/check.rs b/crates/uv/src/commands/pip/check.rs index f504503af..bfbb20ee6 100644 --- a/crates/uv/src/commands/pip/check.rs +++ b/crates/uv/src/commands/pip/check.rs @@ -5,6 +5,7 @@ use anyhow::Result; use owo_colors::OwoColorize; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_distribution_types::{Diagnostic, InstalledDist}; use uv_installer::{SitePackages, SitePackagesDiagnostic}; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; @@ -19,6 +20,7 @@ pub(crate) fn pip_check( system: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let start = Instant::now(); @@ -27,6 +29,7 @@ pub(crate) fn pip_check( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 8da16ef46..a1846d418 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -3,7 +3,6 @@ use std::env; use std::ffi::OsStr; use std::path::{Path, PathBuf}; use std::str::FromStr; -use std::sync::Arc; use anyhow::{Result, anyhow}; use itertools::Itertools; @@ -21,7 +20,7 @@ use uv_configuration::{KeyringProviderType, TargetTriple}; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_types::{ DependencyMetadata, HashGeneration, Index, IndexLocations, NameRequirementSpecification, - Origin, Requirement, UnresolvedRequirementSpecification, Verbatim, + Origin, Requirement, RequiresPython, UnresolvedRequirementSpecification, Verbatim, }; use uv_fs::{CWD, Simplified}; use uv_git::ResolvedRepositoryReference; @@ -38,8 +37,8 @@ use uv_requirements::{ }; use uv_resolver::{ AnnotationStyle, DependencyMode, DisplayResolutionGraph, ExcludeNewer, FlatIndex, ForkStrategy, - InMemoryIndex, OptionsBuilder, PrereleaseMode, PylockToml, PythonRequirement, RequiresPython, - ResolutionMode, ResolverEnvironment, + InMemoryIndex, OptionsBuilder, PrereleaseMode, PylockToml, PythonRequirement, ResolutionMode, + ResolverEnvironment, }; use uv_torch::{TorchMode, TorchStrategy}; use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy}; @@ -271,7 +270,13 @@ pub(crate) async fn pip_compile( let environment_preference = EnvironmentPreference::from_system_flag(system, false); let interpreter = if let Some(python) = python.as_ref() { let request = PythonRequest::parse(python); - PythonInstallation::find(&request, environment_preference, python_preference, &cache) + PythonInstallation::find( + &request, + environment_preference, + python_preference, + &cache, + preview, + ) } else { // TODO(zanieb): The split here hints at a problem with the request abstraction; we should // be able to use `PythonInstallation::find(...)` here. 
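In the `--universal` hunk just below, the implied `requires-python` lower bound is derived from the interpreter's minor release rather than its exact version. A sketch of the effect, with a plain tuple standing in for uv's `RequiresPython`:

```rust
// Derive a universal-resolution lower bound from an interpreter version.
fn universal_lower_bound((major, minor, _patch): (u64, u64, u64)) -> String {
    // Truncate to the minor release so lockfiles stay valid on older patch
    // releases of the same minor version.
    format!(">={major}.{minor}")
}

fn main() {
    // 3.12.3 produces `>=3.12`, which still admits a 3.12.0 environment;
    // `>=3.12.3` would not.
    assert_eq!(universal_lower_bound((3, 12, 3)), ">=3.12");
}
```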
@@ -281,7 +286,13 @@ pub(crate) async fn pip_compile( } else { PythonRequest::default() }; - PythonInstallation::find_best(&request, environment_preference, python_preference, &cache) + PythonInstallation::find_best( + &request, + environment_preference, + python_preference, + &cache, + preview, + ) }? .into_interpreter(); @@ -326,13 +337,12 @@ pub(crate) async fn pip_compile( // Determine the Python requirement, if the user requested a specific version. let python_requirement = if universal { - let requires_python = RequiresPython::greater_than_equal_version( - if let Some(python_version) = python_version.as_ref() { - &python_version.version - } else { - interpreter.python_version() - }, - ); + let requires_python = if let Some(python_version) = python_version.as_ref() { + RequiresPython::greater_than_equal_version(&python_version.version) + } else { + let version = interpreter.python_minor_version(); + RequiresPython::greater_than_equal_version(&version) + }; PythonRequirement::from_requires_python(&interpreter, requires_python) } else if let Some(python_version) = python_version.as_ref() { PythonRequirement::from_python_version(&interpreter, python_version) @@ -376,32 +386,21 @@ pub(crate) async fn pip_compile( no_index, ); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Determine the PyTorch backend. - let torch_backend = torch_backend.map(|mode| { - if preview.is_disabled() { - warn_user!("The `--torch-backend` setting is experimental and may change without warning. Pass `--preview` to disable this warning."); - } - - TorchStrategy::from_mode( - mode, - python_platform - .map(TargetTriple::platform) - .as_ref() - .unwrap_or(interpreter.platform()) - .os(), - ) - }).transpose()?; + let torch_backend = torch_backend + .map(|mode| { + TorchStrategy::from_mode( + mode, + python_platform + .map(TargetTriple::platform) + .as_ref() + .unwrap_or(interpreter.platform()) + .os(), + ) + }) + .transpose()?; // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? diff --git a/crates/uv/src/commands/pip/freeze.rs b/crates/uv/src/commands/pip/freeze.rs index 7ad5517af..8c8491d45 100644 --- a/crates/uv/src/commands/pip/freeze.rs +++ b/crates/uv/src/commands/pip/freeze.rs @@ -6,6 +6,7 @@ use itertools::Itertools; use owo_colors::OwoColorize; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_distribution_types::{Diagnostic, InstalledDist, Name}; use uv_installer::SitePackages; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; @@ -23,12 +24,14 @@ pub(crate) fn pip_freeze( paths: Option>, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Detect the current Python interpreter. 
let environment = PythonEnvironment::find( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index eb9c1cd2b..aa6e6a6c9 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -1,12 +1,11 @@ use std::collections::{BTreeMap, BTreeSet}; use std::fmt::Write; use std::path::PathBuf; -use std::sync::Arc; use anyhow::Context; use itertools::Itertools; use owo_colors::OwoColorize; -use tracing::{Level, debug, enabled}; +use tracing::{Level, debug, enabled, warn}; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; @@ -182,6 +181,7 @@ pub(crate) async fn pip_install( EnvironmentPreference::from_system_flag(system, false), python_preference, &cache, + preview, )?; report_interpreter(&installation, true, printer)?; PythonEnvironment::from_installation(installation) @@ -193,6 +193,7 @@ pub(crate) async fn pip_install( .unwrap_or_default(), EnvironmentPreference::from_system_flag(system, true), &cache, + preview, )?; report_target_environment(&environment, &cache, printer)?; environment @@ -235,7 +236,13 @@ pub(crate) async fn pip_install( } } - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Determine the markers to use for the resolution. let interpreter = environment.interpreter(); @@ -254,6 +261,7 @@ pub(crate) async fn pip_install( if reinstall.is_none() && upgrade.is_none() && source_trees.is_empty() + && groups.is_empty() && pylock.is_none() && matches!(modifications, Modifications::Sufficient) { @@ -331,32 +339,21 @@ pub(crate) async fn pip_install( no_index, ); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Determine the PyTorch backend. - let torch_backend = torch_backend.map(|mode| { - if preview.is_disabled() { - warn_user!("The `--torch-backend` setting is experimental and may change without warning. Pass `--preview` to disable this warning."); - } - - TorchStrategy::from_mode( - mode, - python_platform - .map(TargetTriple::platform) - .as_ref() - .unwrap_or(interpreter.platform()) - .os(), - ) - }).transpose()?; + let torch_backend = torch_backend + .map(|mode| { + TorchStrategy::from_mode( + mode, + python_platform + .map(TargetTriple::platform) + .as_ref() + .unwrap_or(interpreter.platform()) + .os(), + ) + }) + .transpose()?; // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? 
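The `pip install`/`pip sync` hunks above repeat one pattern: a failed environment lock is downgraded from a hard error to a warning, and the command proceeds without the guard. The pattern in isolation, with a hypothetical `try_lock` standing in for `environment.lock()`:

```rust
use std::io;

use tracing::warn;

// Stand-in for `environment.lock()`: pretend another process holds the lock.
fn try_lock() -> io::Result<()> {
    Err(io::Error::other("lock held elsewhere"))
}

fn main() {
    // `inspect_err` logs the failure; `ok()` discards it, so `_lock` is an
    // `Option` that simply holds the guard (when any) until end of scope.
    let _lock = try_lock()
        .inspect_err(|err| {
            warn!("Failed to acquire environment lock: {err}");
        })
        .ok();

    // Execution continues even when the lock could not be acquired.
    println!("proceeding without exclusive lock");
}
```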
diff --git a/crates/uv/src/commands/pip/latest.rs b/crates/uv/src/commands/pip/latest.rs index ac3ce7d1f..25da8466c 100644 --- a/crates/uv/src/commands/pip/latest.rs +++ b/crates/uv/src/commands/pip/latest.rs @@ -3,10 +3,10 @@ use tracing::debug; use uv_client::{MetadataFormat, RegistryClient, VersionFiles}; use uv_distribution_filename::DistFilename; -use uv_distribution_types::{IndexCapabilities, IndexMetadataRef, IndexUrl}; +use uv_distribution_types::{IndexCapabilities, IndexMetadataRef, IndexUrl, RequiresPython}; use uv_normalize::PackageName; use uv_platform_tags::Tags; -use uv_resolver::{ExcludeNewer, PrereleaseMode, RequiresPython}; +use uv_resolver::{ExcludeNewer, PrereleaseMode}; use uv_warnings::warn_user_once; /// A client to fetch the latest version of a package from an index. diff --git a/crates/uv/src/commands/pip/list.rs b/crates/uv/src/commands/pip/list.rs index 48786d86c..356574436 100644 --- a/crates/uv/src/commands/pip/list.rs +++ b/crates/uv/src/commands/pip/list.rs @@ -15,16 +15,18 @@ use uv_cache::{Cache, Refresh}; use uv_cache_info::Timestamp; use uv_cli::ListFormat; use uv_client::{BaseClientBuilder, RegistryClientBuilder}; -use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType}; +use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType, PreviewMode}; use uv_distribution_filename::DistFilename; -use uv_distribution_types::{Diagnostic, IndexCapabilities, IndexLocations, InstalledDist, Name}; +use uv_distribution_types::{ + Diagnostic, IndexCapabilities, IndexLocations, InstalledDist, Name, RequiresPython, +}; use uv_fs::Simplified; use uv_installer::SitePackages; use uv_normalize::PackageName; use uv_pep440::Version; use uv_python::PythonRequest; use uv_python::{EnvironmentPreference, PythonEnvironment}; -use uv_resolver::{ExcludeNewer, PrereleaseMode, RequiresPython}; +use uv_resolver::{ExcludeNewer, PrereleaseMode}; use crate::commands::ExitStatus; use crate::commands::pip::latest::LatestClient; @@ -52,6 +54,7 @@ pub(crate) async fn pip_list( system: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Disallow `--outdated` with `--format freeze`. 
if outdated && matches!(format, ListFormat::Freeze) { @@ -63,6 +66,7 @@ pub(crate) async fn pip_list( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 907f79075..55ab2aa1b 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -8,7 +8,6 @@ use std::fmt::Write; use std::path::PathBuf; use std::sync::Arc; use tracing::debug; -use uv_tool::InstalledTools; use uv_cache::Cache; use uv_client::{BaseClientBuilder, RegistryClient}; @@ -17,9 +16,9 @@ use uv_configuration::{ ExtrasSpecification, Overrides, Reinstall, Upgrade, }; use uv_dispatch::BuildDispatch; -use uv_distribution::DistributionDatabase; +use uv_distribution::{DistributionDatabase, SourcedDependencyGroups}; use uv_distribution_types::{ - CachedDist, Diagnostic, InstalledDist, LocalDist, NameRequirementSpecification, + CachedDist, Diagnostic, InstalledDist, LocalDist, NameRequirementSpecification, Requirement, ResolutionDiagnostic, UnresolvedRequirement, UnresolvedRequirementSpecification, }; use uv_distribution_types::{ @@ -29,7 +28,7 @@ use uv_fs::Simplified; use uv_install_wheel::LinkMode; use uv_installer::{Plan, Planner, Preparer, SitePackages}; use uv_normalize::{GroupName, PackageName}; -use uv_pep508::MarkerEnvironment; +use uv_pep508::{MarkerEnvironment, RequirementOrigin}; use uv_platform_tags::Tags; use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment}; use uv_python::{PythonEnvironment, PythonInstallation}; @@ -41,7 +40,8 @@ use uv_resolver::{ DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference, Preferences, PythonRequirement, Resolver, ResolverEnvironment, ResolverOutput, }; -use uv_types::{HashStrategy, InFlight, InstalledPackagesProvider}; +use uv_tool::InstalledTools; +use uv_types::{BuildContext, HashStrategy, InFlight, InstalledPackagesProvider}; use uv_warnings::warn_user; use crate::commands::pip::loggers::{DefaultInstallLogger, InstallLogger, ResolveLogger}; @@ -166,7 +166,6 @@ pub(crate) async fn resolve( if !source_trees.is_empty() { let resolutions = SourceTreeResolver::new( extras, - groups, hasher, index, DistributionDatabase::new(client, build_dispatch, concurrency.downloads), @@ -212,6 +211,47 @@ pub(crate) async fn resolve( ); } + for (pyproject_path, groups) in groups { + let metadata = SourcedDependencyGroups::from_virtual_project( + pyproject_path, + None, + build_dispatch.locations(), + build_dispatch.sources(), + build_dispatch.workspace_cache(), + ) + .await + .map_err(|e| { + anyhow!( + "Failed to read dependency groups from: {}\n{}", + pyproject_path.display(), + e + ) + })?; + + // Complain if dependency groups are named that don't appear. 
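With groups now resolved from each `pyproject.toml` rather than from source-tree metadata, an explicitly requested group that does not exist is rejected outright, as the loop that follows shows. A simplified sketch of that check (bare `&str` names in place of `GroupName`):

```rust
use std::collections::BTreeMap;

// Explicitly requested groups must exist in the target pyproject.toml;
// otherwise the command fails instead of silently installing nothing.
fn check_groups(
    requested: &[&str],
    discovered: &BTreeMap<&str, Vec<&str>>,
) -> Result<(), String> {
    for name in requested {
        if !discovered.contains_key(name) {
            return Err(format!(
                "The dependency group '{name}' was not found in the project"
            ));
        }
    }
    Ok(())
}

fn main() {
    let mut groups = BTreeMap::new();
    groups.insert("lint", vec!["ruff"]);
    assert!(check_groups(&["lint"], &groups).is_ok());
    assert!(check_groups(&["docs"], &groups).is_err());
}
```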
+ for name in groups.explicit_names() { + if !metadata.dependency_groups.contains_key(name) { + return Err(anyhow!( + "The dependency group '{name}' was not found in the project: {}", + pyproject_path.user_display() + ))?; + } + } + // Apply dependency-groups + for (group_name, group) in &metadata.dependency_groups { + if groups.contains(group_name) { + requirements.extend(group.iter().cloned().map(|group| Requirement { + origin: Some(RequirementOrigin::Group( + pyproject_path.clone(), + metadata.name.clone(), + group_name.clone(), + )), + ..group + })); + } + } + } + requirements }; diff --git a/crates/uv/src/commands/pip/show.rs b/crates/uv/src/commands/pip/show.rs index a77c29cd5..4d2b3c3a7 100644 --- a/crates/uv/src/commands/pip/show.rs +++ b/crates/uv/src/commands/pip/show.rs @@ -7,6 +7,7 @@ use owo_colors::OwoColorize; use rustc_hash::FxHashMap; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_distribution_types::{Diagnostic, Name}; use uv_fs::Simplified; use uv_install_wheel::read_record_file; @@ -27,6 +28,7 @@ pub(crate) fn pip_show( files: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { if packages.is_empty() { #[allow(clippy::print_stderr)] @@ -46,6 +48,7 @@ pub(crate) fn pip_show( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 35cef5907..8f26aaea2 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -1,10 +1,9 @@ use std::collections::{BTreeMap, BTreeSet}; use std::fmt::Write; -use std::sync::Arc; use anyhow::{Context, Result}; use owo_colors::OwoColorize; -use tracing::debug; +use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; @@ -157,6 +156,7 @@ pub(crate) async fn pip_sync( EnvironmentPreference::from_system_flag(system, false), python_preference, &cache, + preview, )?; report_interpreter(&installation, true, printer)?; PythonEnvironment::from_installation(installation) @@ -168,6 +168,7 @@ pub(crate) async fn pip_sync( .unwrap_or_default(), EnvironmentPreference::from_system_flag(system, true), &cache, + preview, )?; report_target_environment(&environment, &cache, printer)?; environment @@ -210,7 +211,13 @@ pub(crate) async fn pip_sync( } } - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); let interpreter = environment.interpreter(); @@ -265,32 +272,21 @@ pub(crate) async fn pip_sync( no_index, ); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Determine the PyTorch backend. - let torch_backend = torch_backend.map(|mode| { - if preview.is_disabled() { - warn_user!("The `--torch-backend` setting is experimental and may change without warning. 
Pass `--preview` to disable this warning."); - } - - TorchStrategy::from_mode( - mode, - python_platform - .map(TargetTriple::platform) - .as_ref() - .unwrap_or(interpreter.platform()) - .os(), - ) - }).transpose()?; + let torch_backend = torch_backend + .map(|mode| { + TorchStrategy::from_mode( + mode, + python_platform + .map(TargetTriple::platform) + .as_ref() + .unwrap_or(interpreter.platform()) + .os(), + ) + }) + .transpose()?; // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? diff --git a/crates/uv/src/commands/pip/tree.rs b/crates/uv/src/commands/pip/tree.rs index 05290ffd0..b0ba44c35 100644 --- a/crates/uv/src/commands/pip/tree.rs +++ b/crates/uv/src/commands/pip/tree.rs @@ -13,15 +13,15 @@ use tokio::sync::Semaphore; use uv_cache::{Cache, Refresh}; use uv_cache_info::Timestamp; use uv_client::{BaseClientBuilder, RegistryClientBuilder}; -use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType}; -use uv_distribution_types::{Diagnostic, IndexCapabilities, IndexLocations, Name}; +use uv_configuration::{Concurrency, IndexStrategy, KeyringProviderType, PreviewMode}; +use uv_distribution_types::{Diagnostic, IndexCapabilities, IndexLocations, Name, RequiresPython}; use uv_installer::SitePackages; use uv_normalize::PackageName; use uv_pep440::Version; use uv_pep508::{Requirement, VersionOrUrl}; use uv_pypi_types::{ResolutionMetadata, ResolverMarkerEnvironment, VerbatimParsedUrl}; use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest}; -use uv_resolver::{ExcludeNewer, PrereleaseMode, RequiresPython}; +use uv_resolver::{ExcludeNewer, PrereleaseMode}; use crate::commands::ExitStatus; use crate::commands::pip::latest::LatestClient; @@ -52,12 +52,14 @@ pub(crate) async fn pip_tree( system: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Detect the current Python interpreter. 
let environment = PythonEnvironment::find( &python.map(PythonRequest::parse).unwrap_or_default(), EnvironmentPreference::from_system_flag(system, false), cache, + preview, )?; report_target_environment(&environment, cache, printer)?; diff --git a/crates/uv/src/commands/pip/uninstall.rs b/crates/uv/src/commands/pip/uninstall.rs index 787ba5aae..835e7de65 100644 --- a/crates/uv/src/commands/pip/uninstall.rs +++ b/crates/uv/src/commands/pip/uninstall.rs @@ -3,11 +3,11 @@ use std::fmt::Write; use anyhow::Result; use itertools::{Either, Itertools}; use owo_colors::OwoColorize; -use tracing::debug; +use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::BaseClientBuilder; -use uv_configuration::{DryRun, KeyringProviderType}; +use uv_configuration::{DryRun, KeyringProviderType, PreviewMode}; use uv_distribution_types::Requirement; use uv_distribution_types::{InstalledMetadata, Name, UnresolvedRequirement}; use uv_fs::Simplified; @@ -37,6 +37,7 @@ pub(crate) async fn pip_uninstall( network_settings: &NetworkSettings, dry_run: DryRun, printer: Printer, + preview: PreviewMode, ) -> Result { let start = std::time::Instant::now(); @@ -57,6 +58,7 @@ pub(crate) async fn pip_uninstall( .unwrap_or_default(), EnvironmentPreference::from_system_flag(system, true), &cache, + preview, )?; report_target_environment(&environment, &cache, printer)?; @@ -98,7 +100,13 @@ pub(crate) async fn pip_uninstall( } } - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Index the current `site-packages` directory. let site_packages = uv_installer::SitePackages::from_environment(&environment)?; diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index a4091504d..04fd7d822 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -10,15 +10,16 @@ use anyhow::{Context, Result, bail}; use itertools::Itertools; use owo_colors::OwoColorize; use rustc_hash::{FxBuildHasher, FxHashMap}; -use tracing::debug; +use tracing::{debug, warn}; use url::Url; use uv_cache::Cache; use uv_cache_key::RepositoryUrl; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - Concurrency, Constraints, DependencyGroups, DevMode, DryRun, EditableMode, ExtrasSpecification, - InstallOptions, PreviewMode, SourceStrategy, + Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DevMode, DryRun, + EditableMode, ExtrasSpecification, ExtrasSpecificationWithDefaults, InstallOptions, + PreviewMode, SourceStrategy, }; use uv_dispatch::BuildDispatch; use uv_distribution::DistributionDatabase; @@ -29,7 +30,7 @@ use uv_distribution_types::{ use uv_fs::{LockedFile, Simplified}; use uv_git::GIT_STORE; use uv_git_types::GitReference; -use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, PackageName}; +use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, DefaultGroups, PackageName}; use uv_pep508::{ExtraName, MarkerTree, UnnamedRequirement, VersionOrUrl}; use uv_pypi_types::{ParsedUrl, VerbatimParsedUrl}; use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreference, PythonRequest}; @@ -79,7 +80,7 @@ pub(crate) async fn add( rev: Option, tag: Option, branch: Option, - extras: Vec, + extras_of_dependency: Vec, package: Option, python: Option, install_mirrors: PythonInstallMirrors, @@ -122,6 +123,34 @@ pub(crate) async fn add( let reporter = 
PythonDownloadReporter::single(printer); + // Determine what defaults/extras we're explicitly enabling + let (extras, groups) = match &dependency_type { + DependencyType::Production => { + let extras = ExtrasSpecification::from_extra(vec![]); + let groups = DependencyGroups::from_dev_mode(DevMode::Exclude); + (extras, groups) + } + DependencyType::Dev => { + let extras = ExtrasSpecification::from_extra(vec![]); + let groups = DependencyGroups::from_dev_mode(DevMode::Include); + (extras, groups) + } + DependencyType::Optional(extra_name) => { + let extras = ExtrasSpecification::from_extra(vec![extra_name.clone()]); + let groups = DependencyGroups::from_dev_mode(DevMode::Exclude); + (extras, groups) + } + DependencyType::Group(group_name) => { + let extras = ExtrasSpecification::from_extra(vec![]); + let groups = DependencyGroups::from_group(group_name.clone()); + (extras, groups) + } + }; + // Default extras currently always disabled + let defaulted_extras = extras.with_defaults(DefaultExtras::default()); + // Default groups we need the actual project for, interpreter discovery will use this! + let defaulted_groups; + let target = if let Some(script) = script { // If we found a PEP 723 script and the user provided a project-only setting, warn. if package.is_some() { @@ -166,12 +195,16 @@ pub(crate) async fn add( &client_builder, cache, &reporter, + preview, ) .await?; Pep723Script::init(&path, requires_python.specifiers()).await? } }; + // Scripts don't actually have groups + defaulted_groups = groups.with_defaults(DefaultGroups::default()); + // Discover the interpreter. let interpreter = ScriptInterpreter::discover( Pep723ItemRef::Script(&script), @@ -185,6 +218,7 @@ pub(crate) async fn add( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -234,11 +268,16 @@ pub(crate) async fn add( } } + // Enable the default groups of the project + defaulted_groups = + groups.with_defaults(default_dependency_groups(project.pyproject_toml())?); + if frozen || no_sync { // Discover the interpreter. let interpreter = ProjectInterpreter::discover( project.workspace(), project_dir, + &defaulted_groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -249,6 +288,7 @@ pub(crate) async fn add( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -258,6 +298,7 @@ pub(crate) async fn add( // Discover or create the virtual environment. let environment = ProjectEnvironment::get_or_init( project.workspace(), + &defaulted_groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -269,6 +310,7 @@ pub(crate) async fn add( cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -277,7 +319,13 @@ pub(crate) async fn add( } }; - let _lock = target.acquire_lock().await?; + let _lock = target + .acquire_lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); let client_builder = BaseClientBuilder::new() .connectivity(network_settings.connectivity) @@ -332,16 +380,7 @@ pub(crate) async fn add( let hasher = HashStrategy::default(); let sources = SourceStrategy::Enabled; - // Add all authenticated sources to the cache. 
- for index in settings.resolver.index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + settings.resolver.index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -468,14 +507,15 @@ pub(crate) async fn add( rev.as_deref(), tag.as_deref(), branch.as_deref(), - &extras, + &extras_of_dependency, index, &mut toml, )?; // Validate any indexes that were provided on the command-line to ensure - // they point to existing directories when using path URLs. - for index in &indexes { + // they point to existing non-empty directories when using path URLs. + let mut valid_indexes = Vec::with_capacity(indexes.len()); + for index in indexes { if let IndexUrl::Path(url) = &index.url { let path = url .to_file_path() @@ -483,8 +523,14 @@ pub(crate) async fn add( if !path.is_dir() { bail!("Directory not found for index: {url}"); } + if fs_err::read_dir(&path)?.next().is_none() { + warn_user_once!("Index directory `{url}` is empty, skipping"); + continue; + } } + valid_indexes.push(index); } + let indexes = valid_indexes; // Add any indexes that were provided on the command-line, in priority order. if !raw { @@ -551,7 +597,8 @@ pub(crate) async fn add( lock_state, sync_state, locked, - &dependency_type, + &defaulted_extras, + &defaulted_groups, raw, bounds, constraints, @@ -778,7 +825,8 @@ async fn lock_and_sync( lock_state: UniversalState, sync_state: PlatformState, locked: bool, - dependency_type: &DependencyType, + extras: &ExtrasSpecificationWithDefaults, + groups: &DependencyGroupsWithDefaults, raw: bool, bound_kind: Option, constraints: Vec, @@ -802,6 +850,7 @@ async fn lock_and_sync( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -923,6 +972,7 @@ async fn lock_and_sync( Box::new(SummaryResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -942,36 +992,6 @@ async fn lock_and_sync( return Ok(()); }; - // Sync the environment. - let (extras, dev) = match dependency_type { - DependencyType::Production => { - let extras = ExtrasSpecification::from_extra(vec![]); - let dev = DependencyGroups::from_dev_mode(DevMode::Exclude); - (extras, dev) - } - DependencyType::Dev => { - let extras = ExtrasSpecification::from_extra(vec![]); - let dev = DependencyGroups::from_dev_mode(DevMode::Include); - (extras, dev) - } - DependencyType::Optional(extra_name) => { - let extras = ExtrasSpecification::from_extra(vec![extra_name.clone()]); - let dev = DependencyGroups::from_dev_mode(DevMode::Exclude); - (extras, dev) - } - DependencyType::Group(group_name) => { - let extras = ExtrasSpecification::from_extra(vec![]); - let dev = DependencyGroups::from_group(group_name.clone()); - (extras, dev) - } - }; - - // Determine the default groups to include. - let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. - let default_extras = DefaultExtras::default(); - // Identify the installation target. 
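`uv add` now distinguishes two failure modes for a path-based `--index`: a missing directory stays a hard error, while an existing-but-empty directory is skipped with a warning. A sketch of that decision, assuming the `fs-err` crate already used above:

```rust
use std::io;
use std::path::Path;

/// Returns `Ok(true)` if the directory exists and has at least one entry,
/// `Ok(false)` if it exists but is empty (skip with a warning), and an error
/// if it is missing entirely.
fn usable_index_dir(path: &Path) -> io::Result<bool> {
    if !path.is_dir() {
        return Err(io::Error::other(format!(
            "Directory not found for index: {}",
            path.display()
        )));
    }
    // `read_dir().next()` is `None` exactly when the directory is empty.
    Ok(fs_err::read_dir(path)?.next().is_some())
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir();
    println!("usable index: {}", usable_index_dir(&dir)?);
    Ok(())
}
```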
 let target = match &project { VirtualProject::Project(project) => InstallTarget::Project { @@ -988,8 +1008,8 @@ async fn lock_and_sync( project::sync::do_sync( target, venv, - &extras.with_defaults(default_extras), - &dev.with_defaults(default_groups), + extras, + groups, EditableMode::Editable, InstallOptions::default(), Modifications::Sufficient, @@ -1000,6 +1020,7 @@ async fn lock_and_sync( installer_metadata, concurrency, cache, + WorkspaceCache::default(), DryRun::Disabled, printer, preview, diff --git a/crates/uv/src/commands/project/environment.rs b/crates/uv/src/commands/project/environment.rs index f7ba006c5..f43587ff0 100644 --- a/crates/uv/src/commands/project/environment.rs +++ b/crates/uv/src/commands/project/environment.rs @@ -7,7 +7,7 @@ use uv_cache_key::{cache_digest, hash_digest}; use uv_configuration::{Concurrency, Constraints, PreviewMode}; use uv_distribution_types::{Name, Resolution}; use uv_fs::PythonExt; -use uv_python::{Interpreter, PythonEnvironment}; +use uv_python::{Interpreter, PythonEnvironment, canonicalize_executable}; use crate::commands::pip::loggers::{InstallLogger, ResolveLogger}; use crate::commands::pip::operations::Modifications; @@ -44,13 +44,16 @@ impl CachedEnvironment { printer: Printer, preview: PreviewMode, ) -> Result { - let interpreter = Self::base_interpreter(interpreter, cache)?; + // Resolve the "base" interpreter, which resolves to an underlying parent interpreter if the + // given interpreter is a virtual environment. + let base_interpreter = Self::base_interpreter(interpreter, cache)?; // Resolve the requirements with the interpreter. let resolution = Resolution::from( resolve_environment( spec, - &interpreter, + &base_interpreter, + build_constraints.clone(), &settings.resolver, network_settings, state, @@ -72,12 +75,34 @@ impl CachedEnvironment { hash_digest(&distributions) }; - // Hash the interpreter based on its path. - // TODO(charlie): Come up with a robust hash for the interpreter. - let interpreter_hash = cache_digest(&interpreter.sys_executable()); + // Construct a hash for the environment. + // + // Use the canonicalized base interpreter path since that's the interpreter we performed the + // resolution with and the interpreter the environment will be created with. + // + // We also include the canonicalized `sys.prefix` of the non-base interpreter, that is, the + // virtual environment's path. Originally, we shared cached environments independent of the + // environment they'd be layered on top of. However, this causes collisions as the overlay + // `.pth` file can be overridden by another instance of uv. Including this element in the key + // avoids this problem at the cost of creating separate cached environments for identical + // `--with` invocations across projects. We use `sys.prefix` rather than `sys.executable` so + // we can canonicalize it without invalidating the purpose of the element; it'd probably be + // safe to just use the absolute `sys.executable` as well. + // + // TODO(zanieb): Since we're not sharing these environments across projects, we should move + // [`CachedEnvironment::set_overlay`] etc. here since the values there should be constant + // now. + // + // TODO(zanieb): We should include the version of the base interpreter in the hash, so if + // the interpreter at the canonicalized path changes versions we construct a new + // environment.
+ let environment_hash = cache_digest(&( + &canonicalize_executable(base_interpreter.sys_executable())?, + &interpreter.sys_prefix().canonicalize()?, + )); // Search in the content-addressed cache. - let cache_entry = cache.entry(CacheBucket::Environments, interpreter_hash, resolution_hash); + let cache_entry = cache.entry(CacheBucket::Environments, environment_hash, resolution_hash); if cache.refresh().is_none() { if let Ok(root) = cache.resolve_link(cache_entry.path()) { @@ -91,12 +116,14 @@ impl CachedEnvironment { let temp_dir = cache.venv_dir()?; let venv = uv_virtualenv::create_venv( temp_dir.path(), - interpreter, + base_interpreter, uv_virtualenv::Prompt::None, false, false, true, false, + false, + preview, )?; sync_environment( diff --git a/crates/uv/src/commands/project/export.rs b/crates/uv/src/commands/project/export.rs index 566f4af41..c14bfd904 100644 --- a/crates/uv/src/commands/project/export.rs +++ b/crates/uv/src/commands/project/export.rs @@ -61,7 +61,7 @@ pub(crate) async fn export( install_options: InstallOptions, output_file: Option, extras: ExtrasSpecification, - dev: DependencyGroups, + groups: DependencyGroups, editable: EditableMode, locked: bool, frozen: bool, @@ -122,7 +122,7 @@ pub(crate) async fn export( ExportTarget::Script(_) => DefaultExtras::default(), }; - let dev = dev.with_defaults(default_groups); + let groups = groups.with_defaults(default_groups); let extras = extras.with_defaults(default_extras); // Find an interpreter for the project, unless `--frozen` is set. @@ -142,12 +142,14 @@ pub(crate) async fn export( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), ExportTarget::Project(project) => ProjectInterpreter::discover( project.workspace(), project_dir, + &groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -158,6 +160,7 @@ pub(crate) async fn export( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -190,6 +193,7 @@ pub(crate) async fn export( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -206,7 +210,7 @@ pub(crate) async fn export( }; // Validate that the set of requested extras and development groups are compatible. - detect_conflicts(&lock, &extras, &dev)?; + detect_conflicts(&lock, &extras, &groups)?; // Identify the installation target. let target = match &target { @@ -259,7 +263,7 @@ pub(crate) async fn export( // Validate that the set of requested extras and development groups are defined in the lockfile. target.validate_extras(&extras)?; - target.validate_groups(&dev)?; + target.validate_groups(&groups)?; // Write the resolved dependencies to the output channel. 
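The key change in the cached-environment hunk above: both the canonicalized base interpreter and the canonicalized `sys.prefix` of the target environment feed the digest, so identical `--with` layers for different virtual environments no longer share (and clobber) one overlay. A simplified sketch with `std`'s `DefaultHasher` standing in for uv's `cache_digest`:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::io;
use std::path::Path;

// Combine the two canonicalized paths into one cache key, as the real code
// does with `cache_digest(&(..., ...))`.
fn environment_key(base_executable: &Path, sys_prefix: &Path) -> io::Result<u64> {
    let mut hasher = DefaultHasher::new();
    base_executable.canonicalize()?.hash(&mut hasher);
    sys_prefix.canonicalize()?.hash(&mut hasher);
    Ok(hasher.finish())
}

fn main() -> io::Result<()> {
    let exe = std::env::current_exe()?;
    let prefix = exe.parent().expect("executable has a parent").to_path_buf();
    println!("{:016x}", environment_key(&exe, &prefix)?);
    Ok(())
}
```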
let mut writer = OutputWriter::new(!quiet || output_file.is_none(), output_file.as_deref()); @@ -306,7 +310,7 @@ pub(crate) async fn export( &target, &prune, &extras, - &dev, + &groups, include_annotations, editable, hashes, @@ -328,7 +332,7 @@ pub(crate) async fn export( &target, &prune, &extras, - &dev, + &groups, include_annotations, editable, &install_options, diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs index 376e8e007..15fed409e 100644 --- a/crates/uv/src/commands/project/init.rs +++ b/crates/uv/src/commands/project/init.rs @@ -4,13 +4,15 @@ use std::fmt::Write; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::str::FromStr; +use uv_distribution_types::RequiresPython; use tracing::{debug, trace, warn}; use uv_cache::Cache; use uv_cli::AuthorFrom; use uv_client::BaseClientBuilder; use uv_configuration::{ - PreviewMode, ProjectBuildBackend, VersionControlError, VersionControlSystem, + DependencyGroupsWithDefaults, PreviewMode, ProjectBuildBackend, VersionControlError, + VersionControlSystem, }; use uv_fs::{CWD, Simplified}; use uv_git::GIT; @@ -21,7 +23,6 @@ use uv_python::{ PythonPreference, PythonRequest, PythonVariant, PythonVersionFile, VersionFileDiscoveryOptions, VersionRequest, }; -use uv_resolver::RequiresPython; use uv_scripts::{Pep723Script, ScriptTag}; use uv_settings::PythonInstallMirrors; use uv_static::EnvVars; @@ -86,6 +87,7 @@ pub(crate) async fn init( pin_python, package, no_config, + preview, ) .await?; @@ -201,6 +203,7 @@ async fn init_script( pin_python: bool, package: bool, no_config: bool, + preview: PreviewMode, ) -> Result<()> { if no_workspace { warn_user_once!("`--no-workspace` is a no-op for Python scripts, which are standalone"); @@ -257,6 +260,7 @@ async fn init_script( &client_builder, cache, &reporter, + preview, ) .await?; @@ -433,6 +437,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -460,6 +465,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -502,7 +508,7 @@ async fn init_project( (requires_python, python_request) } else if let Some(requires_python) = workspace .as_ref() - .map(find_requires_python) + .map(|workspace| find_requires_python(workspace, &DependencyGroupsWithDefaults::none())) .transpose()? .flatten() { @@ -526,6 +532,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -553,6 +560,7 @@ async fn init_project( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? 
.into_interpreter(); diff --git a/crates/uv/src/commands/project/install_target.rs b/crates/uv/src/commands/project/install_target.rs index d225114c9..b0f20e76f 100644 --- a/crates/uv/src/commands/project/install_target.rs +++ b/crates/uv/src/commands/project/install_target.rs @@ -165,11 +165,18 @@ impl<'lock> InstallTarget<'lock> { .requirements() .into_iter() .map(Cow::Owned) - .chain(workspace.dependency_groups().ok().into_iter().flat_map( - |dependency_groups| { - dependency_groups.into_values().flatten().map(Cow::Owned) - }, - )) + .chain( + workspace + .workspace_dependency_groups() + .ok() + .into_iter() + .flat_map(|dependency_groups| { + dependency_groups + .into_values() + .flat_map(|group| group.requirements) + .map(Cow::Owned) + }), + ) .chain(workspace.packages().values().flat_map(|member| { // Iterate over all dependencies in each member. let dependencies = member @@ -316,9 +323,15 @@ impl<'lock> InstallTarget<'lock> { let known_groups = member_packages .iter() .flat_map(|package| package.dependency_groups().keys().map(Cow::Borrowed)) - .chain(workspace.dependency_groups().ok().into_iter().flat_map( - |dependency_groups| dependency_groups.into_keys().map(Cow::Owned), - )) + .chain( + workspace + .workspace_dependency_groups() + .ok() + .into_iter() + .flat_map(|dependency_groups| { + dependency_groups.into_keys().map(Cow::Owned) + }), + ) .collect::>(); for group in groups.explicit_names() { diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs index 89b3713cc..cd4242833 100644 --- a/crates/uv/src/commands/project/lock.rs +++ b/crates/uv/src/commands/project/lock.rs @@ -12,13 +12,14 @@ use tracing::debug; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - Concurrency, Constraints, DryRun, ExtrasSpecification, PreviewMode, Reinstall, Upgrade, + Concurrency, Constraints, DependencyGroupsWithDefaults, DryRun, ExtrasSpecification, + PreviewMode, Reinstall, Upgrade, }; use uv_dispatch::BuildDispatch; use uv_distribution::DistributionDatabase; use uv_distribution_types::{ DependencyMetadata, HashGeneration, Index, IndexLocations, NameRequirementSpecification, - Requirement, UnresolvedRequirementSpecification, + Requirement, RequiresPython, UnresolvedRequirementSpecification, }; use uv_git::ResolvedRepositoryReference; use uv_normalize::{GroupName, PackageName}; @@ -28,7 +29,7 @@ use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreferenc use uv_requirements::ExtrasResolver; use uv_requirements::upgrade::{LockedRequirements, read_lock_requirements}; use uv_resolver::{ - FlatIndex, InMemoryIndex, Lock, Options, OptionsBuilder, PythonRequirement, RequiresPython, + FlatIndex, InMemoryIndex, Lock, Options, OptionsBuilder, PythonRequirement, ResolverEnvironment, ResolverManifest, SatisfiesResult, UniversalMarker, }; use uv_scripts::{Pep723ItemRef, Pep723Script}; @@ -113,6 +114,7 @@ pub(crate) async fn lock( &client_builder, cache, &reporter, + preview, ) .await?; Some(Pep723Script::init(&path, requires_python.specifiers()).await?) @@ -142,6 +144,8 @@ pub(crate) async fn lock( LockTarget::Workspace(workspace) => ProjectInterpreter::discover( workspace, project_dir, + // Don't enable any groups' requires-python for interpreter discovery + &DependencyGroupsWithDefaults::none(), python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -152,6 +156,7 @@ pub(crate) async fn lock( Some(false), cache, printer, + preview, ) .await? 
.into_interpreter(), @@ -167,6 +172,7 @@ pub(crate) async fn lock( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -193,6 +199,7 @@ pub(crate) async fn lock( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -257,6 +264,7 @@ pub(super) struct LockOperation<'env> { logger: Box, concurrency: Concurrency, cache: &'env Cache, + workspace_cache: &'env WorkspaceCache, printer: Printer, preview: PreviewMode, } @@ -271,6 +279,7 @@ impl<'env> LockOperation<'env> { logger: Box, concurrency: Concurrency, cache: &'env Cache, + workspace_cache: &'env WorkspaceCache, printer: Printer, preview: PreviewMode, ) -> Self { @@ -283,6 +292,7 @@ impl<'env> LockOperation<'env> { logger, concurrency, cache, + workspace_cache, printer, preview, } @@ -328,6 +338,7 @@ impl<'env> LockOperation<'env> { self.logger, self.concurrency, self.cache, + self.workspace_cache, self.printer, self.preview, ) @@ -366,6 +377,7 @@ impl<'env> LockOperation<'env> { self.logger, self.concurrency, self.cache, + self.workspace_cache, self.printer, self.preview, ) @@ -396,6 +408,7 @@ async fn do_lock( logger: Box, concurrency: Concurrency, cache: &Cache, + workspace_cache: &WorkspaceCache, printer: Printer, preview: PreviewMode, ) -> Result { @@ -437,8 +450,8 @@ async fn do_lock( let build_constraints = target.lower(build_constraints, index_locations, *sources)?; let dependency_groups = dependency_groups .into_iter() - .map(|(name, requirements)| { - let requirements = target.lower(requirements, index_locations, *sources)?; + .map(|(name, group)| { + let requirements = target.lower(group.requirements, index_locations, *sources)?; Ok((name, requirements)) }) .collect::, ProjectError>>()?; @@ -580,16 +593,7 @@ async fn do_lock( .keyring(*keyring_provider) .allow_insecure_host(network_settings.allow_insecure_host.clone()); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); for index in target.indexes() { if let Some(credentials) = index.credentials() { @@ -648,8 +652,6 @@ async fn do_lock( FlatIndex::from_entries(entries, None, &hasher, build_options) }; - let workspace_cache = WorkspaceCache::default(); - // Create a build dispatch. let build_dispatch = BuildDispatch::new( &client, @@ -668,7 +670,7 @@ async fn do_lock( &build_hasher, *exclude_newer, *sources, - workspace_cache, + workspace_cache.clone(), concurrency, preview, ); @@ -980,13 +982,54 @@ impl ValidatedLock { return Ok(Self::Unusable(lock)); } Upgrade::Packages(_) => { - // If the user specified `--upgrade-package`, then at best we can prefer some of - // the existing versions. - debug!("Ignoring existing lockfile due to `--upgrade-package`"); - return Ok(Self::Preferable(lock)); + // This is handled below, after some checks regarding fork + // markers. In particular, we'd like to return `Preferable` + // here, but we shouldn't if the fork markers cannot be + // reused. } } + // NOTE: It's important that this appears before any possible path that + // returns `Self::Preferable`. In particular, if our fork markers are + // bunk, then we shouldn't return a result that indicates we should try + // to re-use the existing fork markers. 
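// NOTE (illustrative sketch, not uv code): the NOTE comments in this hunk describe an
// ordering invariant. Any check that can invalidate the existing fork markers must run
// before the first code path that returns `Preferable`, since `Preferable` tells the
// resolver it may reuse those markers. The names below (`Validity`, `validate`) are
// assumptions, not uv's types.
enum Validity {
    // Reuse locked versions as preferences, but discard the fork markers.
    Versions,
    // Reuse locked versions and the fork markers.
    Preferable,
}

fn validate(
    markers_cover_environments: bool,
    markers_cover_requires_python: bool,
    upgrade_specific_packages: bool,
) -> Validity {
    // Coverage checks come first: returning `Preferable` with non-covering fork
    // markers would be unsound.
    if !markers_cover_environments || !markers_cover_requires_python {
        return Validity::Versions;
    }
    // Only now is it safe to handle `--upgrade-package` by preferring the existing
    // versions.
    if upgrade_specific_packages {
        return Validity::Preferable;
    }
    // ...further checks elided; the real code has more outcomes.
    Validity::Preferable
}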
+ if let Err((fork_markers_union, environments_union)) = lock.check_marker_coverage() { + warn_user!( + "Ignoring existing lockfile due to fork markers not covering the supported environments: `{}` vs `{}`", + fork_markers_union + .try_to_string() + .unwrap_or("true".to_string()), + environments_union + .try_to_string() + .unwrap_or("true".to_string()), + ); + return Ok(Self::Versions(lock)); + } + + // NOTE: As above, this should also appear before any + // possible code path that can return `Self::Preferable`. + if let Err((fork_markers_union, requires_python_marker)) = + lock.requires_python_coverage(requires_python) + { + warn_user!( + "Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `{}` vs `{}`", + fork_markers_union + .try_to_string() + .unwrap_or("true".to_string()), + requires_python_marker + .try_to_string() + .unwrap_or("true".to_string()), + ); + return Ok(Self::Versions(lock)); + } + + if let Upgrade::Packages(_) = upgrade { + // If the user specified `--upgrade-package`, then at best we can prefer some of + // the existing versions. + debug!("Ignoring existing lockfile due to `--upgrade-package`"); + return Ok(Self::Preferable(lock)); + } + // If the Requires-Python bound has changed, we have to perform a clean resolution, since // the set of `resolution-markers` may no longer cover the entire supported Python range. if lock.requires_python().range() != requires_python.range() { @@ -1019,19 +1062,6 @@ impl ValidatedLock { return Ok(Self::Versions(lock)); } - if let Err((fork_markers_union, environments_union)) = lock.check_marker_coverage() { - warn_user!( - "Ignoring existing lockfile due to fork markers not covering the supported environments: `{}` vs `{}`", - fork_markers_union - .try_to_string() - .unwrap_or("true".to_string()), - environments_union - .try_to_string() - .unwrap_or("true".to_string()), - ); - return Ok(Self::Versions(lock)); - } - // If the set of required platforms has changed, we have to perform a clean resolution. let expected = lock.simplified_required_environments(); let actual = required_environments diff --git a/crates/uv/src/commands/project/lock_target.rs b/crates/uv/src/commands/project/lock_target.rs index cb45aa8ec..4618b3b84 100644 --- a/crates/uv/src/commands/project/lock_target.rs +++ b/crates/uv/src/commands/project/lock_target.rs @@ -3,15 +3,15 @@ use std::path::{Path, PathBuf}; use itertools::Either; -use uv_configuration::SourceStrategy; +use uv_configuration::{DependencyGroupsWithDefaults, SourceStrategy}; use uv_distribution::LoweredRequirement; -use uv_distribution_types::{Index, IndexLocations, Requirement}; +use uv_distribution_types::{Index, IndexLocations, Requirement, RequiresPython}; use uv_normalize::{GroupName, PackageName}; use uv_pep508::RequirementOrigin; use uv_pypi_types::{Conflicts, SupportedEnvironments, VerbatimParsedUrl}; -use uv_resolver::{Lock, LockVersion, RequiresPython, VERSION}; +use uv_resolver::{Lock, LockVersion, VERSION}; use uv_scripts::Pep723Script; -use uv_workspace::dependency_groups::DependencyGroupError; +use uv_workspace::dependency_groups::{DependencyGroupError, FlatDependencyGroup}; use uv_workspace::{Workspace, WorkspaceMember}; use crate::commands::project::{ProjectError, find_requires_python}; @@ -100,12 +100,9 @@ impl<'lock> LockTarget<'lock> { /// attached to any members within the target.
pub(crate) fn dependency_groups( self, - ) -> Result< - BTreeMap<GroupName, Vec<Requirement<VerbatimParsedUrl>>>, - DependencyGroupError, - > { + ) -> Result<BTreeMap<GroupName, FlatDependencyGroup>, DependencyGroupError> { match self { - Self::Workspace(workspace) => workspace.dependency_groups(), + Self::Workspace(workspace) => workspace.workspace_dependency_groups(), Self::Script(_) => Ok(BTreeMap::new()), } } @@ -219,7 +216,11 @@ impl<'lock> LockTarget<'lock> { #[allow(clippy::result_large_err)] pub(crate) fn requires_python(self) -> Result<Option<RequiresPython>, ProjectError> { match self { - Self::Workspace(workspace) => find_requires_python(workspace), + Self::Workspace(workspace) => { + // When locking, don't try to enforce requires-python bounds that appear on groups + let groups = DependencyGroupsWithDefaults::none(); + find_requires_python(workspace, &groups) + } Self::Script(script) => Ok(script .metadata .requires_python diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index d2efc3ccd..c327e8a44 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -18,13 +18,14 @@ use uv_configuration::{ use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution::{DistributionDatabase, LoweredRequirement}; use uv_distribution_types::{ - Index, Requirement, Resolution, UnresolvedRequirement, UnresolvedRequirementSpecification, + Index, Requirement, RequiresPython, Resolution, UnresolvedRequirement, + UnresolvedRequirementSpecification, }; use uv_fs::{CWD, LockedFile, Simplified}; use uv_git::ResolvedRepositoryReference; use uv_installer::{SatisfiesResult, SitePackages}; use uv_normalize::{DEV_DEPENDENCIES, DefaultGroups, ExtraName, GroupName, PackageName}; -use uv_pep440::{Version, VersionSpecifiers}; +use uv_pep440::{TildeVersionSpecifier, Version, VersionSpecifiers}; use uv_pep508::MarkerTreeContents; use uv_pypi_types::{ConflictPackage, ConflictSet, Conflicts}; use uv_python::{ @@ -35,8 +36,8 @@ use uv_python::{ use uv_requirements::upgrade::{LockedRequirements, read_lock_requirements}; use uv_requirements::{NamedRequirementsResolver, RequirementsSpecification}; use uv_resolver::{ - FlatIndex, Lock, OptionsBuilder, Preference, PythonRequirement, RequiresPython, - ResolverEnvironment, ResolverOutput, + FlatIndex, Lock, OptionsBuilder, Preference, PythonRequirement, ResolverEnvironment, + ResolverOutput, }; use uv_scripts::Pep723ItemRef; use uv_settings::PythonInstallMirrors; @@ -45,7 +46,7 @@ use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy}; use uv_warnings::{warn_user, warn_user_once}; use uv_workspace::dependency_groups::DependencyGroupError; use uv_workspace::pyproject::PyProjectToml; -use uv_workspace::{Workspace, WorkspaceCache}; +use uv_workspace::{RequiresPythonSources, Workspace, WorkspaceCache}; use crate::commands::pip::loggers::{InstallLogger, ResolveLogger}; use crate::commands::pip::operations::{Changelog, Modifications}; @@ -108,19 +109,28 @@ pub(crate) enum ProjectError { Conflict(#[from] ConflictError), #[error( - "The requested interpreter resolved to Python {0}, which is incompatible with the project's Python requirement: `{1}`" + "The requested interpreter resolved to Python {_0}, which is incompatible with the project's Python requirement: `{_1}`{}", + format_optional_requires_python_sources(_2, *_3) )] - RequestedPythonProjectIncompatibility(Version, RequiresPython), + RequestedPythonProjectIncompatibility(Version, RequiresPython, RequiresPythonSources, bool), #[error( - "The Python request from `{0}` resolved to Python {1}, which is incompatible with the project's
Python requirement: `{2}`. Use `uv python pin` to update the `.python-version` file to a compatible version." + "The Python request from `{_0}` resolved to Python {_1}, which is incompatible with the project's Python requirement: `{_2}`{}\nUse `uv python pin` to update the `.python-version` file to a compatible version", + format_optional_requires_python_sources(_3, *_4) )] - DotPythonVersionProjectIncompatibility(String, Version, RequiresPython), + DotPythonVersionProjectIncompatibility( + String, + Version, + RequiresPython, + RequiresPythonSources, + bool, + ), #[error( - "The resolved Python interpreter (Python {0}) is incompatible with the project's Python requirement: `{1}`" + "The resolved Python interpreter (Python {_0}) is incompatible with the project's Python requirement: `{_1}`{}", + format_optional_requires_python_sources(_2, *_3) )] - RequiresPythonProjectIncompatibility(Version, RequiresPython), + RequiresPythonProjectIncompatibility(Version, RequiresPython, RequiresPythonSources, bool), #[error( "The requested interpreter resolved to Python {0}, which is incompatible with the script's Python requirement: `{1}`" @@ -137,34 +147,6 @@ pub(crate) enum ProjectError { )] RequiresPythonScriptIncompatibility(Version, RequiresPython), - #[error("The requested interpreter resolved to Python {0}, which is incompatible with the project's Python requirement: `{1}`. However, a workspace member (`{member}`) supports Python {3}. To install the workspace member on its own, navigate to `{path}`, then run `{venv}` followed by `{install}`.", member = _2.cyan(), venv = format!("uv venv --python {_0}").green(), install = "uv pip install -e .".green(), path = _4.user_display().cyan() )] - RequestedMemberIncompatibility( - Version, - RequiresPython, - PackageName, - VersionSpecifiers, - PathBuf, - ), - - #[error("The Python request from `{0}` resolved to Python {1}, which is incompatible with the project's Python requirement: `{2}`. However, a workspace member (`{member}`) supports Python {4}. To install the workspace member on its own, navigate to `{path}`, then run `{venv}` followed by `{install}`.", member = _3.cyan(), venv = format!("uv venv --python {_1}").green(), install = "uv pip install -e .".green(), path = _5.user_display().cyan() )] - DotPythonVersionMemberIncompatibility( - String, - Version, - RequiresPython, - PackageName, - VersionSpecifiers, - PathBuf, - ), - - #[error("The resolved Python interpreter (Python {0}) is incompatible with the project's Python requirement: `{1}`. However, a workspace member (`{member}`) supports Python {3}. 
To install the workspace member on its own, navigate to `{path}`, then run `{venv}` followed by `{install}`.", member = _2.cyan(), venv = format!("uv venv --python {_0}").green(), install = "uv pip install -e .".green(), path = _4.user_display().cyan() )] - RequiresPythonMemberIncompatibility( - Version, - RequiresPython, - PackageName, - VersionSpecifiers, - PathBuf, - ), - #[error("Group `{0}` is not defined in the project's `dependency-groups` table")] MissingGroupProject(GroupName), @@ -194,8 +176,11 @@ pub(crate) enum ProjectError { #[error("Environment markers `{0}` don't overlap with Python requirement `{1}`")] DisjointEnvironment(MarkerTreeContents, VersionSpecifiers), - #[error("The workspace contains conflicting Python requirements:\n{}", _0.iter().map(|(name, specifiers)| format!("- `{name}`: `{specifiers}`")).join("\n"))] - DisjointRequiresPython(BTreeMap), + #[error( + "Found conflicting Python requirements:\n{}", + format_requires_python_sources(_0) + )] + DisjointRequiresPython(BTreeMap<(PackageName, Option), VersionSpecifiers>), #[error("Environment marker is empty")] EmptyEnvironment, @@ -286,7 +271,7 @@ pub(crate) struct ConflictError { /// The items from the set that were enabled, and thus create the conflict. pub(crate) conflicts: Vec, /// Enabled dependency groups with defaults applied. - pub(crate) dev: DependencyGroupsWithDefaults, + pub(crate) groups: DependencyGroupsWithDefaults, } impl std::fmt::Display for ConflictError { @@ -338,7 +323,7 @@ impl std::fmt::Display for ConflictError { .iter() .map(|conflict| match conflict { ConflictPackage::Group(group) - if self.dev.contains_because_default(group) => + if self.groups.contains_because_default(group) => format!("`{group}` (enabled by default)"), ConflictPackage::Group(group) => format!("`{group}`"), ConflictPackage::Extra(..) => unreachable!(), @@ -358,7 +343,7 @@ impl std::fmt::Display for ConflictError { let conflict = match conflict { ConflictPackage::Extra(extra) => format!("extra `{extra}`"), ConflictPackage::Group(group) - if self.dev.contains_because_default(group) => + if self.groups.contains_because_default(group) => { format!("group `{group}` (enabled by default)") } @@ -429,23 +414,40 @@ impl PlatformState { #[allow(clippy::result_large_err)] pub(crate) fn find_requires_python( workspace: &Workspace, + groups: &DependencyGroupsWithDefaults, ) -> Result, ProjectError> { + let requires_python = workspace.requires_python(groups)?; // If there are no `Requires-Python` specifiers in the workspace, return `None`. - if workspace.requires_python().next().is_none() { + if requires_python.is_empty() { return Ok(None); } - match RequiresPython::intersection( - workspace - .requires_python() - .map(|(.., specifiers)| specifiers), - ) { + for ((package, group), specifiers) in &requires_python { + if let [spec] = &specifiers[..] { + if let Some(spec) = TildeVersionSpecifier::from_specifier_ref(spec) { + if spec.has_patch() { + continue; + } + let (lower, upper) = spec.bounding_specifiers(); + let spec_0 = spec.with_patch_version(0); + let (lower_0, upper_0) = spec_0.bounding_specifiers(); + warn_user_once!( + "The `requires-python` specifier (`{spec}`) in `{package}{group}` \ + uses the tilde specifier (`~=`) without a patch version. This will be \ + interpreted as `{lower}, {upper}`. Did you mean `{spec_0}` to constrain the \ + version as `{lower_0}, {upper_0}`? 
We recommend only using \ + the tilde specifier with a patch version to avoid ambiguity.", + group = if let Some(group) = group { + format!(":{group}") + } else { + String::new() + }, + ); + } + } + } + match RequiresPython::intersection(requires_python.iter().map(|(.., specifiers)| specifiers)) { Some(requires_python) => Ok(Some(requires_python)), - None => Err(ProjectError::DisjointRequiresPython( - workspace - .requires_python() - .map(|(name, specifiers)| (name.clone(), specifiers.clone())) - .collect(), - )), + None => Err(ProjectError::DisjointRequiresPython(requires_python)), } } @@ -457,6 +459,7 @@ pub(crate) fn find_requires_python( pub(crate) fn validate_project_requires_python( interpreter: &Interpreter, workspace: Option<&Workspace>, + groups: &DependencyGroupsWithDefaults, requires_python: &RequiresPython, source: &PythonRequestSource, ) -> Result<(), ProjectError> { @@ -464,57 +467,24 @@ pub(crate) fn validate_project_requires_python( return Ok(()); } - // If the Python version is compatible with one of the workspace _members_, raise - // a dedicated error. For example, if the workspace root requires Python >=3.12, but - // a library in the workspace is compatible with Python >=3.8, the user may attempt - // to sync on Python 3.8. This will fail, but we should provide a more helpful error - // message. - for (name, member) in workspace.into_iter().flat_map(Workspace::packages) { - let Some(project) = member.pyproject_toml().project.as_ref() else { - continue; - }; - let Some(specifiers) = project.requires_python.as_ref() else { - continue; - }; - if specifiers.contains(interpreter.python_version()) { - return match source { - PythonRequestSource::UserRequest => { - Err(ProjectError::RequestedMemberIncompatibility( - interpreter.python_version().clone(), - requires_python.clone(), - name.clone(), - specifiers.clone(), - member.root().clone(), - )) - } - PythonRequestSource::DotPythonVersion(file) => { - Err(ProjectError::DotPythonVersionMemberIncompatibility( - file.path().user_display().to_string(), - interpreter.python_version().clone(), - requires_python.clone(), - name.clone(), - specifiers.clone(), - member.root().clone(), - )) - } - PythonRequestSource::RequiresPython => { - Err(ProjectError::RequiresPythonMemberIncompatibility( - interpreter.python_version().clone(), - requires_python.clone(), - name.clone(), - specifiers.clone(), - member.root().clone(), - )) - } - }; - } - } + // Find all the individual requires_python constraints that conflict + let conflicting_requires = workspace + .and_then(|workspace| workspace.requires_python(groups).ok()) + .into_iter() + .flatten() + .filter(|(.., requires)| !requires.contains(interpreter.python_version())) + .collect::(); + let workspace_non_trivial = workspace + .map(|workspace| workspace.packages().len() > 1) + .unwrap_or(false); match source { PythonRequestSource::UserRequest => { Err(ProjectError::RequestedPythonProjectIncompatibility( interpreter.python_version().clone(), requires_python.clone(), + conflicting_requires, + workspace_non_trivial, )) } PythonRequestSource::DotPythonVersion(file) => { @@ -522,12 +492,16 @@ pub(crate) fn validate_project_requires_python( file.path().user_display().to_string(), interpreter.python_version().clone(), requires_python.clone(), + conflicting_requires, + workspace_non_trivial, )) } PythonRequestSource::RequiresPython => { Err(ProjectError::RequiresPythonProjectIncompatibility( interpreter.python_version().clone(), requires_python.clone(), + conflicting_requires, + 
workspace_non_trivial, )) } } @@ -675,6 +649,7 @@ impl ScriptInterpreter { active: Option, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // For now, we assume that scripts are never evaluated in the context of a workspace. let workspace = None; @@ -732,13 +707,20 @@ impl ScriptInterpreter { install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); if let Err(err) = match requires_python { Some((requires_python, RequiresPythonSource::Project)) => { - validate_project_requires_python(&interpreter, workspace, &requires_python, &source) + validate_project_requires_python( + &interpreter, + workspace, + &DependencyGroupsWithDefaults::none(), + &requires_python, + &source, + ) } Some((requires_python, RequiresPythonSource::Script)) => { validate_script_requires_python(&interpreter, &requires_python, &source) @@ -811,7 +793,7 @@ pub(crate) enum EnvironmentIncompatibilityError { RequiresPython(EnvironmentKind, RequiresPython), #[error( - "The interpreter in the {0} environment has different version ({1}) than it was created with ({2})" + "The interpreter in the {0} environment has a different version ({1}) than it was created with ({2})" )] PyenvVersionConflict(EnvironmentKind, Version, Version), } @@ -827,8 +809,8 @@ fn environment_is_usable( if let Some((cfg_version, int_version)) = environment.get_pyvenv_version_conflict() { return Err(EnvironmentIncompatibilityError::PyenvVersionConflict( kind, - cfg_version, int_version, + cfg_version, )); } @@ -874,6 +856,7 @@ impl ProjectInterpreter { pub(crate) async fn discover( workspace: &Workspace, project_dir: &Path, + groups: &DependencyGroupsWithDefaults, python_request: Option, network_settings: &NetworkSettings, python_preference: PythonPreference, @@ -884,14 +867,21 @@ impl ProjectInterpreter { active: Option, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { // Resolve the Python request and requirement for the workspace. let WorkspacePython { source, python_request, requires_python, - } = WorkspacePython::from_request(python_request, Some(workspace), project_dir, no_config) - .await?; + } = WorkspacePython::from_request( + python_request, + Some(workspace), + groups, + project_dir, + no_config, + ) + .await?; // Read from the virtual environment first. let root = workspace.venv(active); @@ -974,6 +964,7 @@ impl ProjectInterpreter { install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await?; @@ -1002,6 +993,7 @@ impl ProjectInterpreter { validate_project_requires_python( &interpreter, Some(workspace), + groups, requires_python, &source, )?; @@ -1081,10 +1073,14 @@ impl WorkspacePython { pub(crate) async fn from_request( python_request: Option, workspace: Option<&Workspace>, + groups: &DependencyGroupsWithDefaults, project_dir: &Path, no_config: bool, ) -> Result { - let requires_python = workspace.map(find_requires_python).transpose()?.flatten(); + let requires_python = workspace + .map(|workspace| find_requires_python(workspace, groups)) + .transpose()? 
+ .flatten(); let workspace_root = workspace.map(Workspace::install_path); @@ -1165,6 +1161,8 @@ impl ScriptPython { } = WorkspacePython::from_request( python_request, workspace, + // Scripts have no groups to hang requires-python settings off of + &DependencyGroupsWithDefaults::none(), script.path().and_then(Path::parent).unwrap_or(&**CWD), no_config, ) @@ -1231,6 +1229,7 @@ impl ProjectEnvironment { /// Initialize a virtual environment for the current project. pub(crate) async fn get_or_init( workspace: &Workspace, + groups: &DependencyGroupsWithDefaults, python: Option, install_mirrors: &PythonInstallMirrors, network_settings: &NetworkSettings, @@ -1242,13 +1241,25 @@ impl ProjectEnvironment { cache: &Cache, dry_run: DryRun, printer: Printer, + preview: PreviewMode, ) -> Result { // Lock the project environment to avoid synchronization issues. - let _lock = ProjectInterpreter::lock(workspace).await?; + let _lock = ProjectInterpreter::lock(workspace) + .await + .inspect_err(|err| { + warn!("Failed to acquire project environment lock: {err}"); + }) + .ok(); + + let upgradeable = preview.is_enabled() + && python + .as_ref() + .is_none_or(|request| !request.includes_patch()); match ProjectInterpreter::discover( workspace, workspace.install_path().as_ref(), + groups, python, network_settings, python_preference, @@ -1259,6 +1270,7 @@ impl ProjectEnvironment { active, cache, printer, + preview, ) .await? { @@ -1328,6 +1340,8 @@ impl ProjectEnvironment { false, false, false, + upgradeable, + preview, )?; return Ok(if replace { Self::WouldReplace(root, environment, temp_dir) @@ -1365,6 +1379,8 @@ impl ProjectEnvironment { false, false, false, + upgradeable, + preview, )?; if replace { @@ -1448,9 +1464,19 @@ impl ScriptEnvironment { cache: &Cache, dry_run: DryRun, printer: Printer, + preview: PreviewMode, ) -> Result { // Lock the script environment to avoid synchronization issues. - let _lock = ScriptInterpreter::lock(script).await?; + let _lock = ScriptInterpreter::lock(script) + .await + .inspect_err(|err| { + warn!("Failed to acquire script environment lock: {err}"); + }) + .ok(); + + let upgradeable = python_request + .as_ref() + .is_none_or(|request| !request.includes_patch()); match ScriptInterpreter::discover( script, @@ -1464,6 +1490,7 @@ impl ScriptEnvironment { active, cache, printer, + preview, ) .await? { @@ -1496,6 +1523,8 @@ impl ScriptEnvironment { false, false, false, + upgradeable, + preview, )?; return Ok(if root.exists() { Self::WouldReplace(root, environment, temp_dir) @@ -1530,6 +1559,8 @@ impl ScriptEnvironment { false, false, false, + upgradeable, + preview, )?; Ok(if replaced { @@ -1630,16 +1661,7 @@ pub(crate) async fn resolve_names( .keyring(*keyring_provider) .allow_insecure_host(network_settings.allow_insecure_host.clone()); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? 
@@ -1750,6 +1772,7 @@ impl<'lock> EnvironmentSpecification<'lock> { pub(crate) async fn resolve_environment( spec: EnvironmentSpecification<'_>, interpreter: &Interpreter, + build_constraints: Constraints, settings: &ResolverSettings, network_settings: &NetworkSettings, state: &PlatformState, @@ -1800,16 +1823,7 @@ pub(crate) async fn resolve_environment( let marker_env = interpreter.resolver_marker_environment(); let python_requirement = PythonRequirement::from_interpreter(interpreter); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -1846,7 +1860,6 @@ pub(crate) async fn resolve_environment( let extras = ExtrasSpecification::default(); let groups = BTreeMap::new(); let hasher = HashStrategy::default(); - let build_constraints = Constraints::default(); let build_hasher = HashStrategy::default(); // When resolving from an interpreter, we assume an empty environment, so reinstalls and @@ -1982,16 +1995,7 @@ pub(crate) async fn sync_environment( let interpreter = venv.interpreter(); let tags = venv.interpreter().tags()?; - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -2197,16 +2201,7 @@ pub(crate) async fn update_environment( } } - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Initialize the registry client. let client = RegistryClientBuilder::try_from(client_builder)? @@ -2361,6 +2356,7 @@ pub(crate) async fn init_script_python_requirement( client_builder: &BaseClientBuilder<'_>, cache: &Cache, reporter: &PythonDownloadReporter, + preview: PreviewMode, ) -> anyhow::Result { let python_request = if let Some(request) = python { // (1) Explicit request from user @@ -2392,6 +2388,7 @@ pub(crate) async fn init_script_python_requirement( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? 
.into_interpreter(); @@ -2434,7 +2431,7 @@ pub(crate) fn default_dependency_groups( pub(crate) fn detect_conflicts( lock: &Lock, extras: &ExtrasSpecification, - dev: &DependencyGroupsWithDefaults, + groups: &DependencyGroupsWithDefaults, ) -> Result<(), ProjectError> { // Note that we need to collect all extras and groups that match in // a particular set, since extras can be declared as conflicting with @@ -2453,7 +2450,7 @@ pub(crate) fn detect_conflicts( } if item .group() - .map(|group| dev.contains(group)) + .map(|group| groups.contains(group)) .unwrap_or(false) { conflicts.push(item.conflict().clone()); @@ -2463,7 +2460,7 @@ pub(crate) fn detect_conflicts( return Err(ProjectError::Conflict(ConflictError { set: set.clone(), conflicts, - dev: dev.clone(), + groups: groups.clone(), })); } } @@ -2677,6 +2674,50 @@ fn cache_name(name: &str) -> Option> { } } +fn format_requires_python_sources(conflicts: &RequiresPythonSources) -> String { + conflicts + .iter() + .map(|((package, group), specifiers)| { + if let Some(group) = group { + format!("- {package}:{group}: {specifiers}") + } else { + format!("- {package}: {specifiers}") + } + }) + .join("\n") +} + +fn format_optional_requires_python_sources( + conflicts: &RequiresPythonSources, + workspace_non_trivial: bool, +) -> String { + // If there are multiple conflicts, print a list + if conflicts.len() > 1 { + return format!( + ".\nThe following `requires-python` declarations do not permit this version:\n{}", + format_requires_python_sources(conflicts) + ); + } + // If there's one conflict, give a clean message + if conflicts.len() == 1 { + let ((package, group), _) = conflicts.iter().next().unwrap(); + if let Some(group) = group { + if workspace_non_trivial { + return format!( + " (from workspace member `{package}`'s `tool.uv.dependency-groups.{group}.requires-python`)." + ); + } + return format!(" (from `tool.uv.dependency-groups.{group}.requires-python`)."); + } + if workspace_non_trivial { + return format!(" (from workspace member `{package}`'s `project.requires-python`)."); + } + return " (from `project.requires-python`).".to_owned(); + } + // Otherwise don't elaborate + String::new() +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/uv/src/commands/project/remove.rs b/crates/uv/src/commands/project/remove.rs index 6dab60012..6bc04160e 100644 --- a/crates/uv/src/commands/project/remove.rs +++ b/crates/uv/src/commands/project/remove.rs @@ -5,7 +5,7 @@ use std::str::FromStr; use anyhow::{Context, Result}; use owo_colors::OwoColorize; -use tracing::debug; +use tracing::{debug, warn}; use uv_cache::Cache; use uv_configuration::{ @@ -13,7 +13,7 @@ use uv_configuration::{ PreviewMode, }; use uv_fs::Simplified; -use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras}; +use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, DefaultGroups}; use uv_pep508::PackageName; use uv_python::{PythonDownloads, PythonPreference, PythonRequest}; use uv_scripts::{Pep723ItemRef, Pep723Metadata, Pep723Script}; @@ -202,6 +202,14 @@ pub(crate) async fn remove( // Update the `pyproject.toml` in memory.
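// NOTE (illustrative sketch, not uv code): how the suffix produced by
// `format_optional_requires_python_sources` above composes with the incompatibility
// errors in this file. `render_error` and its plain string inputs are hypothetical
// simplifications of the `#[error(...)]` attributes.
fn render_error(version: &str, requires: &str, suffix: &str) -> String {
    format!(
        "The requested interpreter resolved to Python {version}, \
         which is incompatible with the project's Python requirement: \
         `{requires}`{suffix}"
    )
}
// With one conflicting declaration the suffix is a short attribution such as
// " (from `project.requires-python`).". With several, it is ".\nThe following
// `requires-python` declarations do not permit this version:" followed by one
// "- package:group: specifiers" line per source. With none, it is empty, so the
// base message stands alone.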
let target = target.update(&content)?; + // Determine enabled groups and extras + let default_groups = match &target { + RemoveTarget::Project(project) => default_dependency_groups(project.pyproject_toml())?, + RemoveTarget::Script(_) => DefaultGroups::default(), + }; + let groups = DependencyGroups::default().with_defaults(default_groups); + let extras = ExtrasSpecification::default().with_defaults(DefaultExtras::default()); + // Convert to an `AddTarget` by attaching the appropriate interpreter or environment. let target = match target { RemoveTarget::Project(project) => { @@ -210,6 +218,7 @@ pub(crate) async fn remove( let interpreter = ProjectInterpreter::discover( project.workspace(), project_dir, + &groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -220,6 +229,7 @@ pub(crate) async fn remove( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -229,6 +239,7 @@ pub(crate) async fn remove( // Discover or create the virtual environment. let environment = ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -240,6 +251,7 @@ pub(crate) async fn remove( cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -260,6 +272,7 @@ pub(crate) async fn remove( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -268,7 +281,13 @@ pub(crate) async fn remove( } }; - let _lock = target.acquire_lock().await?; + let _lock = target + .acquire_lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Determine the lock mode. let mode = if locked { @@ -289,6 +308,7 @@ pub(crate) async fn remove( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -314,12 +334,6 @@ pub(crate) async fn remove( return Ok(ExitStatus::Success); }; - // Determine the default groups to include. - let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. - let default_extras = DefaultExtras::default(); - // Identify the installation target. let target = match &project { VirtualProject::Project(project) => InstallTarget::Project { @@ -338,8 +352,8 @@ pub(crate) async fn remove( match project::sync::do_sync( target, venv, - &ExtrasSpecification::default().with_defaults(default_extras), - &DependencyGroups::default().with_defaults(default_groups), + &extras, + &groups, EditableMode::Editable, InstallOptions::default(), Modifications::Exact, @@ -350,6 +364,7 @@ pub(crate) async fn remove( installer_metadata, concurrency, cache, + WorkspaceCache::default(), DryRun::Disabled, printer, preview, diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 035528b1c..a6ea4c0e0 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -78,7 +78,7 @@ pub(crate) async fn run( no_project: bool, no_config: bool, extras: ExtrasSpecification, - dev: DependencyGroups, + groups: DependencyGroups, editable: EditableMode, modifications: Modifications, python: Option, @@ -235,10 +235,19 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + // Determine the lock mode. 
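// NOTE (illustrative sketch, not uv code): the `.inspect_err(..).ok()` pattern above
// (repeated for the project, script, run, and sync paths in this diff) downgrades a
// failed lock acquisition from a hard error to a warning, holding an `Option<Guard>`
// for the scope of the operation. `acquire_best_effort` is a hypothetical helper.
fn acquire_best_effort<G, E: std::fmt::Display>(result: Result<G, E>) -> Option<G> {
    result
        .inspect_err(|err| {
            // uv logs via `tracing::warn!`; `eprintln!` keeps this sketch
            // dependency-free.
            eprintln!("Failed to acquire environment lock: {err}");
        })
        .ok()
}
// When the guard is `None`, the operation proceeds unlocked rather than aborting,
// trading strict mutual exclusion for robustness.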
let mode = if frozen { LockMode::Frozen @@ -261,6 +270,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }, concurrency, cache, + &workspace_cache, printer, preview, ) @@ -291,7 +301,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl target, &environment, &extras.with_defaults(DefaultExtras::default()), - &dev.with_defaults(DefaultGroups::default()), + &groups.with_defaults(DefaultGroups::default()), editable, install_options, modifications, @@ -306,6 +316,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl installer_metadata, concurrency, cache, + workspace_cache.clone(), DryRun::Disabled, printer, preview, @@ -359,6 +370,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -380,6 +392,14 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl ) }); + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); + match update_environment( environment, spec, @@ -401,7 +421,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl installer_metadata, concurrency, cache, - workspace_cache, + workspace_cache.clone(), DryRun::Disabled, printer, preview, @@ -433,6 +453,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl active.map_or(Some(false), Some), cache, printer, + preview, ) .await? .into_interpreter(); @@ -446,6 +467,8 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl false, false, false, + false, + preview, )?; Some(environment.into_interpreter()) @@ -456,7 +479,6 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }; // Discover and sync the base environment. - let workspace_cache = WorkspaceCache::default(); let temp_dir; let base_interpreter = if let Some(script_interpreter) = script_interpreter { // If we found a PEP 723 script and the user provided a project-only setting, warn. 
@@ -468,7 +490,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl if !extras.is_empty() { warn_user!("Extras are not supported for Python scripts with inline metadata"); } - for flag in dev.history().as_flags_pretty() { + for flag in groups.history().as_flags_pretty() { warn_user!("`{flag}` is not supported for Python scripts with inline metadata"); } if all_packages { @@ -543,7 +565,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl for flag in extras.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used alongside `--no-project`"); } - for flag in dev.history().as_flags_pretty() { + for flag in groups.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used alongside `--no-project`"); } if locked { @@ -560,7 +582,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl for flag in extras.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used outside of a project"); } - for flag in dev.history().as_flags_pretty() { + for flag in groups.history().as_flags_pretty() { warn_user!("`{flag}` has no effect when used outside of a project"); } if locked { @@ -583,6 +605,11 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl project.workspace().install_path().display() ); } + // Determine the groups and extras to include. + let default_groups = default_dependency_groups(project.pyproject_toml())?; + let default_extras = DefaultExtras::default(); + let groups = groups.with_defaults(default_groups); + let extras = extras.with_defaults(default_extras); let venv = if isolated { debug!("Creating isolated virtual environment"); @@ -602,6 +629,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl } = WorkspacePython::from_request( python.as_deref().map(PythonRequest::parse), Some(project.workspace()), + &groups, project_dir, no_config, ) @@ -618,6 +646,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -626,6 +655,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl validate_project_requires_python( &interpreter, Some(project.workspace()), + &groups, requires_python, &source, )?; @@ -641,12 +671,15 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl false, false, false, + false, + preview, )? } else { // If we're not isolating the environment, reuse the base environment for the // project. ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -658,6 +691,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()? @@ -677,13 +711,13 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl .map(|lock| (lock, project.workspace().install_path().to_owned())); } } else { - // Validate that any referenced dependency groups are defined in the workspace. - - // Determine the default groups to include. - let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. 
- let default_extras = DefaultExtras::default(); + let _lock = venv + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Determine the lock mode. let mode = if frozen { @@ -706,6 +740,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }, concurrency, cache, + &workspace_cache, printer, preview, ) @@ -769,18 +804,15 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl }; let install_options = InstallOptions::default(); - let dev = dev.with_defaults(default_groups); - let extras = extras.with_defaults(default_extras); - // Validate that the set of requested extras and development groups are defined in the lockfile. target.validate_extras(&extras)?; - target.validate_groups(&dev)?; + target.validate_groups(&groups)?; match project::sync::do_sync( target, &venv, &extras, - &dev, + &groups, editable, install_options, modifications, @@ -795,6 +827,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl installer_metadata, concurrency, cache, + workspace_cache.clone(), DryRun::Disabled, printer, preview, @@ -853,6 +886,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await?; @@ -872,6 +906,8 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl false, false, false, + false, + preview, )?; venv.into_interpreter() } else { diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index ed96795e5..6e057446e 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use anyhow::{Context, Result}; use itertools::Itertools; use owo_colors::OwoColorize; +use tracing::warn; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; @@ -57,7 +58,7 @@ pub(crate) async fn sync( all_packages: bool, package: Option, extras: ExtrasSpecification, - dev: DependencyGroups, + groups: DependencyGroups, editable: EditableMode, install_options: InstallOptions, modifications: Modifications, @@ -116,23 +117,24 @@ pub(crate) async fn sync( SyncTarget::Project(project) }; - // Determine the default groups to include. + // Determine the groups and extras to include. let default_groups = match &target { SyncTarget::Project(project) => default_dependency_groups(project.pyproject_toml())?, SyncTarget::Script(..) => DefaultGroups::default(), }; - - // Determine the default extras to include. let default_extras = match &target { SyncTarget::Project(_project) => DefaultExtras::default(), SyncTarget::Script(..) => DefaultExtras::default(), }; + let groups = groups.with_defaults(default_groups); + let extras = extras.with_defaults(default_extras); // Discover or create the virtual environment. 
let environment = match &target { SyncTarget::Project(project) => SyncEnvironment::Project( ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -144,6 +146,7 @@ pub(crate) async fn sync( cache, dry_run, printer, + preview, ) .await?, ), @@ -161,12 +164,19 @@ pub(crate) async fn sync( cache, dry_run, printer, + preview, ) .await?, ), }; - let _lock = environment.lock().await?; + let _lock = environment + .lock() + .await + .inspect_err(|err| { + warn!("Failed to acquire environment lock: {err}"); + }) + .ok(); // Notify the user of any environment changes. match &environment { @@ -321,7 +331,7 @@ pub(crate) async fn sync( installer_metadata, concurrency, cache, - workspace_cache, + workspace_cache.clone(), dry_run, printer, preview, @@ -368,6 +378,7 @@ pub(crate) async fn sync( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -437,8 +448,8 @@ pub(crate) async fn sync( match do_sync( sync_target, &environment, - &extras.with_defaults(default_extras), - &dev.with_defaults(default_groups), + &extras, + &groups, editable, install_options, modifications, @@ -449,6 +460,7 @@ pub(crate) async fn sync( installer_metadata, concurrency, cache, + workspace_cache, dry_run, printer, preview, @@ -573,7 +585,7 @@ pub(super) async fn do_sync( target: InstallTarget<'_>, venv: &PythonEnvironment, extras: &ExtrasSpecificationWithDefaults, - dev: &DependencyGroupsWithDefaults, + groups: &DependencyGroupsWithDefaults, editable: EditableMode, install_options: InstallOptions, modifications: Modifications, @@ -584,6 +596,7 @@ pub(super) async fn do_sync( installer_metadata: bool, concurrency: Concurrency, cache: &Cache, + workspace_cache: WorkspaceCache, dry_run: DryRun, printer: Printer, preview: PreviewMode, @@ -624,11 +637,11 @@ pub(super) async fn do_sync( } // Validate that the set of requested extras and development groups are compatible. - detect_conflicts(target.lock(), extras, dev)?; + detect_conflicts(target.lock(), extras, groups)?; // Validate that the set of requested extras and development groups are defined in the lockfile. target.validate_extras(extras)?; - target.validate_groups(dev)?; + target.validate_groups(groups)?; // Determine the markers to use for resolution. let marker_env = venv.interpreter().resolver_marker_environment(); @@ -665,7 +678,7 @@ pub(super) async fn do_sync( &marker_env, tags, extras, - dev, + groups, build_options, &install_options, )?; @@ -676,16 +689,7 @@ pub(super) async fn do_sync( // If necessary, convert editable to non-editable distributions. let resolution = apply_editable_mode(resolution, editable); - // Add all authenticated sources to the cache. - for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Populate credentials from the target. 
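// NOTE (illustrative sketch, not uv code): this diff repeatedly replaces an open-coded
// loop over `index_locations.allowed_indexes()` with a single
// `index_locations.cache_index_credentials()` call. The simplified types below sketch
// the shape of such a helper; the real one lives on `IndexLocations` and stores into
// `uv_auth`.
struct Index {
    url: String,
    credentials: Option<String>,
}

struct IndexLocations {
    indexes: Vec<Index>,
}

impl IndexLocations {
    // Store every index's credentials once, up front, so later requests can resolve
    // them from the shared credential cache.
    fn cache_index_credentials(&self) {
        for index in &self.indexes {
            if let Some(credentials) = &index.credentials {
                store_credentials(&index.url, credentials.clone());
            }
        }
    }
}

// Stand-in for the credential store.
fn store_credentials(_url: &str, _credentials: String) {}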
store_credentials_from_target(target); @@ -745,7 +749,7 @@ pub(super) async fn do_sync( &build_hasher, exclude_newer, sources, - WorkspaceCache::default(), + workspace_cache.clone(), concurrency, preview, ); diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs index 6bf57d1a7..d401940d9 100644 --- a/crates/uv/src/commands/project/tree.rs +++ b/crates/uv/src/commands/project/tree.rs @@ -34,7 +34,7 @@ use crate::settings::{NetworkSettings, ResolverSettings}; #[allow(clippy::fn_params_excessive_bools)] pub(crate) async fn tree( project_dir: &Path, - dev: DependencyGroups, + groups: DependencyGroups, locked: bool, frozen: bool, universal: bool, @@ -71,11 +71,12 @@ pub(crate) async fn tree( LockTarget::Workspace(&workspace) }; - // Determine the default groups to include. - let defaults = match target { + // Determine the groups to include. + let default_groups = match target { LockTarget::Workspace(workspace) => default_dependency_groups(workspace.pyproject_toml())?, LockTarget::Script(_) => DefaultGroups::default(), }; + let groups = groups.with_defaults(default_groups); let native_tls = network_settings.native_tls; @@ -96,12 +97,14 @@ pub(crate) async fn tree( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), LockTarget::Workspace(workspace) => ProjectInterpreter::discover( workspace, project_dir, + &groups, python.as_deref().map(PythonRequest::parse), network_settings, python_preference, @@ -112,6 +115,7 @@ pub(crate) async fn tree( Some(false), cache, printer, + preview, ) .await? .into_interpreter(), @@ -142,6 +146,7 @@ pub(crate) async fn tree( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -271,7 +276,7 @@ pub(crate) async fn tree( depth.into(), &prune, &package, - &dev.with_defaults(defaults), + &groups, no_dedupe, invert, ); diff --git a/crates/uv/src/commands/project/version.rs b/crates/uv/src/commands/project/version.rs index 0e50c2ac0..bc79f8eb9 100644 --- a/crates/uv/src/commands/project/version.rs +++ b/crates/uv/src/commands/project/version.rs @@ -10,8 +10,8 @@ use uv_cache::Cache; use uv_cli::version::VersionInfo; use uv_cli::{VersionBump, VersionFormat}; use uv_configuration::{ - Concurrency, DependencyGroups, DryRun, EditableMode, ExtrasSpecification, InstallOptions, - PreviewMode, + Concurrency, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode, + ExtrasSpecification, InstallOptions, PreviewMode, }; use uv_fs::Simplified; use uv_normalize::DefaultExtras; @@ -285,6 +285,7 @@ async fn print_frozen_version( let interpreter = ProjectInterpreter::discover( project.workspace(), project_dir, + &DependencyGroupsWithDefaults::none(), python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -295,6 +296,7 @@ async fn print_frozen_version( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -313,6 +315,7 @@ async fn print_frozen_version( Box::new(DefaultResolveLogger), concurrency, cache, + &WorkspaceCache::default(), printer, preview, ) @@ -378,12 +381,20 @@ async fn lock_and_sync( return Ok(ExitStatus::Success); } + // Determine the groups and extras that should be enabled. 
+ let default_groups = default_dependency_groups(project.pyproject_toml())?; + let default_extras = DefaultExtras::default(); + let groups = DependencyGroups::default().with_defaults(default_groups); + let extras = ExtrasSpecification::default().with_defaults(default_extras); + let install_options = InstallOptions::default(); + // Convert to an `AddTarget` by attaching the appropriate interpreter or environment. let target = if no_sync { // Discover the interpreter. let interpreter = ProjectInterpreter::discover( project.workspace(), project_dir, + &groups, python.as_deref().map(PythonRequest::parse), &network_settings, python_preference, @@ -394,6 +405,7 @@ async fn lock_and_sync( active, cache, printer, + preview, ) .await? .into_interpreter(); @@ -403,6 +415,7 @@ async fn lock_and_sync( // Discover or create the virtual environment. let environment = ProjectEnvironment::get_or_init( project.workspace(), + &groups, python.as_deref().map(PythonRequest::parse), &install_mirrors, &network_settings, @@ -414,6 +427,7 @@ async fn lock_and_sync( cache, DryRun::Disabled, printer, + preview, ) .await? .into_environment()?; @@ -430,6 +444,7 @@ async fn lock_and_sync( // Initialize any shared state. let state = UniversalState::default(); + let workspace_cache = WorkspaceCache::default(); // Lock and sync the environment, if necessary. let lock = match project::lock::LockOperation::new( @@ -440,6 +455,7 @@ async fn lock_and_sync( Box::new(DefaultResolveLogger), concurrency, cache, + &workspace_cache, printer, preview, ) @@ -466,15 +482,6 @@ async fn lock_and_sync( }; // Perform a full sync, because we don't know what exactly is affected by the version. - // TODO(ibraheem): Should we accept CLI overrides for this? Should we even sync here? - let extras = ExtrasSpecification::from_all_extras(); - let install_options = InstallOptions::default(); - - // Determine the default groups to include. - let default_groups = default_dependency_groups(project.pyproject_toml())?; - - // Determine the default extras to include. - let default_extras = DefaultExtras::default(); // Identify the installation target. 
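// Sketch of the pattern this hunk adopts: CLI group selections are merged
// with the project's default groups exactly once, and the resulting
// `*WithDefaults` value is threaded everywhere afterwards. The types below
// are simplified stand-ins for `DependencyGroups::with_defaults`, not the
// real uv definitions.
#[derive(Default)]
struct Groups {
    no_groups: Vec<String>, // groups explicitly disabled on the CLI
}

struct GroupsWithDefaults {
    requested: Groups,
    defaults: Vec<String>, // e.g. ["dev"] from the project configuration
}

impl Groups {
    fn with_defaults(self, defaults: Vec<String>) -> GroupsWithDefaults {
        GroupsWithDefaults { requested: self, defaults }
    }
}

impl GroupsWithDefaults {
    fn contains(&self, name: &str) -> bool {
        self.defaults.iter().any(|g| g == name)
            && !self.requested.no_groups.iter().any(|g| g == name)
    }
}

fn main() {
    let groups = Groups::default().with_defaults(vec!["dev".into()]);
    assert!(groups.contains("dev")); // enabled by default unless negated
}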
let target = match &project { @@ -494,8 +501,8 @@ async fn lock_and_sync( match project::sync::do_sync( target, venv, - &extras.with_defaults(default_extras), - &DependencyGroups::default().with_defaults(default_groups), + &extras, + &groups, EditableMode::Editable, install_options, Modifications::Sufficient, @@ -506,6 +513,7 @@ async fn lock_and_sync( installer_metadata, concurrency, cache, + workspace_cache, DryRun::Disabled, printer, preview, diff --git a/crates/uv/src/commands/python/find.rs b/crates/uv/src/commands/python/find.rs index 63e25fed1..e188e9d20 100644 --- a/crates/uv/src/commands/python/find.rs +++ b/crates/uv/src/commands/python/find.rs @@ -3,6 +3,7 @@ use std::fmt::Write; use std::path::Path; use uv_cache::Cache; +use uv_configuration::{DependencyGroupsWithDefaults, PreviewMode}; use uv_fs::Simplified; use uv_python::{ EnvironmentPreference, PythonDownloads, PythonInstallation, PythonPreference, PythonRequest, @@ -31,6 +32,7 @@ pub(crate) async fn find( python_preference: PythonPreference, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let environment_preference = if system { EnvironmentPreference::OnlySystem @@ -56,6 +58,8 @@ pub(crate) async fn find( } }; + // Don't enable the requires-python settings on groups + let groups = DependencyGroupsWithDefaults::none(); let WorkspacePython { source, python_request, @@ -63,6 +67,7 @@ pub(crate) async fn find( } = WorkspacePython::from_request( request.map(|request| PythonRequest::parse(&request)), project.as_ref().map(VirtualProject::workspace), + &groups, project_dir, no_config, ) @@ -73,6 +78,7 @@ pub(crate) async fn find( environment_preference, python_preference, cache, + preview, )?; // Warn if the discovered Python version is incompatible with the current workspace @@ -80,6 +86,7 @@ pub(crate) async fn find( match validate_project_requires_python( python.interpreter(), project.as_ref().map(VirtualProject::workspace), + &groups, &requires_python, &source, ) { @@ -116,6 +123,7 @@ pub(crate) async fn find_script( no_config: bool, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let interpreter = match ScriptInterpreter::discover( script, @@ -129,6 +137,7 @@ pub(crate) async fn find_script( Some(false), cache, printer, + preview, ) .await { diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs index 8f5beedc9..3df0cf91d 100644 --- a/crates/uv/src/commands/python/install.rs +++ b/crates/uv/src/commands/python/install.rs @@ -1,11 +1,14 @@ use std::borrow::Cow; +use std::collections::BTreeMap; use std::fmt::Write; use std::io::ErrorKind; use std::path::{Path, PathBuf}; +use std::str::FromStr; use anyhow::{Error, Result}; use futures::StreamExt; use futures::stream::FuturesUnordered; +use indexmap::IndexSet; use itertools::{Either, Itertools}; use owo_colors::OwoColorize; use rustc_hash::{FxHashMap, FxHashSet}; @@ -13,14 +16,17 @@ use tracing::{debug, trace}; use uv_configuration::PreviewMode; use uv_fs::Simplified; -use uv_python::downloads::{self, DownloadResult, ManagedPythonDownload, PythonDownloadRequest}; +use uv_python::downloads::{ + self, ArchRequest, DownloadResult, ManagedPythonDownload, PythonDownloadRequest, +}; use uv_python::managed::{ - ManagedPythonInstallation, ManagedPythonInstallations, python_executable_dir, + ManagedPythonInstallation, ManagedPythonInstallations, PythonMinorVersionLink, + create_link_to_executable, python_executable_dir, }; use uv_python::platform::{Arch, Libc}; use uv_python::{ - PythonDownloads, 
    PythonInstallationKey, PythonRequest, PythonVersionFile,
-    VersionFileDiscoveryOptions, VersionFilePreference,
+    PythonDownloads, PythonInstallationKey, PythonInstallationMinorVersionKey, PythonRequest,
+    PythonVersionFile, VersionFileDiscoveryOptions, VersionFilePreference, VersionRequest,
 };
 use uv_shell::Shell;
 use uv_trampoline_builder::{Launcher, LauncherKind};
@@ -32,7 +38,7 @@ use crate::commands::{ExitStatus, elapsed};
 use crate::printer::Printer;
 use crate::settings::NetworkSettings;
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 struct InstallRequest {
     /// The original request from the user
     request: PythonRequest,
@@ -82,6 +88,10 @@ impl InstallRequest {
     fn matches_installation(&self, installation: &ManagedPythonInstallation) -> bool {
         self.download_request.satisfied_by_key(installation.key())
     }
+
+    fn python_request(&self) -> &PythonRequest {
+        &self.request
+    }
 }
 
 impl std::fmt::Display for InstallRequest {
@@ -132,6 +142,7 @@ pub(crate) async fn install(
     install_dir: Option<PathBuf>,
     targets: Vec<String>,
     reinstall: bool,
+    upgrade: bool,
     force: bool,
     python_install_mirror: Option<String>,
     pypy_install_mirror: Option<String>,
@@ -153,34 +164,66 @@ pub(crate) async fn install(
         return Ok(ExitStatus::Failure);
     }
 
+    if upgrade && preview.is_disabled() {
+        warn_user!(
+            "`uv python upgrade` is experimental and may change without warning. Pass `--preview` to disable this warning"
+        );
+    }
+
     if default && targets.len() > 1 {
         anyhow::bail!("The `--default` flag cannot be used with multiple targets");
     }
 
+    // Read the existing installations, lock the directory for the duration
+    let installations = ManagedPythonInstallations::from_settings(install_dir.clone())?.init()?;
+    let installations_dir = installations.root();
+    let scratch_dir = installations.scratch();
+    let _lock = installations.lock().await?;
+    let existing_installations: Vec<_> = installations
+        .find_all()?
+        .inspect(|installation| trace!("Found existing installation {}", installation.key()))
+        .collect();
+
     // Resolve the requests
     let mut is_default_install = false;
+    let mut is_unspecified_upgrade = false;
     let requests: Vec<_> = if targets.is_empty() {
-        PythonVersionFile::discover(
-            project_dir,
-            &VersionFileDiscoveryOptions::default()
-                .with_no_config(no_config)
-                .with_preference(VersionFilePreference::Versions),
-        )
-        .await?
-        .map(PythonVersionFile::into_versions)
-        .unwrap_or_else(|| {
-            // If no version file is found and no requests were made
-            is_default_install = true;
-            vec![if reinstall {
-                // On bare `--reinstall`, reinstall all Python versions
-                PythonRequest::Any
-            } else {
-                PythonRequest::Default
-            }]
-        })
-        .into_iter()
-        .map(|a| InstallRequest::new(a, python_downloads_json_url.as_deref()))
-        .collect::<Result<Vec<_>>>()?
+        if upgrade {
+            is_unspecified_upgrade = true;
+            let mut minor_version_requests = IndexSet::<InstallRequest>::default();
+            for installation in &existing_installations {
+                let request = VersionRequest::major_minor_request_from_key(installation.key());
+                if let Ok(request) = InstallRequest::new(
+                    PythonRequest::Version(request),
+                    python_downloads_json_url.as_deref(),
+                ) {
+                    minor_version_requests.insert(request);
+                }
+            }
+            minor_version_requests.into_iter().collect::<Vec<_>>()
+        } else {
+            PythonVersionFile::discover(
+                project_dir,
+                &VersionFileDiscoveryOptions::default()
+                    .with_no_config(no_config)
+                    .with_preference(VersionFilePreference::Versions),
+            )
+            .await?
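// Sketch: with no explicit targets, `uv python upgrade` synthesizes one
// minor-version request per existing installation; `IndexSet` (from the
// `indexmap` crate, imported above) both deduplicates and preserves
// first-seen order. The tuples below are illustrative stand-ins for the
// real installation keys.
use indexmap::IndexSet;

fn main() {
    let installed = [(3, 11, 4), (3, 11, 9), (3, 12, 2)]; // (major, minor, patch)
    let requests: IndexSet<(u32, u32)> = installed
        .iter()
        .map(|&(major, minor, _)| (major, minor))
        .collect();
    assert_eq!(requests.len(), 2); // 3.11 is requested once despite two patch levels
}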
+ .map(PythonVersionFile::into_versions) + .unwrap_or_else(|| { + // If no version file is found and no requests were made + is_default_install = true; + vec![if reinstall { + // On bare `--reinstall`, reinstall all Python versions + PythonRequest::Any + } else { + PythonRequest::Default + }] + }) + .into_iter() + .map(|a| InstallRequest::new(a, python_downloads_json_url.as_deref())) + .collect::>>()? + } } else { targets .iter() @@ -190,18 +233,39 @@ pub(crate) async fn install( }; let Some(first_request) = requests.first() else { + if upgrade { + writeln!( + printer.stderr(), + "There are no installed versions to upgrade" + )?; + } return Ok(ExitStatus::Success); }; - // Read the existing installations, lock the directory for the duration - let installations = ManagedPythonInstallations::from_settings(install_dir)?.init()?; - let installations_dir = installations.root(); - let scratch_dir = installations.scratch(); - let _lock = installations.lock().await?; - let existing_installations: Vec<_> = installations - .find_all()? - .inspect(|installation| trace!("Found existing installation {}", installation.key())) - .collect(); + let requested_minor_versions = requests + .iter() + .filter_map(|request| { + if let PythonRequest::Version(VersionRequest::MajorMinor(major, minor, ..)) = + request.python_request() + { + uv_pep440::Version::from_str(&format!("{major}.{minor}")).ok() + } else { + None + } + }) + .collect::>(); + + if upgrade + && requests + .iter() + .any(|request| request.request.includes_patch()) + { + writeln!( + printer.stderr(), + "error: `uv python upgrade` only accepts minor versions" + )?; + return Ok(ExitStatus::Failure); + } // Find requests that are already satisfied let mut changelog = Changelog::default(); @@ -259,15 +323,20 @@ pub(crate) async fn install( } } } - (vec![], unsatisfied) } else { // If we can find one existing installation that matches the request, it is satisfied requests.iter().partition_map(|request| { - if let Some(installation) = existing_installations - .iter() - .find(|installation| request.matches_installation(installation)) - { + if let Some(installation) = existing_installations.iter().find(|installation| { + if upgrade { + // If this is an upgrade, the requested version is a minor version + // but the requested download is the highest patch for that minor + // version. We need to install it unless an exact match is found. 
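// Sketch of the `partition_map` idiom used in this hunk: a single pass
// splits the requests into already-satisfied and still-unsatisfied sets via
// `itertools::Either`. The integer "requests" are illustrative.
use itertools::{Either, Itertools};

fn main() {
    let installed = [311, 312];
    let (satisfied, unsatisfied): (Vec<i32>, Vec<i32>) =
        vec![311, 312, 313].into_iter().partition_map(|request| {
            if installed.contains(&request) {
                Either::Left(request) // an existing installation matches
            } else {
                Either::Right(request) // needs a download
            }
        });
    assert_eq!(satisfied, vec![311, 312]);
    assert_eq!(unsatisfied, vec![313]);
}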
+ request.download.key() == installation.key() + } else { + request.matches_installation(installation) + } + }) { debug!( "Found `{}` for request `{}`", installation.key().green(), @@ -335,6 +404,7 @@ pub(crate) async fn install( let mut errors = vec![]; let mut downloaded = Vec::with_capacity(downloads.len()); + let mut requests_by_new_installation = BTreeMap::new(); while let Some((download, result)) = tasks.next().await { match result { Ok(download_result) => { @@ -346,10 +416,19 @@ pub(crate) async fn install( let installation = ManagedPythonInstallation::new(path, download); changelog.installed.insert(installation.key().clone()); + for request in &requests { + // Take note of which installations satisfied which requests + if request.matches_installation(&installation) { + requests_by_new_installation + .entry(installation.key().clone()) + .or_insert(Vec::new()) + .push(request); + } + } if changelog.existing.contains(installation.key()) { changelog.uninstalled.insert(installation.key().clone()); } - downloaded.push(installation); + downloaded.push(installation.clone()); } Err(err) => { errors.push((download.key().clone(), anyhow::Error::new(err))); @@ -385,18 +464,24 @@ pub(crate) async fn install( .expect("We should have a bin directory with preview enabled") .as_path(); + let upgradeable = (default || is_default_install) + || requested_minor_versions.contains(&installation.key().version().python_version()); + create_bin_links( installation, bin, reinstall, force, default, + upgradeable, + upgrade, is_default_install, first_request, &existing_installations, &installations, &mut changelog, &mut errors, + preview, )?; if preview.is_enabled() { @@ -407,19 +492,92 @@ pub(crate) async fn install( } } + let minor_versions = + PythonInstallationMinorVersionKey::highest_installations_by_minor_version_key( + installations + .iter() + .copied() + .chain(existing_installations.iter()), + ); + + for installation in minor_versions.values() { + if upgrade { + // During an upgrade, update existing symlinks but avoid + // creating new ones. + installation.update_minor_version_link(preview)?; + } else { + installation.ensure_minor_version_link(preview)?; + } + } + if changelog.installed.is_empty() && errors.is_empty() { if is_default_install { writeln!( printer.stderr(), "Python is already installed. 
Use `uv python install ` to install another version.", )?; + } else if upgrade && requests.is_empty() { + writeln!( + printer.stderr(), + "There are no installed versions to upgrade" + )?; } else if requests.len() > 1 { - writeln!(printer.stderr(), "All requested versions already installed")?; + if upgrade { + if is_unspecified_upgrade { + writeln!( + printer.stderr(), + "All versions already on latest supported patch release" + )?; + } else { + writeln!( + printer.stderr(), + "All requested versions already on latest supported patch release" + )?; + } + } else { + writeln!(printer.stderr(), "All requested versions already installed")?; + } } return Ok(ExitStatus::Success); } if !changelog.installed.is_empty() { + for install_key in &changelog.installed { + // Make a note if the selected python is non-native for the architecture, + // if none of the matching user requests were explicit + let native_arch = Arch::from_env(); + if install_key.arch().family() != native_arch.family() { + let not_explicit = + requests_by_new_installation + .get(install_key) + .and_then(|requests| { + let all_non_explicit = requests.iter().all(|request| { + if let PythonRequest::Key(key) = &request.request { + !matches!(key.arch(), Some(ArchRequest::Explicit(_))) + } else { + true + } + }); + if all_non_explicit { + requests.iter().next() + } else { + None + } + }); + if let Some(not_explicit) = not_explicit { + let native_request = + not_explicit.download_request.clone().with_arch(native_arch); + writeln!( + printer.stderr(), + "{} uv selected a Python distribution with an emulated architecture ({}) for your platform because support for the native architecture ({}) is not yet mature; to override this behaviour, request the native architecture explicitly with: {}", + "note:".bold(), + install_key.arch(), + native_arch, + native_request + )?; + } + } + } if changelog.installed.len() == 1 { let installed = changelog.installed.iter().next().unwrap(); // Ex) "Installed Python 3.9.7 in 1.68s" @@ -520,12 +678,15 @@ fn create_bin_links( reinstall: bool, force: bool, default: bool, + upgradeable: bool, + upgrade: bool, is_default_install: bool, first_request: &InstallRequest, existing_installations: &[ManagedPythonInstallation], installations: &[&ManagedPythonInstallation], changelog: &mut Changelog, errors: &mut Vec<(PythonInstallationKey, Error)>, + preview: PreviewMode, ) -> Result<(), Error> { let targets = if (default || is_default_install) && first_request.matches_installation(installation) { @@ -540,7 +701,19 @@ fn create_bin_links( for target in targets { let target = bin.join(target); - match installation.create_bin_link(&target) { + let executable = if upgradeable { + if let Some(minor_version_link) = + PythonMinorVersionLink::from_installation(installation, preview) + { + minor_version_link.symlink_executable.clone() + } else { + installation.executable(false) + } + } else { + installation.executable(false) + }; + + match create_link_to_executable(&target, executable.clone()) { Ok(()) => { debug!( "Installed executable at `{}` for {}", @@ -589,13 +762,23 @@ fn create_bin_links( // There's an existing executable we don't manage, require `--force` if valid_link { if !force { - errors.push(( - installation.key().clone(), - anyhow::anyhow!( - "Executable already exists at `{}` but is not managed by uv; use `--force` to replace it", - to.simplified_display() - ), - )); + if upgrade { + warn_user!( + "Executable already exists at `{}` but is not managed by uv; use `uv python install {}.{}{} --force` to replace it", 
+ to.simplified_display(), + installation.key().major(), + installation.key().minor(), + installation.key().variant().suffix() + ); + } else { + errors.push(( + installation.key().clone(), + anyhow::anyhow!( + "Executable already exists at `{}` but is not managed by uv; use `--force` to replace it", + to.simplified_display() + ), + )); + } continue; } debug!( @@ -676,7 +859,7 @@ fn create_bin_links( .remove(&target); } - installation.create_bin_link(&target)?; + create_link_to_executable(&target, executable)?; debug!( "Updated executable at `{}` to {}", target.simplified_display(), @@ -747,8 +930,7 @@ fn warn_if_not_on_path(bin: &Path) { /// Find the [`ManagedPythonInstallation`] corresponding to an executable link installed at the /// given path, if any. /// -/// Like [`ManagedPythonInstallation::is_bin_link`], but this method will only resolve the -/// given path one time. +/// Will resolve symlinks on Unix. On Windows, will resolve the target link for a trampoline. fn find_matching_bin_link<'a>( mut installations: impl Iterator, path: &Path, @@ -757,13 +939,13 @@ fn find_matching_bin_link<'a>( if !path.is_symlink() { return None; } - path.read_link().ok()? + fs_err::canonicalize(path).ok()? } else if cfg!(windows) { let launcher = Launcher::try_from_path(path).ok()??; if !matches!(launcher.kind, LauncherKind::Python) { return None; } - launcher.python_path + dunce::canonicalize(launcher.python_path).ok()? } else { unreachable!("Only Windows and Unix are supported") }; diff --git a/crates/uv/src/commands/python/list.rs b/crates/uv/src/commands/python/list.rs index 71bfb9c55..2cd54747c 100644 --- a/crates/uv/src/commands/python/list.rs +++ b/crates/uv/src/commands/python/list.rs @@ -2,6 +2,7 @@ use serde::Serialize; use std::collections::BTreeSet; use std::fmt::Write; use uv_cli::PythonListFormat; +use uv_configuration::PreviewMode; use uv_pep440::Version; use anyhow::Result; @@ -64,6 +65,7 @@ pub(crate) async fn list( python_downloads: PythonDownloads, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let request = request.as_deref().map(PythonRequest::parse); let base_download_request = if python_preference == PythonPreference::OnlySystem { @@ -124,6 +126,7 @@ pub(crate) async fn list( EnvironmentPreference::OnlySystem, python_preference, cache, + preview, ) // Raise discovery errors if critical .filter(|result| { diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs index e0b241bcc..f0dc06cff 100644 --- a/crates/uv/src/commands/python/pin.rs +++ b/crates/uv/src/commands/python/pin.rs @@ -8,6 +8,7 @@ use tracing::debug; use uv_cache::Cache; use uv_client::BaseClientBuilder; +use uv_configuration::{DependencyGroupsWithDefaults, PreviewMode}; use uv_dirs::user_uv_config_dir; use uv_fs::Simplified; use uv_python::{ @@ -39,6 +40,7 @@ pub(crate) async fn pin( network_settings: NetworkSettings, cache: &Cache, printer: Printer, + preview: PreviewMode, ) -> Result { let workspace_cache = WorkspaceCache::default(); let virtual_project = if no_project { @@ -55,16 +57,13 @@ pub(crate) async fn pin( } }; - let version_file = if global { - if let Some(path) = user_uv_config_dir() { - PythonVersionFile::discover_user_config(path, &VersionFileDiscoveryOptions::default()) - .await - } else { - Ok(None) - } - } else { - PythonVersionFile::discover(project_dir, &VersionFileDiscoveryOptions::default()).await - }; + // Search for an existing file, we won't necessarily write to this, we'll construct a target + // path if there's a request later on. 
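// Sketch of the resolution change in `find_matching_bin_link` above: the
// switch from a single `read_link` to `fs_err::canonicalize` (or `dunce` on
// Windows) follows every hop of a chain such as
// bin shim -> minor-version link -> real interpreter. `resolve_link` is a
// hypothetical stand-in using std rather than the crates in the diff.
use std::path::{Path, PathBuf};

fn resolve_link(path: &Path) -> Option<PathBuf> {
    if !path.is_symlink() {
        return None;
    }
    std::fs::canonicalize(path).ok() // resolves the full chain, not one hop
}

fn main() {
    if let Some(target) = resolve_link(Path::new("/usr/local/bin/python3")) {
        println!("resolves to {}", target.display());
    }
}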
+ let version_file = PythonVersionFile::discover( + project_dir, + &VersionFileDiscoveryOptions::default().with_no_local(global), + ) + .await; if rm { let Some(file) = version_file? else { @@ -93,6 +92,7 @@ pub(crate) async fn pin( virtual_project, python_preference, cache, + preview, ); } } @@ -123,6 +123,7 @@ pub(crate) async fn pin( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await { @@ -259,6 +260,7 @@ fn warn_if_existing_pin_incompatible_with_project( virtual_project: &VirtualProject, python_preference: PythonPreference, cache: &Cache, + preview: PreviewMode, ) { // Check if the pinned version is compatible with the project. if let Some(pin_version) = pep440_version_from_request(pin) { @@ -283,6 +285,7 @@ fn warn_if_existing_pin_incompatible_with_project( EnvironmentPreference::OnlySystem, python_preference, cache, + preview, ) { Ok(python) => { let python_version = python.python_version(); @@ -322,6 +325,9 @@ struct Pin<'a> { /// Checks if the pinned Python version is compatible with the workspace/project's `Requires-Python`. fn assert_pin_compatible_with_project(pin: &Pin, virtual_project: &VirtualProject) -> Result<()> { + // Don't factor in requires-python settings on dependency-groups + let groups = DependencyGroupsWithDefaults::none(); + let (requires_python, project_type) = match virtual_project { VirtualProject::Project(project_workspace) => { debug!( @@ -329,7 +335,8 @@ fn assert_pin_compatible_with_project(pin: &Pin, virtual_project: &VirtualProjec project_workspace.project_name(), project_workspace.workspace().install_path().display() ); - let requires_python = find_requires_python(project_workspace.workspace())?; + + let requires_python = find_requires_python(project_workspace.workspace(), &groups)?; (requires_python, "project") } VirtualProject::NonProject(workspace) => { @@ -337,7 +344,7 @@ fn assert_pin_compatible_with_project(pin: &Pin, virtual_project: &VirtualProjec "Discovered virtual workspace at: {}", workspace.install_path().display() ); - let requires_python = find_requires_python(workspace)?; + let requires_python = find_requires_python(workspace, &groups)?; (requires_python, "workspace") } }; diff --git a/crates/uv/src/commands/python/uninstall.rs b/crates/uv/src/commands/python/uninstall.rs index ac159344c..642942d07 100644 --- a/crates/uv/src/commands/python/uninstall.rs +++ b/crates/uv/src/commands/python/uninstall.rs @@ -5,6 +5,7 @@ use std::path::PathBuf; use anyhow::Result; use futures::StreamExt; use futures::stream::FuturesUnordered; +use indexmap::IndexSet; use itertools::Itertools; use owo_colors::OwoColorize; use rustc_hash::{FxHashMap, FxHashSet}; @@ -13,8 +14,10 @@ use tracing::{debug, warn}; use uv_configuration::PreviewMode; use uv_fs::Simplified; use uv_python::downloads::PythonDownloadRequest; -use uv_python::managed::{ManagedPythonInstallations, python_executable_dir}; -use uv_python::{PythonInstallationKey, PythonRequest}; +use uv_python::managed::{ + ManagedPythonInstallations, PythonMinorVersionLink, python_executable_dir, +}; +use uv_python::{PythonInstallationKey, PythonInstallationMinorVersionKey, PythonRequest}; use crate::commands::python::install::format_executables; use crate::commands::python::{ChangeEvent, ChangeEventKind}; @@ -87,7 +90,6 @@ async fn do_uninstall( // Always include pre-releases in uninstalls .map(|result| result.map(|request| request.with_prereleases(true))) .collect::>>()?; - let 
installed_installations: Vec<_> = installations.find_all()?.collect(); let mut matching_installations = BTreeSet::default(); for (request, download_request) in requests.iter().zip(download_requests) { @@ -198,13 +200,13 @@ async fn do_uninstall( }); } - let mut uninstalled = vec![]; + let mut uninstalled = IndexSet::::default(); let mut errors = vec![]; while let Some((key, result)) = tasks.next().await { if let Err(err) = result { errors.push((key.clone(), anyhow::Error::new(err))); } else { - uninstalled.push(key.clone()); + uninstalled.insert(key.clone()); } } @@ -218,29 +220,86 @@ async fn do_uninstall( uv_python::windows_registry::remove_orphan_registry_entries(&installed_installations); } + // Read all existing managed installations and find the highest installed patch + // for each installed minor version. Ensure the minor version link directory + // is still valid. + let uninstalled_minor_versions: IndexSet<_> = uninstalled + .iter() + .map(PythonInstallationMinorVersionKey::ref_cast) + .collect(); + let remaining_installations: Vec<_> = installed_installations + .into_iter() + .filter(|installation| !uninstalled.contains(installation.key())) + .collect(); + + let remaining_minor_versions = + PythonInstallationMinorVersionKey::highest_installations_by_minor_version_key( + remaining_installations.iter(), + ); + + for (_, installation) in remaining_minor_versions + .iter() + .filter(|(minor_version, _)| uninstalled_minor_versions.contains(minor_version)) + { + installation.update_minor_version_link(preview)?; + } + // For each uninstalled installation, check if there are no remaining installations + // for its minor version. If there are none remaining, remove the symlink directory + // (or junction on Windows) if it exists. + for installation in &matching_installations { + if !remaining_minor_versions.contains_key(installation.minor_version_key()) { + if let Some(minor_version_link) = + PythonMinorVersionLink::from_installation(installation, preview) + { + if minor_version_link.exists() { + let result = if cfg!(windows) { + fs_err::remove_dir(minor_version_link.symlink_directory.as_path()) + } else { + fs_err::remove_file(minor_version_link.symlink_directory.as_path()) + }; + if result.is_err() { + return Err(anyhow::anyhow!( + "Failed to remove symlink directory {}", + minor_version_link.symlink_directory.display() + )); + } + let symlink_term = if cfg!(windows) { + "junction" + } else { + "symlink directory" + }; + debug!( + "Removed {}: {}", + symlink_term, + minor_version_link.symlink_directory.to_string_lossy() + ); + } + } + } + } + // Report on any uninstalled installations. 
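// Sketch of the reduction `highest_installations_by_minor_version_key`
// performs in this hunk: after an uninstall, each minor version's link
// should target the highest remaining patch release. Plain tuples stand in
// for the real installation keys.
use std::collections::BTreeMap;

fn main() {
    let remaining = [(3, 11, 4), (3, 11, 9), (3, 12, 2)]; // (major, minor, patch)
    let mut highest: BTreeMap<(u32, u32), u32> = BTreeMap::new();
    for (major, minor, patch) in remaining {
        let entry = highest.entry((major, minor)).or_insert(patch);
        *entry = (*entry).max(patch);
    }
    assert_eq!(highest[&(3, 11)], 9); // the 3.11 link should point at 3.11.9
}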
- if !uninstalled.is_empty() { - if let [uninstalled] = uninstalled.as_slice() { + if let Some(first_uninstalled) = uninstalled.first() { + if uninstalled.len() == 1 { // Ex) "Uninstalled Python 3.9.7 in 1.68s" writeln!( printer.stderr(), "{}", format!( "Uninstalled {} {}", - format!("Python {}", uninstalled.version()).bold(), + format!("Python {}", first_uninstalled.version()).bold(), format!("in {}", elapsed(start.elapsed())).dimmed() ) .dimmed() )?; } else { // Ex) "Uninstalled 2 versions in 1.68s" - let s = if uninstalled.len() == 1 { "" } else { "s" }; writeln!( printer.stderr(), "{}", format!( "Uninstalled {} {}", - format!("{} version{s}", uninstalled.len()).bold(), + format!("{} versions", uninstalled.len()).bold(), format!("in {}", elapsed(start.elapsed())).dimmed() ) .dimmed() diff --git a/crates/uv/src/commands/tool/common.rs b/crates/uv/src/commands/tool/common.rs index 77aba8619..ffc1b5645 100644 --- a/crates/uv/src/commands/tool/common.rs +++ b/crates/uv/src/commands/tool/common.rs @@ -7,6 +7,7 @@ use std::{collections::BTreeSet, ffi::OsString}; use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::BaseClientBuilder; +use uv_configuration::PreviewMode; use uv_distribution_types::Requirement; use uv_distribution_types::{InstalledDist, Name}; use uv_fs::Simplified; @@ -80,6 +81,7 @@ pub(crate) async fn refine_interpreter( python_preference: PythonPreference, python_downloads: PythonDownloads, cache: &Cache, + preview: PreviewMode, ) -> anyhow::Result, ProjectError> { let pip::operations::Error::Resolve(uv_resolver::ResolveError::NoSolution(no_solution_err)) = err @@ -151,6 +153,7 @@ pub(crate) async fn refine_interpreter( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -158,14 +161,18 @@ pub(crate) async fn refine_interpreter( Ok(Some(interpreter)) } -/// Installs tool executables for a given package and handles any conflicts. -pub(crate) fn install_executables( +/// Finalizes a tool installation, after creation of an environment. +/// +/// Installs tool executables for a given package, handling any conflicts. +/// +/// Adds a receipt for the tool. 
+pub(crate) fn finalize_tool_install( environment: &PythonEnvironment, name: &PackageName, installed_tools: &InstalledTools, options: ToolOptions, force: bool, - python: Option, + python: Option, requirements: Vec, constraints: Vec, overrides: Vec, @@ -211,7 +218,7 @@ pub(crate) fn install_executables( if target_entry_points.is_empty() { writeln!( printer.stdout(), - "No executables are provided by `{from}`", + "No executables are provided by package `{from}`; removing tool", from = name.cyan() )?; @@ -347,7 +354,9 @@ fn hint_executable_from_dependency( let command = format!("uv tool install {}", package.name()); writeln!( printer.stdout(), - "However, an executable with the name `{}` is available via dependency `{}`.\nDid you mean `{}`?", + "{}{} An executable with the name `{}` is available via dependency `{}`.\n Did you mean `{}`?", + "hint".bold().cyan(), + ":".bold(), name.cyan(), package.name().cyan(), command.bold(), @@ -356,7 +365,9 @@ fn hint_executable_from_dependency( packages => { writeln!( printer.stdout(), - "However, an executable with the name `{}` is available via the following dependencies::", + "{}{} An executable with the name `{}` is available via the following dependencies::", + "hint".bold().cyan(), + ":".bold(), name.cyan(), )?; @@ -365,7 +376,7 @@ fn hint_executable_from_dependency( } writeln!( printer.stdout(), - "Did you mean to install one of them instead?" + " Did you mean to install one of them instead?" )?; } } diff --git a/crates/uv/src/commands/tool/install.rs b/crates/uv/src/commands/tool/install.rs index 86b0d4bc6..5ced211b3 100644 --- a/crates/uv/src/commands/tool/install.rs +++ b/crates/uv/src/commands/tool/install.rs @@ -33,7 +33,9 @@ use crate::commands::project::{ EnvironmentSpecification, PlatformState, ProjectError, resolve_environment, resolve_names, sync_environment, update_environment, }; -use crate::commands::tool::common::{install_executables, refine_interpreter, remove_entrypoints}; +use crate::commands::tool::common::{ + finalize_tool_install, refine_interpreter, remove_entrypoints, +}; use crate::commands::tool::{Target, ToolRequest}; use crate::commands::{diagnostics, reporters::PythonDownloadReporter}; use crate::printer::Printer; @@ -85,6 +87,7 @@ pub(crate) async fn install( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -474,6 +477,7 @@ pub(crate) async fn install( let resolution = resolve_environment( spec.clone(), &interpreter, + Constraints::from_requirements(build_constraints.iter().cloned()), &settings.resolver, &network_settings, &state, @@ -506,6 +510,7 @@ pub(crate) async fn install( python_preference, python_downloads, &cache, + preview, ) .await .ok() @@ -526,6 +531,7 @@ pub(crate) async fn install( match resolve_environment( spec, &interpreter, + Constraints::from_requirements(build_constraints.iter().cloned()), &settings.resolver, &network_settings, &state, @@ -552,7 +558,7 @@ pub(crate) async fn install( }, }; - let environment = installed_tools.create_environment(&from.name, interpreter)?; + let environment = installed_tools.create_environment(&from.name, interpreter, preview)?; // At this point, we removed any existing environment, so we should remove any of its // executables. 
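// Sketch of the restyled hint output introduced in this file: a bold cyan
// `hint:` prefix with the follow-up question indented beneath it.
// `owo_colors` is the styling crate already imported here; the executable
// and dependency names are illustrative.
use owo_colors::OwoColorize;

fn main() {
    let name = "http";
    let dependency = "httpie";
    println!(
        "{}{} An executable with the name `{}` is available via dependency `{}`.\n  Did you mean `uv tool install {}`?",
        "hint".bold().cyan(),
        ":".bold(),
        name.cyan(),
        dependency.cyan(),
        dependency.bold(),
    );
}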
@@ -592,13 +598,13 @@ pub(crate) async fn install( } }; - install_executables( + finalize_tool_install( &environment, &from.name, &installed_tools, options, force || invalid_tool_receipt, - python, + python_request, requirements, constraints, overrides, diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index 4d270c445..2746d65ad 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -747,6 +747,7 @@ async fn get_or_create_environment( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(); @@ -1036,6 +1037,7 @@ async fn get_or_create_environment( python_preference, python_downloads, cache, + preview, ) .await .ok() diff --git a/crates/uv/src/commands/tool/upgrade.rs b/crates/uv/src/commands/tool/upgrade.rs index 9f4d3bcab..95b7d1e2d 100644 --- a/crates/uv/src/commands/tool/upgrade.rs +++ b/crates/uv/src/commands/tool/upgrade.rs @@ -29,7 +29,7 @@ use crate::commands::project::{ }; use crate::commands::reporters::PythonDownloadReporter; use crate::commands::tool::common::remove_entrypoints; -use crate::commands::{ExitStatus, conjunction, tool::common::install_executables}; +use crate::commands::{ExitStatus, conjunction, tool::common::finalize_tool_install}; use crate::printer::Printer; use crate::settings::{NetworkSettings, ResolverInstallerSettings}; @@ -99,6 +99,7 @@ pub(crate) async fn upgrade( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await? .into_interpreter(), @@ -297,6 +298,7 @@ async fn upgrade_tool( let resolution = resolve_environment( spec.into(), interpreter, + build_constraints.clone(), &settings.resolver, network_settings, &state, @@ -308,7 +310,7 @@ async fn upgrade_tool( ) .await?; - let environment = installed_tools.create_environment(name, interpreter.clone())?; + let environment = installed_tools.create_environment(name, interpreter.clone(), preview)?; let environment = sync_environment( environment, @@ -375,7 +377,7 @@ async fn upgrade_tool( remove_entrypoints(&existing_tool_receipt); // If we modified the target tool, reinstall the entrypoints. 
- install_executables( + finalize_tool_install( &environment, name, installed_tools, diff --git a/crates/uv/src/commands/venv.rs b/crates/uv/src/commands/venv.rs index c0cf03921..9334d844d 100644 --- a/crates/uv/src/commands/venv.rs +++ b/crates/uv/src/commands/venv.rs @@ -13,14 +13,15 @@ use thiserror::Error; use uv_cache::Cache; use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder}; use uv_configuration::{ - BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, KeyringProviderType, - NoBinary, NoBuild, PreviewMode, SourceStrategy, + BuildOptions, Concurrency, ConfigSettings, Constraints, DependencyGroups, IndexStrategy, + KeyringProviderType, NoBinary, NoBuild, PreviewMode, SourceStrategy, }; use uv_dispatch::{BuildDispatch, SharedState}; use uv_distribution_types::Requirement; use uv_distribution_types::{DependencyMetadata, Index, IndexLocations}; use uv_fs::Simplified; use uv_install_wheel::LinkMode; +use uv_normalize::DefaultGroups; use uv_python::{ EnvironmentPreference, PythonDownloads, PythonInstallation, PythonPreference, PythonRequest, }; @@ -39,12 +40,14 @@ use crate::commands::reporters::PythonDownloadReporter; use crate::printer::Printer; use crate::settings::NetworkSettings; +use super::project::default_dependency_groups; + /// Create a virtual environment. #[allow(clippy::unnecessary_wraps, clippy::fn_params_excessive_bools)] pub(crate) async fn venv( project_dir: &Path, path: Option, - python_request: Option<&str>, + python_request: Option, install_mirrors: PythonInstallMirrors, python_preference: PythonPreference, python_downloads: PythonDownloads, @@ -127,7 +130,7 @@ enum VenvError { async fn venv_impl( project_dir: &Path, path: Option, - python_request: Option<&str>, + python_request: Option, install_mirrors: PythonInstallMirrors, link_mode: LinkMode, index_locations: &IndexLocations, @@ -197,13 +200,21 @@ async fn venv_impl( let reporter = PythonDownloadReporter::single(printer); + // If the default dependency-groups demand a higher requires-python + // we should bias an empty venv to that to avoid churn. + let default_groups = match &project { + Some(project) => default_dependency_groups(project.pyproject_toml()).into_diagnostic()?, + None => DefaultGroups::default(), + }; + let groups = DependencyGroups::default().with_defaults(default_groups); let WorkspacePython { source, python_request, requires_python, } = WorkspacePython::from_request( - python_request.map(PythonRequest::parse), + python_request, project.as_ref().map(VirtualProject::workspace), + &groups, project_dir, no_config, ) @@ -223,6 +234,7 @@ async fn venv_impl( install_mirrors.python_install_mirror.as_deref(), install_mirrors.pypy_install_mirror.as_deref(), install_mirrors.python_downloads_json_url.as_deref(), + preview, ) .await .into_diagnostic()?; @@ -230,22 +242,14 @@ async fn venv_impl( python.into_interpreter() }; - // Add all authenticated sources to the cache. 
- for index in index_locations.allowed_indexes() { - if let Some(credentials) = index.credentials() { - let credentials = Arc::new(credentials); - uv_auth::store_credentials(index.raw_url(), credentials.clone()); - if let Some(root_url) = index.root_url() { - uv_auth::store_credentials(&root_url, credentials.clone()); - } - } - } + index_locations.cache_index_credentials(); // Check if the discovered Python version is incompatible with the current workspace if let Some(requires_python) = requires_python { match validate_project_requires_python( &interpreter, project.as_ref().map(VirtualProject::workspace), + &groups, &requires_python, &source, ) { @@ -264,6 +268,11 @@ async fn venv_impl( ) .into_diagnostic()?; + let upgradeable = preview.is_enabled() + && python_request + .as_ref() + .is_none_or(|request| !request.includes_patch()); + // Create the virtual environment. let venv = uv_virtualenv::create_venv( &path, @@ -273,6 +282,8 @@ async fn venv_impl( allow_existing, relocatable, seed, + upgradeable, + preview, ) .map_err(VenvError::Creation)?; diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index b7b1a7859..ab4aee9e9 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -35,6 +35,7 @@ use uv_fs::{CWD, Simplified}; use uv_pep440::release_specifiers_to_ranges; use uv_pep508::VersionOrUrl; use uv_pypi_types::{ParsedDirectoryUrl, ParsedUrl}; +use uv_python::PythonRequest; use uv_requirements::RequirementsSource; use uv_requirements_txt::RequirementsTxtRequirement; use uv_scripts::{Pep723Error, Pep723Item, Pep723ItemRef, Pep723Metadata, Pep723Script}; @@ -399,7 +400,7 @@ async fn run(mut cli: Cli) -> Result { }))?; // Don't initialize the rayon threadpool yet, this is too costly when we're doing a noop sync. - uv_configuration::RAYON_PARALLELISM.store(globals.concurrency.installs, Ordering::SeqCst); + uv_configuration::RAYON_PARALLELISM.store(globals.concurrency.installs, Ordering::Relaxed); debug!("uv {}", uv_cli::version::uv_self_version()); @@ -793,6 +794,7 @@ async fn run(mut cli: Cli) -> Result { &globals.network_settings, args.dry_run, printer, + globals.preview, ) .await } @@ -814,6 +816,7 @@ async fn run(mut cli: Cli) -> Result { args.paths, &cache, printer, + globals.preview, ) } Commands::Pip(PipNamespace { @@ -845,6 +848,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.system, &cache, printer, + globals.preview, ) .await } @@ -866,6 +870,7 @@ async fn run(mut cli: Cli) -> Result { args.files, &cache, printer, + globals.preview, ) } Commands::Pip(PipNamespace { @@ -897,6 +902,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.system, &cache, printer, + globals.preview, ) .await } @@ -915,6 +921,7 @@ async fn run(mut cli: Cli) -> Result { args.settings.system, &cache, printer, + globals.preview, ) } Commands::Cache(CacheNamespace { @@ -1016,10 +1023,13 @@ async fn run(mut cli: Cli) -> Result { } }); + let python_request: Option = + args.settings.python.as_deref().map(PythonRequest::parse); + commands::venv( &project_dir, args.path, - args.settings.python.as_deref(), + python_request, args.settings.install_mirrors, globals.python_preference, globals.python_downloads, @@ -1370,6 +1380,7 @@ async fn run(mut cli: Cli) -> Result { globals.python_downloads, &cache, printer, + globals.preview, ) .await } @@ -1379,12 +1390,43 @@ async fn run(mut cli: Cli) -> Result { // Resolve the settings from the command-line arguments and workspace configuration. 
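// Sketch of why the `SeqCst` -> `Relaxed` change above is sound:
// `RAYON_PARALLELISM` is a standalone configuration cell, so no other
// memory accesses need to be ordered against the store and the cheapest
// ordering suffices.
use std::sync::atomic::{AtomicUsize, Ordering};

static PARALLELISM: AtomicUsize = AtomicUsize::new(0);

fn main() {
    PARALLELISM.store(8, Ordering::Relaxed);
    assert_eq!(PARALLELISM.load(Ordering::Relaxed), 8);
}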
let args = settings::PythonInstallSettings::resolve(args, filesystem); show_settings!(args); + // TODO(john): If we later want to support `--upgrade`, we need to replace this. + let upgrade = false; commands::python_install( &project_dir, args.install_dir, args.targets, args.reinstall, + upgrade, + args.force, + args.python_install_mirror, + args.pypy_install_mirror, + args.python_downloads_json_url, + globals.network_settings, + args.default, + globals.python_downloads, + cli.top_level.no_config, + globals.preview, + printer, + ) + .await + } + Commands::Python(PythonNamespace { + command: PythonCommand::Upgrade(args), + }) => { + // Resolve the settings from the command-line arguments and workspace configuration. + let args = settings::PythonUpgradeSettings::resolve(args, filesystem); + show_settings!(args); + let reinstall = false; + let upgrade = true; + + commands::python_install( + &project_dir, + args.install_dir, + args.targets, + reinstall, + upgrade, args.force, args.python_install_mirror, args.pypy_install_mirror, @@ -1433,6 +1475,7 @@ async fn run(mut cli: Cli) -> Result { cli.top_level.no_config, &cache, printer, + globals.preview, ) .await } else { @@ -1446,6 +1489,7 @@ async fn run(mut cli: Cli) -> Result { globals.python_preference, &cache, printer, + globals.preview, ) .await } @@ -1472,6 +1516,7 @@ async fn run(mut cli: Cli) -> Result { globals.network_settings, &cache, printer, + globals.preview, ) .await } @@ -1704,7 +1749,7 @@ async fn run_project( args.no_project, no_config, args.extras, - args.dev, + args.groups, args.editable, args.modifications, args.python, @@ -1752,7 +1797,7 @@ async fn run_project( args.all_packages, args.package, args.extras, - args.dev, + args.groups, args.editable, args.install_options, args.modifications, @@ -2043,7 +2088,7 @@ async fn run_project( Box::pin(commands::tree( project_dir, - args.dev, + args.groups, args.locked, args.frozen, args.universal, @@ -2095,7 +2140,7 @@ async fn run_project( args.install_options, args.output_file, args.extras, - args.dev, + args.groups, args.editable, args.locked, args.frozen, diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index b5eb2f5d0..004ce5053 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -10,9 +10,9 @@ use uv_cli::{ AddArgs, ColorChoice, ExternalCommand, GlobalArgs, InitArgs, ListFormat, LockArgs, Maybe, PipCheckArgs, PipCompileArgs, PipFreezeArgs, PipInstallArgs, PipListArgs, PipShowArgs, PipSyncArgs, PipTreeArgs, PipUninstallArgs, PythonFindArgs, PythonInstallArgs, PythonListArgs, - PythonListFormat, PythonPinArgs, PythonUninstallArgs, RemoveArgs, RunArgs, SyncArgs, - ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs, VenvArgs, - VersionArgs, VersionBump, VersionFormat, + PythonListFormat, PythonPinArgs, PythonUninstallArgs, PythonUpgradeArgs, RemoveArgs, RunArgs, + SyncArgs, ToolDirArgs, ToolInstallArgs, ToolListArgs, ToolRunArgs, ToolUninstallArgs, TreeArgs, + VenvArgs, VersionArgs, VersionBump, VersionFormat, }; use uv_cli::{ AuthorFrom, BuildArgs, ExportArgs, PublishArgs, PythonDirArgs, ResolverInstallerArgs, @@ -54,7 +54,6 @@ use crate::commands::{InitKind, InitProjectKind, pip::operations::Modifications} const PYPI_PUBLISH_URL: &str = "https://upload.pypi.org/legacy/"; /// The resolved global settings to use for any invocation of the CLI. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct GlobalSettings { pub(crate) required_version: Option, @@ -119,16 +118,20 @@ impl GlobalSettings { }, show_settings: args.show_settings, preview: PreviewMode::from( - flag(args.preview, args.no_preview) + flag(args.preview, args.no_preview, "preview") .combine(workspace.and_then(|workspace| workspace.globals.preview)) .unwrap_or(false), ), python_preference, - python_downloads: flag(args.allow_python_downloads, args.no_python_downloads) - .map(PythonDownloads::from) - .combine(env(env::UV_PYTHON_DOWNLOADS)) - .combine(workspace.and_then(|workspace| workspace.globals.python_downloads)) - .unwrap_or_default(), + python_downloads: flag( + args.allow_python_downloads, + args.no_python_downloads, + "python-downloads", + ) + .map(PythonDownloads::from) + .combine(env(env::UV_PYTHON_DOWNLOADS)) + .combine(workspace.and_then(|workspace| workspace.globals.python_downloads)) + .unwrap_or_default(), // Disable the progress bar with `RUST_LOG` to avoid progress fragments interleaving // with log messages. no_progress: args.no_progress || std::env::var_os(EnvVars::RUST_LOG).is_some(), @@ -162,7 +165,7 @@ pub(crate) struct NetworkSettings { impl NetworkSettings { pub(crate) fn resolve(args: &GlobalArgs, workspace: Option<&FilesystemOptions>) -> Self { - let connectivity = if flag(args.offline, args.no_offline) + let connectivity = if flag(args.offline, args.no_offline, "offline") .combine(workspace.and_then(|workspace| workspace.globals.offline)) .unwrap_or(false) { @@ -170,7 +173,7 @@ impl NetworkSettings { } else { Connectivity::Online }; - let native_tls = flag(args.native_tls, args.no_native_tls) + let native_tls = flag(args.native_tls, args.no_native_tls, "native-tls") .combine(workspace.and_then(|workspace| workspace.globals.native_tls)) .unwrap_or(false); let allow_insecure_host = args @@ -199,7 +202,6 @@ impl NetworkSettings { } /// The resolved cache settings to use for any invocation of the CLI. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct CacheSettings { pub(crate) no_cache: bool, @@ -222,7 +224,6 @@ impl CacheSettings { } /// The resolved settings to use for a `init` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct InitSettings { pub(crate) path: Option, @@ -277,8 +278,12 @@ impl InitSettings { (_, _, _) => unreachable!("`app`, `lib`, and `script` are mutually exclusive"), }; - let package = flag(package || build_backend.is_some(), no_package || r#virtual) - .unwrap_or(kind.packaged_by_default()); + let package = flag( + package || build_backend.is_some(), + no_package || r#virtual, + "virtual", + ) + .unwrap_or(kind.packaged_by_default()); let install_mirrors = filesystem .map(|fs| fs.install_mirrors.clone()) @@ -298,7 +303,7 @@ impl InitSettings { build_backend, no_readme: no_readme || bare, author_from, - pin_python: flag(pin_python, no_pin_python).unwrap_or(!bare), + pin_python: flag(pin_python, no_pin_python, "pin-python").unwrap_or(!bare), no_workspace, python: python.and_then(Maybe::into_option), install_mirrors, @@ -307,13 +312,12 @@ impl InitSettings { } /// The resolved settings to use for a `run` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct RunSettings { pub(crate) locked: bool, pub(crate) frozen: bool, pub(crate) extras: ExtrasSpecification, - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) editable: EditableMode, pub(crate) modifications: Modifications, pub(crate) with: Vec, @@ -402,9 +406,9 @@ impl RunSettings { false, // TODO(blueraft): support only_extra vec![], - flag(all_extras, no_all_extras).unwrap_or_default(), + flag(all_extras, no_all_extras, "all-extras").unwrap_or_default(), ), - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -415,7 +419,7 @@ impl RunSettings { all_groups, ), editable: EditableMode::from_args(no_editable), - modifications: if flag(exact, inexact).unwrap_or(false) { + modifications: if flag(exact, inexact, "inexact").unwrap_or(false) { Modifications::Exact } else { Modifications::Sufficient @@ -438,7 +442,7 @@ impl RunSettings { package, no_project, no_sync, - active: flag(active, no_active), + active: flag(active, no_active, "active"), python: python.and_then(Maybe::into_option), refresh: Refresh::from(refresh), settings: ResolverInstallerSettings::combine( @@ -454,7 +458,6 @@ impl RunSettings { } /// The resolved settings to use for a `tool run` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolRunSettings { pub(crate) command: Option, @@ -586,7 +589,6 @@ impl ToolRunSettings { } /// The resolved settings to use for a `tool install` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolInstallSettings { pub(crate) package: String, @@ -681,7 +683,6 @@ impl ToolInstallSettings { } /// The resolved settings to use for a `tool upgrade` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolUpgradeSettings { pub(crate) names: Vec, @@ -776,7 +777,6 @@ impl ToolUpgradeSettings { } /// The resolved settings to use for a `tool list` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolListSettings { pub(crate) show_paths: bool, @@ -808,7 +808,6 @@ impl ToolListSettings { } /// The resolved settings to use for a `tool uninstall` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolUninstallSettings { pub(crate) name: Vec, @@ -827,7 +826,6 @@ impl ToolUninstallSettings { } /// The resolved settings to use for a `tool dir` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct ToolDirSettings { pub(crate) bin: bool, @@ -854,7 +852,6 @@ pub(crate) enum PythonListKinds { } /// The resolved settings to use for a `tool run` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonListSettings { pub(crate) request: Option, @@ -914,7 +911,6 @@ impl PythonListSettings { } /// The resolved settings to use for a `python dir` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonDirSettings { pub(crate) bin: bool, @@ -931,7 +927,6 @@ impl PythonDirSettings { } /// The resolved settings to use for a `python install` invocation. 
-#[allow(clippy::struct_excessive_bools)]
 #[derive(Debug, Clone)]
 pub(crate) struct PythonInstallSettings {
     pub(crate) install_dir: Option<PathBuf>,
@@ -986,9 +981,61 @@ impl PythonInstallSettings {
     }
 }
 
-/// The resolved settings to use for a `python uninstall` invocation.
+/// The resolved settings to use for a `python upgrade` invocation.
 #[allow(clippy::struct_excessive_bools)]
 #[derive(Debug, Clone)]
+pub(crate) struct PythonUpgradeSettings {
+    pub(crate) install_dir: Option<PathBuf>,
+    pub(crate) targets: Vec<String>,
+    pub(crate) force: bool,
+    pub(crate) python_install_mirror: Option<String>,
+    pub(crate) pypy_install_mirror: Option<String>,
+    pub(crate) python_downloads_json_url: Option<String>,
+    pub(crate) default: bool,
+}
+
+impl PythonUpgradeSettings {
+    /// Resolve the [`PythonUpgradeSettings`] from the CLI and filesystem configuration.
+    #[allow(clippy::needless_pass_by_value)]
+    pub(crate) fn resolve(args: PythonUpgradeArgs, filesystem: Option<FilesystemOptions>) -> Self {
+        let options = filesystem.map(FilesystemOptions::into_options);
+        let (python_mirror, pypy_mirror, python_downloads_json_url) = match options {
+            Some(options) => (
+                options.install_mirrors.python_install_mirror,
+                options.install_mirrors.pypy_install_mirror,
+                options.install_mirrors.python_downloads_json_url,
+            ),
+            None => (None, None, None),
+        };
+        let python_mirror = args.mirror.or(python_mirror);
+        let pypy_mirror = args.pypy_mirror.or(pypy_mirror);
+        let python_downloads_json_url =
+            args.python_downloads_json_url.or(python_downloads_json_url);
+        let force = false;
+        let default = false;
+
+        let PythonUpgradeArgs {
+            install_dir,
+            targets,
+            mirror: _,
+            pypy_mirror: _,
+            python_downloads_json_url: _,
+        } = args;
+
+        Self {
+            install_dir,
+            targets,
+            force,
+            python_install_mirror: python_mirror,
+            pypy_install_mirror: pypy_mirror,
+            python_downloads_json_url,
+            default,
+        }
+    }
+}
+
+/// The resolved settings to use for a `python uninstall` invocation.
+#[derive(Debug, Clone)]
 pub(crate) struct PythonUninstallSettings {
     pub(crate) install_dir: Option<PathBuf>,
     pub(crate) targets: Vec<String>,
@@ -1017,7 +1064,6 @@ impl PythonUninstallSettings {
 }
 
 /// The resolved settings to use for a `python find` invocation.
-#[allow(clippy::struct_excessive_bools)]
 #[derive(Debug, Clone)]
 pub(crate) struct PythonFindSettings {
     pub(crate) request: Option<String>,
     pub(crate) show_version: bool,
@@ -1043,13 +1089,12 @@ impl PythonFindSettings {
             request,
             show_version,
             no_project,
-            system: flag(system, no_system).unwrap_or_default(),
+            system: flag(system, no_system, "system").unwrap_or_default(),
         }
     }
 }
 
 /// The resolved settings to use for a `python pin` invocation.
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PythonPinSettings { pub(crate) request: Option, @@ -1079,7 +1124,7 @@ impl PythonPinSettings { Self { request, - resolved: flag(resolved, no_resolved).unwrap_or(false), + resolved: flag(resolved, no_resolved, "resolved").unwrap_or(false), no_project, global, rm, @@ -1098,7 +1143,7 @@ pub(crate) struct SyncSettings { pub(crate) script: Option, pub(crate) active: Option, pub(crate) extras: ExtrasSpecification, - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) editable: EditableMode, pub(crate) install_options: InstallOptions, pub(crate) modifications: Modifications, @@ -1158,7 +1203,7 @@ impl SyncSettings { filesystem, ); - let check = flag(check, no_check).unwrap_or_default(); + let check = flag(check, no_check, "check").unwrap_or_default(); let dry_run = if check { DryRun::Check } else { @@ -1170,7 +1215,7 @@ impl SyncSettings { frozen, dry_run, script, - active: flag(active, no_active), + active: flag(active, no_active, "active"), extras: ExtrasSpecification::from_args( extra.unwrap_or_default(), no_extra, @@ -1178,9 +1223,9 @@ impl SyncSettings { false, // TODO(blueraft): support only_extra vec![], - flag(all_extras, no_all_extras).unwrap_or_default(), + flag(all_extras, no_all_extras, "all-extras").unwrap_or_default(), ), - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -1196,7 +1241,7 @@ impl SyncSettings { no_install_workspace, no_install_package, ), - modifications: if flag(exact, inexact).unwrap_or(true) { + modifications: if flag(exact, inexact, "inexact").unwrap_or(true) { Modifications::Exact } else { Modifications::Sufficient @@ -1349,6 +1394,12 @@ impl AddSettings { ) .collect::>(); + // Warn user if an ambiguous relative path was passed as a value for + // `--index` or `--default-index`. + indexes + .iter() + .for_each(|index| index.url().warn_on_disambiguated_relative_path()); + // If the user passed an `--index-url` or `--extra-index-url`, warn. if installer .index_args @@ -1394,7 +1445,7 @@ impl AddSettings { Self { locked, frozen, - active: flag(active, no_active), + active: flag(active, no_active, "active"), no_sync, packages, requirements, @@ -1412,7 +1463,7 @@ impl AddSettings { package, script, python: python.and_then(Maybe::into_option), - editable: flag(editable, no_editable), + editable: flag(editable, no_editable, "editable"), extras: extra.unwrap_or_default(), refresh: Refresh::from(refresh), indexes, @@ -1488,7 +1539,7 @@ impl RemoveSettings { Self { locked, frozen, - active: flag(active, no_active), + active: flag(active, no_active, "active"), no_sync, packages, dependency_type, @@ -1560,7 +1611,7 @@ impl VersionSettings { dry_run, locked, frozen, - active: flag(active, no_active), + active: flag(active, no_active, "active"), no_sync, package, python: python.and_then(Maybe::into_option), @@ -1575,10 +1626,9 @@ impl VersionSettings { } /// The resolved settings to use for a `tree` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct TreeSettings { - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) locked: bool, pub(crate) frozen: bool, pub(crate) universal: bool, @@ -1626,7 +1676,7 @@ impl TreeSettings { .unwrap_or_default(); Self { - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -1664,7 +1714,7 @@ pub(crate) struct ExportSettings { pub(crate) package: Option, pub(crate) prune: Vec, pub(crate) extras: ExtrasSpecification, - pub(crate) dev: DependencyGroups, + pub(crate) groups: DependencyGroups, pub(crate) editable: EditableMode, pub(crate) hashes: bool, pub(crate) install_options: InstallOptions, @@ -1737,9 +1787,9 @@ impl ExportSettings { false, // TODO(blueraft): support only_extra vec![], - flag(all_extras, no_all_extras).unwrap_or_default(), + flag(all_extras, no_all_extras, "all-extras").unwrap_or_default(), ), - dev: DependencyGroups::from_args( + groups: DependencyGroups::from_args( dev, no_dev, only_dev, @@ -1750,7 +1800,7 @@ impl ExportSettings { all_groups, ), editable: EditableMode::from_args(no_editable), - hashes: flag(hashes, no_hashes).unwrap_or(true), + hashes: flag(hashes, no_hashes, "hashes").unwrap_or(true), install_options: InstallOptions::new( no_emit_project, no_emit_workspace, @@ -1759,8 +1809,8 @@ impl ExportSettings { output_file, locked, frozen, - include_annotations: flag(annotate, no_annotate).unwrap_or(true), - include_header: flag(header, no_header).unwrap_or(true), + include_annotations: flag(annotate, no_annotate, "annotate").unwrap_or(true), + include_header: flag(header, no_header, "header").unwrap_or(true), script, python: python.and_then(Maybe::into_option), refresh: Refresh::from(refresh), @@ -1771,7 +1821,6 @@ impl ExportSettings { } /// The resolved settings to use for a `pip compile` invocation. 
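// Aside: the `dev` -> `groups` rename in `SyncSettings` earlier and in
// `TreeSettings`/`ExportSettings` here reflects that the field carries every
// requested dependency group, not just `dev` (`--dev` is an alias for
// `--group dev`). A minimal assumed model of what `DependencyGroups::from_args`
// collapses its many flags into; the real type is richer:
struct DependencyGroupsModel {
    include: Vec<String>, // from `--group` (and `--dev`)
    exclude: Vec<String>, // from `--no-group` / `--no-dev`
    only: bool,           // `--only-dev` / `--only-group`
    all: bool,            // `--all-groups`
}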
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipCompileSettings { pub(crate) format: Option, @@ -1914,30 +1963,42 @@ impl PipCompileSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - no_build: flag(no_build, build), + system: flag(system, no_system, "system"), + no_build: flag(no_build, build, "build"), no_binary, only_binary, extra, - all_extras: flag(all_extras, no_all_extras), - no_deps: flag(no_deps, deps), + all_extras: flag(all_extras, no_all_extras, "all-extras"), + no_deps: flag(no_deps, deps, "deps"), group: Some(group), output_file, - no_strip_extras: flag(no_strip_extras, strip_extras), - no_strip_markers: flag(no_strip_markers, strip_markers), - no_annotate: flag(no_annotate, annotate), - no_header: flag(no_header, header), + no_strip_extras: flag(no_strip_extras, strip_extras, "strip-extras"), + no_strip_markers: flag(no_strip_markers, strip_markers, "strip-markers"), + no_annotate: flag(no_annotate, annotate, "annotate"), + no_header: flag(no_header, header, "header"), custom_compile_command, - generate_hashes: flag(generate_hashes, no_generate_hashes), + generate_hashes: flag(generate_hashes, no_generate_hashes, "generate-hashes"), python_version, python_platform, - universal: flag(universal, no_universal), + universal: flag(universal, no_universal, "universal"), no_emit_package, - emit_index_url: flag(emit_index_url, no_emit_index_url), - emit_find_links: flag(emit_find_links, no_emit_find_links), - emit_build_options: flag(emit_build_options, no_emit_build_options), - emit_marker_expression: flag(emit_marker_expression, no_emit_marker_expression), - emit_index_annotation: flag(emit_index_annotation, no_emit_index_annotation), + emit_index_url: flag(emit_index_url, no_emit_index_url, "emit-index-url"), + emit_find_links: flag(emit_find_links, no_emit_find_links, "emit-find-links"), + emit_build_options: flag( + emit_build_options, + no_emit_build_options, + "emit-build-options", + ), + emit_marker_expression: flag( + emit_marker_expression, + no_emit_marker_expression, + "emit-marker-expression", + ), + emit_index_annotation: flag( + emit_index_annotation, + no_emit_index_annotation, + "emit-index-annotation", + ), annotation_style, torch_backend, ..PipOptions::from(resolver) @@ -1949,7 +2010,6 @@ impl PipCompileSettings { } /// The resolved settings to use for a `pip sync` invocation. 
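// Aside: every `PipSettings::combine(PipOptions { ... }, filesystem)` call in
// this file follows one precedence rule. A minimal per-field sketch, assuming
// each field combines independently (the real structs carry many more fields):
struct CliOpts {
    system: Option<bool>,
}
struct FileOpts {
    system: Option<bool>,
}

fn combine(cli: CliOpts, file: Option<FileOpts>) -> bool {
    // An explicit CLI flag wins; otherwise fall back to `uv.toml`/`[tool.uv]`;
    // otherwise the hard-coded default.
    cli.system.or(file.and_then(|f| f.system)).unwrap_or(false)
}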
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipSyncSettings { pub(crate) src_file: Vec, @@ -2010,22 +2070,27 @@ impl PipSyncSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - break_system_packages: flag(break_system_packages, no_break_system_packages), + system: flag(system, no_system, "system"), + break_system_packages: flag( + break_system_packages, + no_break_system_packages, + "break-system-packages", + ), target, prefix, - require_hashes: flag(require_hashes, no_require_hashes), - verify_hashes: flag(verify_hashes, no_verify_hashes), - no_build: flag(no_build, build), + require_hashes: flag(require_hashes, no_require_hashes, "require-hashes"), + verify_hashes: flag(verify_hashes, no_verify_hashes, "verify-hashes"), + no_build: flag(no_build, build, "build"), no_binary, only_binary, allow_empty_requirements: flag( allow_empty_requirements, no_allow_empty_requirements, + "allow-empty-requirements", ), python_version, python_platform, - strict: flag(strict, no_strict), + strict: flag(strict, no_strict, "strict"), torch_backend, ..PipOptions::from(installer) }, @@ -2036,7 +2101,6 @@ impl PipSyncSettings { } /// The resolved settings to use for a `pip install` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipInstallSettings { pub(crate) package: Vec, @@ -2160,7 +2224,7 @@ impl PipInstallSettings { constraints_from_workspace, overrides_from_workspace, build_constraints_from_workspace, - modifications: if flag(exact, inexact).unwrap_or(false) { + modifications: if flag(exact, inexact, "inexact").unwrap_or(false) { Modifications::Exact } else { Modifications::Sufficient @@ -2169,22 +2233,26 @@ impl PipInstallSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - break_system_packages: flag(break_system_packages, no_break_system_packages), + system: flag(system, no_system, "system"), + break_system_packages: flag( + break_system_packages, + no_break_system_packages, + "break-system-packages", + ), target, prefix, - no_build: flag(no_build, build), + no_build: flag(no_build, build, "build"), no_binary, only_binary, - strict: flag(strict, no_strict), + strict: flag(strict, no_strict, "strict"), extra, - all_extras: flag(all_extras, no_all_extras), + all_extras: flag(all_extras, no_all_extras, "all-extras"), group: Some(group), - no_deps: flag(no_deps, deps), + no_deps: flag(no_deps, deps, "deps"), python_version, python_platform, - require_hashes: flag(require_hashes, no_require_hashes), - verify_hashes: flag(verify_hashes, no_verify_hashes), + require_hashes: flag(require_hashes, no_require_hashes, "require-hashes"), + verify_hashes: flag(verify_hashes, no_verify_hashes, "verify-hashes"), torch_backend, ..PipOptions::from(installer) }, @@ -2195,7 +2263,6 @@ impl PipInstallSettings { } /// The resolved settings to use for a `pip uninstall` invocation. 
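// Aside: note the asymmetric defaults for `flag(exact, inexact, "inexact")`:
// `uv sync` (earlier in this file) uses `unwrap_or(true)`, while `uv pip
// install` above uses `unwrap_or(false)`. A standalone restatement, with the
// variant meanings assumed from context:
#[derive(Debug, PartialEq)]
enum SyncModifications {
    Exact,      // remove anything not part of the resolution
    Sufficient, // leave extraneous packages in place
}

fn modifications(exact: Option<bool>, default_exact: bool) -> SyncModifications {
    if exact.unwrap_or(default_exact) {
        SyncModifications::Exact
    } else {
        SyncModifications::Sufficient
    }
}
// modifications(None, true)  == Exact      -> the `uv sync` default
// modifications(None, false) == Sufficient -> the `uv pip install` default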
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipUninstallSettings { pub(crate) package: Vec, @@ -2229,8 +2296,12 @@ impl PipUninstallSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - break_system_packages: flag(break_system_packages, no_break_system_packages), + system: flag(system, no_system, "system"), + break_system_packages: flag( + break_system_packages, + no_break_system_packages, + "break-system-packages", + ), target, prefix, keyring_provider, @@ -2243,7 +2314,6 @@ impl PipUninstallSettings { } /// The resolved settings to use for a `pip freeze` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipFreezeSettings { pub(crate) exclude_editable: bool, @@ -2271,8 +2341,8 @@ impl PipFreezeSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::default() }, filesystem, @@ -2282,7 +2352,6 @@ impl PipFreezeSettings { } /// The resolved settings to use for a `pip list` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipListSettings { pub(crate) editable: Option, @@ -2312,15 +2381,15 @@ impl PipListSettings { } = args; Self { - editable: flag(editable, exclude_editable), + editable: flag(editable, exclude_editable, "exclude-editable"), exclude, format, - outdated: flag(outdated, no_outdated).unwrap_or(false), + outdated: flag(outdated, no_outdated, "outdated").unwrap_or(false), settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::from(fetch) }, filesystem, @@ -2330,7 +2399,6 @@ impl PipListSettings { } /// The resolved settings to use for a `pip show` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipShowSettings { pub(crate) package: Vec, @@ -2358,8 +2426,8 @@ impl PipShowSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::default() }, filesystem, @@ -2369,7 +2437,6 @@ impl PipShowSettings { } /// The resolved settings to use for a `pip tree` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipTreeSettings { pub(crate) show_version_specifiers: bool, @@ -2408,8 +2475,8 @@ impl PipTreeSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), - strict: flag(strict, no_strict), + system: flag(system, no_system, "system"), + strict: flag(strict, no_strict, "strict"), ..PipOptions::from(fetch) }, filesystem, @@ -2419,7 +2486,6 @@ impl PipTreeSettings { } /// The resolved settings to use for a `pip check` invocation. 
-#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipCheckSettings { pub(crate) settings: PipSettings, @@ -2438,7 +2504,7 @@ impl PipCheckSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), + system: flag(system, no_system, "system"), ..PipOptions::default() }, filesystem, @@ -2448,7 +2514,6 @@ impl PipCheckSettings { } /// The resolved settings to use for a `build` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct BuildSettings { pub(crate) src: Option, @@ -2506,15 +2571,15 @@ impl BuildSettings { sdist, wheel, list, - build_logs: flag(build_logs, no_build_logs).unwrap_or(true), + build_logs: flag(build_logs, no_build_logs, "build-logs").unwrap_or(true), build_constraints: build_constraints .into_iter() .filter_map(Maybe::into_option) .collect(), force_pep517, hash_checking: HashCheckingMode::from_args( - flag(require_hashes, no_require_hashes), - flag(verify_hashes, no_verify_hashes), + flag(require_hashes, no_require_hashes, "require-hashes"), + flag(verify_hashes, no_verify_hashes, "verify-hashes"), ), python: python.and_then(Maybe::into_option), refresh: Refresh::from(refresh), @@ -2525,7 +2590,6 @@ impl BuildSettings { } /// The resolved settings to use for a `venv` invocation. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct VenvSettings { pub(crate) seed: bool, @@ -2574,7 +2638,7 @@ impl VenvSettings { settings: PipSettings::combine( PipOptions { python: python.and_then(Maybe::into_option), - system: flag(system, no_system), + system: flag(system, no_system, "system"), index_strategy, keyring_provider, exclude_newer, @@ -2612,7 +2676,6 @@ pub(crate) struct InstallerSettingsRef<'a> { /// /// Combines the `[tool.uv]` persistent configuration with the command-line arguments /// ([`ResolverArgs`], represented as [`ResolverOptions`]). -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone, Default)] pub(crate) struct ResolverSettings { pub(crate) build_options: BuildOptions, @@ -2702,7 +2765,6 @@ impl From for ResolverSettings { /// /// Represents the shared settings that are used across all uv commands outside the `pip` API. /// Analogous to the settings contained in the `[tool.uv]` table, combined with [`ResolverInstallerArgs`]. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone, Default)] pub(crate) struct ResolverInstallerSettings { pub(crate) resolver: ResolverSettings, @@ -2792,7 +2854,6 @@ impl From for ResolverInstallerSettings { /// /// Represents the shared settings that are used across all `pip` commands. Analogous to the /// settings contained in the `[tool.uv.pip]` table. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PipSettings { pub(crate) index_locations: IndexLocations, @@ -3169,7 +3230,6 @@ impl<'a> From<&'a ResolverInstallerSettings> for InstallerSettingsRef<'a> { } /// The resolved settings to use for an invocation of the `uv publish` CLI. -#[allow(clippy::struct_excessive_bools)] #[derive(Debug, Clone)] pub(crate) struct PublishSettings { // CLI only, see [`PublishArgs`] for docs. 
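// Aside on the test changes that follow: the per-test
// `(r"exit code: 1", "exit status: 1")` filters disappear because the test
// context now installs a single Windows-only filter (see common/mod.rs further
// down). A standalone illustration with a made-up message:
fn normalize_exit_wording(msg: &str) -> String {
    // Unix reports "exit status", Windows reports "exit code"; snapshots are
    // written against the Unix wording.
    msg.replace("exit code: ", "exit status: ")
}
// normalize_exit_wording("failed (exit code: 1)") == "failed (exit status: 1)"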
diff --git a/crates/uv/tests/it/build.rs b/crates/uv/tests/it/build.rs index 4fe7ca9cb..3d08a90d4 100644 --- a/crates/uv/tests/it/build.rs +++ b/crates/uv/tests/it/build.rs @@ -7,7 +7,6 @@ use indoc::indoc; use insta::assert_snapshot; use predicates::prelude::predicate; use std::env::current_dir; -use std::process::Command; use zip::ZipArchive; #[test] @@ -16,7 +15,7 @@ fn build_basic() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -134,7 +133,7 @@ fn build_sdist() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -190,7 +189,7 @@ fn build_wheel() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -246,7 +245,7 @@ fn build_sdist_wheel() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -304,7 +303,7 @@ fn build_wheel_from_sdist() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -413,7 +412,7 @@ fn build_fail() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -489,7 +488,6 @@ fn build_workspace() -> Result<()> { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"\\\.", ""), (r"\[project\]", "[PKG]"), (r"\[member\]", "[PKG]"), @@ -695,7 +693,6 @@ fn build_all_with_failure() -> Result<()> { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"\\\.", ""), (r"\[project\]", "[PKG]"), (r"\[member-\w+\]", "[PKG]"), @@ -841,7 +838,7 @@ fn build_constraints() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -902,7 +899,7 @@ fn build_sha() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -1188,7 +1185,7 @@ fn build_tool_uv_sources() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let build = context.temp_dir.child("backend"); @@ -1338,7 +1335,6 @@ fn build_non_package() -> Result<()> { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"\\\.", ""), (r"\[project\]", "[PKG]"), (r"\[member\]", "[PKG]"), @@ -1857,7 +1853,7 @@ fn build_unconfigured_setuptools() -> Result<()> { + greet==0.1.0 (from file://[TEMP_DIR]/) "###); - uv_snapshot!(context.filters(), Command::new(context.interpreter()).arg("-c").arg("import greet"), @r###" + uv_snapshot!(context.filters(), 
context.python_command().arg("-c").arg("import greet"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -1931,7 +1927,7 @@ fn build_with_nonnormalized_name() -> Result<()> { let filters = context .filters() .into_iter() - .chain([(r"exit code: 1", "exit status: 1"), (r"\\\.", "")]) + .chain([(r"\\\.", "")]) .collect::>(); let project = context.temp_dir.child("project"); @@ -1982,3 +1978,60 @@ fn build_with_nonnormalized_name() -> Result<()> { Ok(()) } + +/// Check that `--force-pep517` is respected. +/// +/// The error messages for a broken project are different for direct builds vs. PEP 517. +#[test] +fn force_pep517() -> Result<()> { + // We need to use a real `uv_build` package. + let context = TestContext::new("3.12").with_exclude_newer("2025-05-27T00:00:00Z"); + + context + .init() + .arg("--build-backend") + .arg("uv") + .assert() + .success(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "project" + version = "1.0.0" + + [tool.uv.build-backend] + module-name = "does_not_exist" + + [build-system] + requires = ["uv_build>=0.5.15,<10000"] + build-backend = "uv_build" + "#})?; + + uv_snapshot!(context.filters(), context.build().env("RUST_BACKTRACE", "0"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Building source distribution (uv build backend)... + × Failed to build `[TEMP_DIR]/` + ╰─▶ Expected a Python module at: `src/does_not_exist/__init__.py` + "); + + uv_snapshot!(context.filters(), context.build().arg("--force-pep517").env("RUST_BACKTRACE", "0"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Building source distribution... + Error: Missing module directory for `does_not_exist` in `src`. Found: `temp` + × Failed to build `[TEMP_DIR]/` + ├─▶ The build backend returned an error + ╰─▶ Call to `uv_build.build_sdist` failed (exit status: 1) + hint: This usually indicates a problem with the package or the build environment. + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/build_backend.rs b/crates/uv/tests/it/build_backend.rs index a806dc989..b3bd337ae 100644 --- a/crates/uv/tests/it/build_backend.rs +++ b/crates/uv/tests/it/build_backend.rs @@ -50,13 +50,9 @@ fn built_by_uv_direct_wheel() -> Result<()> { .assert() .success(); - uv_snapshot!(context - .run() - .arg("python") + uv_snapshot!(context.python_command() .arg("-c") - .arg(BUILT_BY_UV_TEST_SCRIPT) - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r###" + .arg(BUILT_BY_UV_TEST_SCRIPT), @r###" success: true exit_code: 0 ----- stdout ----- @@ -138,13 +134,9 @@ fn built_by_uv_direct() -> Result<()> { drop(wheel_dir); - uv_snapshot!(context - .run() - .arg("python") + uv_snapshot!(context.python_command() .arg("-c") - .arg(BUILT_BY_UV_TEST_SCRIPT) - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r###" + .arg(BUILT_BY_UV_TEST_SCRIPT), @r###" success: true exit_code: 0 ----- stdout ----- @@ -169,7 +161,8 @@ fn built_by_uv_editable() -> Result<()> { // Without the editable, pytest fails. context.pip_install().arg("pytest").assert().success(); - Command::new(context.interpreter()) + context + .python_command() .arg("-m") .arg("pytest") .current_dir(built_by_uv) @@ -200,7 +193,7 @@ fn built_by_uv_editable() -> Result<()> { drop(wheel_dir); // Now, pytest passes. 
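// Aside: the `force_pep517` test above hinges on a dispatch between two build
// paths; the enum below is an assumed restatement for illustration, not uv's
// actual types.
enum BuildPath {
    // uv's own build backend driven in-process: errors such as the missing
    // module surface directly in uv's error chain.
    DirectUvBuild,
    // The generic PEP 517 path: the `uv_build.build_sdist` hook runs in a
    // child Python process, so the same failure arrives as a non-zero exit
    // status plus captured output.
    Pep517Subprocess,
}

fn choose_build_path(backend_is_uv_build: bool, force_pep517: bool) -> BuildPath {
    if backend_is_uv_build && !force_pep517 {
        BuildPath::DirectUvBuild
    } else {
        BuildPath::Pep517Subprocess
    }
}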
- uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-m") .arg("pytest") // Avoid showing absolute paths and column dependent layout @@ -231,7 +224,6 @@ fn preserve_executable_bit() -> Result<()> { .init() .arg("--build-backend") .arg("uv") - .arg("--preview") .arg(&project_dir) .assert() .success(); @@ -323,8 +315,7 @@ fn rename_module() -> Result<()> { uv_snapshot!(context .build_backend() .arg("build-wheel") - .arg(temp_dir.path()) - .env("UV_PREVIEW", "1"), @r###" + .arg(temp_dir.path()), @r###" success: true exit_code: 0 ----- stdout ----- @@ -340,11 +331,9 @@ fn rename_module() -> Result<()> { .success(); // Importing the module with the `module-name` name succeeds. - uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-c") - .arg("import bar") - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r###" + .arg("import bar"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -354,11 +343,9 @@ fn rename_module() -> Result<()> { "###); // Importing the package name fails, it was overridden by `module-name`. - uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-c") - .arg("import foo") - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r###" + .arg("import foo"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -402,8 +389,7 @@ fn rename_module_editable_build() -> Result<()> { uv_snapshot!(context .build_backend() .arg("build-editable") - .arg(temp_dir.path()) - .env("UV_PREVIEW", "1"), @r###" + .arg(temp_dir.path()), @r###" success: true exit_code: 0 ----- stdout ----- @@ -419,11 +405,9 @@ fn rename_module_editable_build() -> Result<()> { .success(); // Importing the module with the `module-name` name succeeds. 
- uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-c") - .arg("import bar") - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r###" + .arg("import bar"), @r###" success: true exit_code: 0 ----- stdout ----- @@ -514,11 +498,9 @@ fn build_module_name_normalization() -> Result<()> { .assert() .success(); - uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-c") - .arg("import Django_plugin") - // Python on windows - .env(EnvVars::PYTHONUTF8, "1"), @r" + .arg("import Django_plugin"), @r" success: true exit_code: 0 ----- stdout ----- @@ -583,8 +565,7 @@ fn build_sdist_with_long_path() -> Result<()> { uv_snapshot!(context .build_backend() .arg("build-sdist") - .arg(temp_dir.path()) - .env("UV_PREVIEW", "1"), @r###" + .arg(temp_dir.path()), @r###" success: true exit_code: 0 ----- stdout ----- @@ -617,8 +598,7 @@ fn sdist_error_without_module() -> Result<()> { uv_snapshot!(context .build_backend() .arg("build-sdist") - .arg(temp_dir.path()) - .env("UV_PREVIEW", "1"), @r" + .arg(temp_dir.path()), @r" success: false exit_code: 2 ----- stdout ----- @@ -632,8 +612,7 @@ fn sdist_error_without_module() -> Result<()> { uv_snapshot!(context .build_backend() .arg("build-sdist") - .arg(temp_dir.path()) - .env("UV_PREVIEW", "1"), @r" + .arg(temp_dir.path()), @r" success: false exit_code: 2 ----- stdout ----- @@ -697,7 +676,6 @@ fn complex_namespace_packages() -> Result<()> { context .build() - .arg("--preview") .arg(project.path()) .arg("--out-dir") .arg(dist.path()) @@ -728,7 +706,7 @@ fn complex_namespace_packages() -> Result<()> { " ); - uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-c") .arg("from complex_project.part_b import two; print(two())"), @r" @@ -746,7 +724,6 @@ fn complex_namespace_packages() -> Result<()> { context.filters(), context .pip_install() - .arg("--preview") .arg("-e") .arg("complex-project-part_a") .arg("-e") @@ -769,7 +746,7 @@ fn complex_namespace_packages() -> Result<()> { " ); - uv_snapshot!(Command::new(context.interpreter()) + uv_snapshot!(context.python_command() .arg("-c") .arg("from complex_project.part_b import two; print(two())"), @r" @@ -783,3 +760,129 @@ fn complex_namespace_packages() -> Result<()> { ); Ok(()) } + +/// Test that a symlinked file (here: license) gets included. +#[test] +#[cfg(unix)] +fn symlinked_file() -> Result<()> { + let context = TestContext::new("3.12"); + + let project = context.temp_dir.child("project"); + context + .init() + .arg("--build-backend") + .arg("uv") + .arg(project.path()) + .assert() + .success(); + + project.child("pyproject.toml").write_str(indoc! 
{r#" + [project] + name = "project" + version = "1.0.0" + license-files = ["LICENSE"] + + [build-system] + requires = ["uv_build>=0.5.15,<10000"] + build-backend = "uv_build" + "# + })?; + + let license_file = context.temp_dir.child("LICENSE"); + let license_symlink = project.child("LICENSE"); + + let license_text = "Project license"; + license_file.write_str(license_text)?; + fs_err::os::unix::fs::symlink(license_file.path(), license_symlink.path())?; + + uv_snapshot!(context + .build_backend() + .arg("build-sdist") + .arg(context.temp_dir.path()) + .current_dir(project.path()), @r" + success: true + exit_code: 0 + ----- stdout ----- + project-1.0.0.tar.gz + + ----- stderr ----- + "); + + uv_snapshot!(context + .build_backend() + .arg("build-wheel") + .arg(context.temp_dir.path()) + .current_dir(project.path()), @r" + success: true + exit_code: 0 + ----- stdout ----- + project-1.0.0-py3-none-any.whl + + ----- stderr ----- + "); + + uv_snapshot!(context.filters(), context.pip_install().arg("project-1.0.0-py3-none-any.whl"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + project==1.0.0 (from file://[TEMP_DIR]/project-1.0.0-py3-none-any.whl) + "); + + // Check that we included the actual license text and not a broken symlink. + let installed_license = context + .site_packages() + .join("project-1.0.0.dist-info") + .join("licenses") + .join("LICENSE"); + assert!( + fs_err::symlink_metadata(&installed_license)? + .file_type() + .is_file() + ); + let license = fs_err::read_to_string(&installed_license)?; + assert_eq!(license, license_text); + + Ok(()) +} + +/// Ignore invalid build backend settings when not building. +/// +/// They may be from another `uv_build` version that has a different schema. +#[test] +fn invalid_build_backend_settings_are_ignored() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "built-by-uv" + version = "0.1.0" + requires-python = ">=3.12" + + [tool.uv.build-backend] + # Error: `source-include` must be a list + source-include = "data/build-script.py" + + [build-system] + requires = ["uv_build>=10000,<10001"] + build-backend = "uv_build" + "#})?; + + // Since we are not building, this must pass without complaining about the error in + // `tool.uv.build-backend`. + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/common/mod.rs b/crates/uv/tests/it/common/mod.rs index 67e1a6126..7b13c49b5 100644 --- a/crates/uv/tests/it/common/mod.rs +++ b/crates/uv/tests/it/common/mod.rs @@ -13,7 +13,6 @@ use assert_cmd::assert::{Assert, OutputAssertExt}; use assert_fs::assert::PathAssert; use assert_fs::fixture::{ChildPath, PathChild, PathCopy, PathCreateDir, SymlinkToFile}; use base64::{Engine, prelude::BASE64_STANDARD as base64}; -use etcetera::BaseStrategy; use futures::StreamExt; use indoc::formatdoc; use itertools::Itertools; @@ -22,6 +21,7 @@ use regex::Regex; use tokio::io::AsyncWriteExt; use uv_cache::Cache; +use uv_configuration::PreviewMode; use uv_fs::Simplified; use uv_python::managed::ManagedPythonInstallations; use uv_python::{ @@ -66,7 +66,7 @@ pub const INSTA_FILTERS: &[(&str, &str)] = &[ (r"uv\.exe", "uv"), // uv version display ( - r"uv(-.*)? 
\d+\.\d+\.\d+(\+\d+)?( \(.*\))?", + r"uv(-.*)? \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?( \([^)]*\))?", r"uv [VERSION] ([COMMIT] DATE)", ), // Trim end-of-line whitespaces, to allow removing them on save. @@ -254,7 +254,7 @@ impl TestContext { let added_filters = [ (r"home = .+".to_string(), "home = [PYTHON_HOME]".to_string()), ( - r"uv = \d+\.\d+\.\d+".to_string(), + r"uv = \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?".to_string(), "uv = [UV_VERSION]".to_string(), ), ( @@ -406,25 +406,20 @@ impl TestContext { self } - /// Discover the path to the XDG state directory. We use this, rather than the OS-specific - /// temporary directory, because on macOS (and Windows on GitHub Actions), they involve - /// symlinks. (On macOS, the temporary directory is, like `/var/...`, which resolves to - /// `/private/var/...`.) + /// Default to the canonicalized path to the temp directory. We need to do this because on + /// macOS (and Windows on GitHub Actions) the standard temp dir is a symlink. (On macOS, the + /// temporary directory is, like `/var/...`, which resolves to `/private/var/...`.) /// /// It turns out that, at least on macOS, if we pass a symlink as `current_dir`, it gets /// _immediately_ resolved (such that if you call `current_dir` in the running `Command`, it - /// returns resolved symlink). This is problematic, as we _don't_ want to resolve symlinks - /// for user-provided paths. + /// returns resolved symlink). This breaks some snapshot tests, since we _don't_ want to + /// resolve symlinks for user-provided paths. pub fn test_bucket_dir() -> PathBuf { - env::var(EnvVars::UV_INTERNAL__TEST_DIR) - .map(PathBuf::from) - .unwrap_or_else(|_| { - etcetera::base_strategy::choose_base_strategy() - .expect("Failed to find base strategy") - .data_dir() - .join("uv") - .join("tests") - }) + std::env::temp_dir() + .simple_canonicalize() + .expect("failed to canonicalize temp dir") + .join("uv") + .join("tests") } /// Create a new test context with multiple Python versions. @@ -522,6 +517,8 @@ impl TestContext { if cfg!(windows) { filters.push((" --link-mode ".to_string(), String::new())); filters.push((r#"link-mode = "copy"\n"#.to_string(), String::new())); + // Unix uses "exit status", Windows uses "exit code" + filters.push((r"exit code: ".to_string(), "exit status: ".to_string())); } filters.extend( @@ -650,6 +647,8 @@ impl TestContext { format!("https://raw.githubusercontent.com/astral-sh/packse/{PACKSE_VERSION}/"), "https://raw.githubusercontent.com/astral-sh/packse/PACKSE_VERSION/".to_string(), )); + // For wiremock tests + filters.push((r"127\.0\.0\.1:\d*".to_string(), "[LOCALHOST]".to_string())); Self { root: ChildPath::new(root.path()), @@ -745,6 +744,7 @@ impl TestContext { .env_remove(EnvVars::UV_CACHE_DIR) .env_remove(EnvVars::UV_TOOL_BIN_DIR) .env_remove(EnvVars::XDG_CONFIG_HOME) + .env_remove(EnvVars::XDG_DATA_HOME) .current_dir(self.temp_dir.path()); for (key, value) in &self.extra_env { @@ -956,6 +956,14 @@ impl TestContext { command } + /// Create a `uv python upgrade` command with options shared across scenarios. + pub fn python_upgrade(&self) -> Command { + let mut command = self.new_command(); + self.add_shared_options(&mut command, true); + command.arg("python").arg("upgrade"); + command + } + /// Create a `uv python pin` command with options shared across scenarios. 
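// Aside: sample strings the widened version filter above now normalizes. The
// samples are made up, not real uv output; `regex::Regex` is already imported
// in this module.
fn version_filter_accepts_prereleases() {
    let re = Regex::new(r"uv(-.*)? \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?( \([^)]*\))?")
        .unwrap();
    for sample in [
        "uv 0.7.9",
        "uv 0.8.0-alpha.1",
        "uv 0.8.0-rc.2+14 (abc1234 2025-06-01)",
    ] {
        assert!(re.is_match(sample));
    }
}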
pub fn python_pin(&self) -> Command { let mut command = self.new_command(); @@ -1082,15 +1090,30 @@ impl TestContext { } pub fn interpreter(&self) -> PathBuf { - venv_to_interpreter(&self.venv) + let venv = &self.venv; + if cfg!(unix) { + venv.join("bin").join("python") + } else if cfg!(windows) { + venv.join("Scripts").join("python.exe") + } else { + unimplemented!("Only Windows and Unix are supported") + } + } + + pub fn python_command(&self) -> Command { + let mut command = self.new_command_with(&self.interpreter()); + command + // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files + // https://github.com/python/cpython/issues/75953 + .arg("-B") + // Python on windows + .env(EnvVars::PYTHONUTF8, "1"); + command } /// Run the given python code and check whether it succeeds. pub fn assert_command(&self, command: &str) -> Assert { - self.new_command_with(&venv_to_interpreter(&self.venv)) - // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files - // https://github.com/python/cpython/issues/75953 - .arg("-B") + self.python_command() .arg("-c") .arg(command) .current_dir(&self.temp_dir) @@ -1099,10 +1122,7 @@ impl TestContext { /// Run the given python file and check whether it succeeds. pub fn assert_file(&self, file: impl AsRef) -> Assert { - self.new_command_with(&venv_to_interpreter(&self.venv)) - // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files - // https://github.com/python/cpython/issues/75953 - .arg("-B") + self.python_command() .arg(file.as_ref()) .current_dir(&self.temp_dir) .assert() @@ -1117,6 +1137,12 @@ impl TestContext { .stdout(version); } + /// Assert a package is not installed. + pub fn assert_not_installed(&self, package: &'static str) { + self.assert_command(format!("import {package}").as_str()) + .failure(); + } + /// Generate various escaped regex patterns for the given path. pub fn path_patterns(path: impl AsRef) -> Vec { let mut patterns = Vec::new(); @@ -1344,16 +1370,6 @@ pub fn venv_bin_path(venv: impl AsRef) -> PathBuf { } } -pub fn venv_to_interpreter(venv: &Path) -> PathBuf { - if cfg!(unix) { - venv.join("bin").join("python") - } else if cfg!(windows) { - venv.join("Scripts").join("python.exe") - } else { - unimplemented!("Only Windows and Unix are supported") - } -} - /// Get the path to the python interpreter for a specific python version. pub fn get_python(version: &PythonVersion) -> PathBuf { ManagedPythonInstallations::from_settings(None) @@ -1423,6 +1439,7 @@ pub fn python_installations_for_versions( EnvironmentPreference::OnlySystem, PythonPreference::Managed, &cache, + PreviewMode::Disabled, ) { python.into_interpreter().sys_executable().to_owned() } else { @@ -1657,9 +1674,9 @@ pub async fn download_to_disk(url: &str, path: &Path) { .allow_insecure_host(trusted_hosts) .build(); let url = url.parse().unwrap(); - let client = client.for_host(&url); let response = client - .request(http::Method::GET, reqwest::Url::from(url)) + .for_host(&url) + .get(reqwest::Url::from(url)) .send() .await .unwrap(); diff --git a/crates/uv/tests/it/ecosystem.rs b/crates/uv/tests/it/ecosystem.rs index e96dca62c..a3804f426 100644 --- a/crates/uv/tests/it/ecosystem.rs +++ b/crates/uv/tests/it/ecosystem.rs @@ -73,8 +73,8 @@ fn saleor() -> Result<()> { // Currently ignored because the project doesn't build with `uv` yet. 
// // Source: https://github.com/apache/airflow/blob/c55438d9b2eb9b6680641eefdd0cbc67a28d1d29/pyproject.toml -#[ignore] #[test] +#[ignore = "Airflow doesn't build with `uv` yet"] fn airflow() -> Result<()> { lock_ecosystem_package("3.12", "airflow") } diff --git a/crates/uv/tests/it/edit.rs b/crates/uv/tests/it/edit.rs index f96dd7b7b..0ae2a07a6 100644 --- a/crates/uv/tests/it/edit.rs +++ b/crates/uv/tests/it/edit.rs @@ -14,6 +14,7 @@ use assert_fs::prelude::*; use indoc::{formatdoc, indoc}; use insta::assert_snapshot; use std::path::Path; +use url::Url; use uv_fs::Simplified; use wiremock::{Mock, MockServer, ResponseTemplate, matchers::method}; @@ -493,6 +494,88 @@ fn add_git_private_raw() -> Result<()> { Ok(()) } +#[tokio::test] +#[cfg(feature = "git")] +async fn add_git_private_rate_limited_by_github_rest_api_403_response() -> Result<()> { + let context = TestContext::new("3.12"); + let token = decode_token(READ_ONLY_GITHUB_TOKEN); + + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with(ResponseTemplate::new(403)) + .expect(1) + .mount(&server) + .await; + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + uv_snapshot!(context.filters(), context + .add() + .arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")) + .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071) + "); + + Ok(()) +} + +#[tokio::test] +#[cfg(feature = "git")] +async fn add_git_private_rate_limited_by_github_rest_api_429_response() -> Result<()> { + use uv_client::DEFAULT_RETRIES; + + let context = TestContext::new("3.12"); + let token = decode_token(READ_ONLY_GITHUB_TOKEN); + + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with(ResponseTemplate::new(429)) + .expect(1 + u64::from(DEFAULT_RETRIES)) // Middleware retries on 429 by default + .mount(&server) + .await; + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! 
{r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + uv_snapshot!(context.filters(), context + .add() + .arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")) + .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071) + "); + + Ok(()) +} + #[test] #[cfg(feature = "git")] fn add_git_error() -> Result<()> { @@ -4291,7 +4374,7 @@ fn add_lower_bound_local() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" [project] name = "project" version = "0.1.0" @@ -4301,8 +4384,8 @@ fn add_lower_bound_local() -> Result<()> { ] [[tool.uv.index]] - url = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" - "### + url = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" + "# ); }); @@ -4320,7 +4403,7 @@ fn add_lower_bound_local() -> Result<()> { [[package]] name = "local-simple-a" version = "1.2.3+foo" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/local_simple_a-1.2.3+foo.tar.gz", hash = "sha256:ebd55c4a79d0a5759126657cb289ff97558902abcfb142e036b993781497edac" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/local_simple_a-1.2.3+foo-py3-none-any.whl", hash = "sha256:6f30e2e709b3e171cd734bb58705229a582587c29e0a7041227435583c7224cc" }, @@ -7163,10 +7246,7 @@ fn fail_to_add_revert_project() -> Result<()> { .child("setup.py") .write_str("1/0")?; - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::>(); - uv_snapshot!(filters, context.add().arg("./child"), @r#" + uv_snapshot!(context.filters(), context.add().arg("./child"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -7268,10 +7348,7 @@ fn fail_to_edit_revert_project() -> Result<()> { .child("setup.py") .write_str("1/0")?; - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::>(); - uv_snapshot!(filters, context.add().arg("./child"), @r#" + uv_snapshot!(context.filters(), context.add().arg("./child"), @r#" success: false exit_code: 1 ----- stdout ----- @@ -9182,7 +9259,7 @@ fn add_index_with_trailing_slash() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" [project] name = "project" version = "0.1.0" @@ -9195,8 +9272,8 @@ fn add_index_with_trailing_slash() -> Result<()> { constraint-dependencies = ["markupsafe<3"] [[tool.uv.index]] - url = "https://pypi.org/simple/" - "### + url = "https://pypi.org/simple" + "# ); }); @@ -9220,7 +9297,7 @@ fn add_index_with_trailing_slash() -> Result<()> { [[package]] name = "iniconfig" version = "2.0.0" - source = { registry = "https://pypi.org/simple/" } + source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" }, @@ -9363,7 +9440,7 @@ fn add_index_with_existing_relative_path_index() -> Result<()> { let wheel_dst = packages.child("ok-1.0.0-py3-none-any.whl"); fs_err::copy(&wheel_src, &wheel_dst)?; - uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("test-index"), @r" + uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("./test-index"), @r" success: true exit_code: 0 ----- stdout ----- @@ -9392,7 +9469,7 @@ fn add_index_with_non_existent_relative_path() -> Result<()> { dependencies = [] "#})?; - uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("test-index"), @r" + uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("./test-index"), @r" success: false exit_code: 2 ----- stdout ----- @@ -9422,7 +9499,7 @@ fn add_index_with_non_existent_relative_path_with_same_name_as_index() -> Result url = "https://pypi-proxy.fly.dev/simple" "#})?; - uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("test-index"), @r" + uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("./test-index"), @r" success: false exit_code: 2 ----- stdout ----- @@ -9434,6 +9511,82 @@ fn add_index_with_non_existent_relative_path_with_same_name_as_index() -> Result Ok(()) } +#[test] +fn add_index_empty_directory() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [[tool.uv.index]] + name = "test-index" + url = "https://pypi-proxy.fly.dev/simple" + "#})?; + + let packages = context.temp_dir.child("test-index"); + packages.create_dir_all()?; + + uv_snapshot!(context.filters(), context.add().arg("iniconfig").arg("--index").arg("./test-index"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: Index directory `file://[TEMP_DIR]/test-index` is empty, skipping + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "); + + Ok(()) +} + +#[test] +fn add_index_with_ambiguous_relative_path() -> Result<()> { + let context = TestContext::new("3.12"); + let mut filters = context.filters(); + filters.push((r"\./|\.\\\\", r"[PREFIX]")); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! {r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + "#})?; + + #[cfg(unix)] + uv_snapshot!(filters, context.add().arg("iniconfig").arg("--index").arg("test-index"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + warning: Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `[PREFIX]test-index`). 
Support for ambiguous values will be removed in the future + error: Directory not found for index: file://[TEMP_DIR]/test-index + "); + + #[cfg(windows)] + uv_snapshot!(filters, context.add().arg("iniconfig").arg("--index").arg("test-index"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + warning: Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `[PREFIX]test-index` or `[PREFIX]test-index`). Support for ambiguous values will be removed in the future + error: Directory not found for index: file://[TEMP_DIR]/test-index + "); + + Ok(()) +} + /// Add a PyPI requirement. #[test] fn add_group_comment() -> Result<()> { @@ -11041,7 +11194,7 @@ fn repeated_index_cli_reversed() -> Result<()> { filters => context.filters(), }, { assert_snapshot!( - pyproject_toml, @r###" + pyproject_toml, @r#" [project] name = "project" version = "0.1.0" @@ -11051,8 +11204,8 @@ fn repeated_index_cli_reversed() -> Result<()> { ] [[tool.uv.index]] - url = "https://test.pypi.org/simple/" - "### + url = "https://test.pypi.org/simple" + "# ); }); @@ -11073,7 +11226,7 @@ fn repeated_index_cli_reversed() -> Result<()> { [[package]] name = "iniconfig" version = "2.0.0" - source = { registry = "https://test.pypi.org/simple/" } + source = { registry = "https://test.pypi.org/simple" } sdist = { url = "https://test-files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:16.826Z" } wheels = [ { url = "https://test-files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:14.843Z" }, @@ -11453,11 +11606,6 @@ fn add_missing_package_on_pytorch() -> Result<()> { #[tokio::test] async fn add_unexpected_error_code() -> Result<()> { let context = TestContext::new("3.12"); - let filters = context - .filters() - .into_iter() - .chain([(r"127\.0\.0\.1(?::\d+)?", "[LOCALHOST]")]) - .collect::>(); let server = MockServer::start().await; @@ -11476,13 +11624,14 @@ async fn add_unexpected_error_code() -> Result<()> { "# })?; - uv_snapshot!(filters, context.add().arg("anyio").arg("--index").arg(server.uri()), @r" + uv_snapshot!(context.filters(), context.add().arg("anyio").arg("--index").arg(server.uri()), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Failed to fetch: `http://[LOCALHOST]/anyio/` + error: Request failed after 3 retries + Caused by: Failed to fetch: `http://[LOCALHOST]/anyio/` Caused by: HTTP status server error (503 Service Unavailable) for url (http://[LOCALHOST]/anyio/) " ); @@ -11810,6 +11959,255 @@ fn add_auth_policy_never_without_credentials() -> Result<()> { Ok(()) } +/// If uv receives a 302 redirect to a cross-origin server, it should not forward +/// credentials. In the absence of a netrc entry for the new location, +/// it should fail. +#[tokio::test] +async fn add_redirect_cross_origin() -> Result<()> { + let context = TestContext::new("3.12"); + let filters = context + .filters() + .into_iter() + .chain([(r"127\.0\.0\.1:\d*", "[LOCALHOST]")]) + .collect::>(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! 
{ r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.12" + dependencies = [] + "# + })?; + + let redirect_server = MockServer::start().await; + + Mock::given(method("GET")) + .respond_with(|req: &wiremock::Request| { + let redirect_url = redirect_url_to_pypi_proxy(req); + ResponseTemplate::new(302).insert_header("Location", &redirect_url) + }) + .mount(&redirect_server) + .await; + + let mut redirect_url = Url::parse(&redirect_server.uri())?; + let _ = redirect_url.set_username("public"); + let _ = redirect_url.set_password(Some("heron")); + + uv_snapshot!(filters, context.add().arg("--default-index").arg(redirect_url.as_str()).arg("anyio"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because anyio was not found in the package registry and your project depends on anyio, we can conclude that your project's requirements are unsatisfiable. + + hint: An index URL (http://[LOCALHOST]/) could not be queried due to a lack of valid authentication credentials (401 Unauthorized). + help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing. + " + ); + + Ok(()) +} + +/// If uv receives a 302 redirect to a cross-origin server with credentials +/// in the location, use those credentials for the redirect request. +#[tokio::test] +async fn add_redirect_cross_origin_credentials_in_location() -> Result<()> { + let context = TestContext::new("3.12"); + let filters = context + .filters() + .into_iter() + .chain([(r"127\.0\.0\.1:\d*", "[LOCALHOST]")]) + .collect::>(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.12" + dependencies = [] + "# + })?; + + let redirect_server = MockServer::start().await; + + Mock::given(method("GET")) + .respond_with(|req: &wiremock::Request| { + // Responds with credentials in the location + let redirect_url = redirect_url_to_base( + req, + "https://public:heron@pypi-proxy.fly.dev/basic-auth/simple/", + ); + ResponseTemplate::new(302).insert_header("Location", &redirect_url) + }) + .mount(&redirect_server) + .await; + + let redirect_url = Url::parse(&redirect_server.uri())?; + + uv_snapshot!(filters, context.add().arg("--default-index").arg(redirect_url.as_str()).arg("anyio"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + " + ); + + Ok(()) +} + +/// uv currently fails to look up keyring credentials on a cross-origin redirect. +#[tokio::test] +async fn add_redirect_with_keyring_cross_origin() -> Result<()> { + let keyring_context = TestContext::new("3.12"); + + // Install our keyring plugin + keyring_context + .pip_install() + .arg( + keyring_context + .workspace_root + .join("scripts") + .join("packages") + .join("keyring_test_plugin"), + ) + .assert() + .success(); + + let context = TestContext::new("3.12"); + let filters = context + .filters() + .into_iter() + .chain([(r"127\.0\.0\.1:\d*", "[LOCALHOST]")]) + .collect::>(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! 
{ r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.12" + dependencies = [] + + [tool.uv] + keyring-provider = "subprocess" + "#, + })?; + + let redirect_server = MockServer::start().await; + + Mock::given(method("GET")) + .respond_with(|req: &wiremock::Request| { + let redirect_url = redirect_url_to_pypi_proxy(req); + ResponseTemplate::new(302).insert_header("Location", &redirect_url) + }) + .mount(&redirect_server) + .await; + + let mut redirect_url = Url::parse(&redirect_server.uri())?; + let _ = redirect_url.set_username("public"); + + uv_snapshot!(filters, context.add().arg("--default-index") + .arg(redirect_url.as_str()) + .arg("anyio") + .env(EnvVars::KEYRING_TEST_CREDENTIALS, r#"{"pypi-proxy.fly.dev": {"public": "heron"}}"#) + .env(EnvVars::PATH, venv_bin_path(&keyring_context.venv)), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + Keyring request for public@http://[LOCALHOST]/ + Keyring request for public@[LOCALHOST] + × No solution found when resolving dependencies: + ╰─▶ Because anyio was not found in the package registry and your project depends on anyio, we can conclude that your project's requirements are unsatisfiable. + + hint: An index URL (http://[LOCALHOST]/) could not be queried due to a lack of valid authentication credentials (401 Unauthorized). + help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing. + " + ); + + Ok(()) +} + +/// If uv receives a cross-origin 302 redirect, it should use credentials from netrc +/// for the new location. +#[tokio::test] +async fn pip_install_redirect_with_netrc_cross_origin() -> Result<()> { + let context = TestContext::new("3.12"); + let filters = context + .filters() + .into_iter() + .chain([(r"127\.0\.0\.1:\d*", "[LOCALHOST]")]) + .collect::>(); + + let netrc = context.temp_dir.child(".netrc"); + netrc.write_str("machine pypi-proxy.fly.dev login public password heron")?; + + let redirect_server = MockServer::start().await; + + Mock::given(method("GET")) + .respond_with(|req: &wiremock::Request| { + let redirect_url = redirect_url_to_pypi_proxy(req); + ResponseTemplate::new(302).insert_header("Location", &redirect_url) + }) + .mount(&redirect_server) + .await; + + let mut redirect_url = Url::parse(&redirect_server.uri())?; + let _ = redirect_url.set_username("public"); + + uv_snapshot!(filters, context.pip_install() + .arg("anyio") + .arg("--index-url") + .arg(redirect_url.as_str()) + .env(EnvVars::NETRC, netrc.to_str().unwrap()) + .arg("--strict"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "### + ); + + context.assert_command("import anyio").success(); + + Ok(()) +} + +fn redirect_url_to_pypi_proxy(req: &wiremock::Request) -> String { + redirect_url_to_base(req, "https://pypi-proxy.fly.dev/basic-auth/simple/") +} + +fn redirect_url_to_base(req: &wiremock::Request, base: &str) -> String { + let last_path_segment = req + .url + .path_segments() + .expect("path has segments") + .filter(|segment| !segment.is_empty()) + .next_back() + .expect("path has a package segment"); + format!("{base}{last_path_segment}/") +} + /// Test the error message when adding a package with multiple existing references in /// `pyproject.toml`. 
#[test] diff --git a/crates/uv/tests/it/help.rs b/crates/uv/tests/it/help.rs index 6fd9bd466..8faebd040 100644 --- a/crates/uv/tests/it/help.rs +++ b/crates/uv/tests/it/help.rs @@ -292,6 +292,8 @@ fn help_subcommand() { Commands: list List the available Python installations install Download and install Python versions + upgrade Upgrade installed Python versions to the latest supported patch release (requires the + `--preview` flag) find Search for a Python installation pin Pin to a specific Python version dir Show the uv Python installation directory @@ -719,6 +721,8 @@ fn help_flag_subcommand() { Commands: list List the available Python installations install Download and install Python versions + upgrade Upgrade installed Python versions to the latest supported patch release (requires the + `--preview` flag) find Search for a Python installation pin Pin to a specific Python version dir Show the uv Python installation directory @@ -915,6 +919,7 @@ fn help_unknown_subsubcommand() { error: There is no command `foobar` for `uv python`. Did you mean one of: list install + upgrade find pin dir diff --git a/crates/uv/tests/it/init.rs b/crates/uv/tests/it/init.rs index e9e5e54a7..c5993d670 100644 --- a/crates/uv/tests/it/init.rs +++ b/crates/uv/tests/it/init.rs @@ -929,6 +929,65 @@ fn init_script_file_conflicts() -> Result<()> { Ok(()) } +// Init script should not trash an existing shebang. +#[test] +fn init_script_shebang() -> Result<()> { + let context = TestContext::new("3.12"); + + let script_path = context.temp_dir.child("script.py"); + + let contents = "#! /usr/bin/env python3\nprint(\"Hello, world!\")"; + fs_err::write(&script_path, contents)?; + uv_snapshot!(context.filters(), context.init().arg("--script").arg("script.py"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: If you execute script.py directly, it might ignore its inline metadata. + Consider replacing its shebang with: #!/usr/bin/env -S uv run --script + Initialized script at `script.py` + "); + let resulting_script = fs_err::read_to_string(&script_path)?; + assert_snapshot!(resulting_script, @r#" + #! /usr/bin/env python3 + # + # /// script + # requires-python = ">=3.12" + # dependencies = [] + # /// + + print("Hello, world!") + "# + ); + + // If the shebang already contains `uv`, the result is the same, but we suppress the warning. + let contents = "#!/usr/bin/env -S uv run --script\nprint(\"Hello, world!\")"; + fs_err::write(&script_path, contents)?; + uv_snapshot!(context.filters(), context.init().arg("--script").arg("script.py"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Initialized script at `script.py` + "); + let resulting_script = fs_err::read_to_string(&script_path)?; + assert_snapshot!(resulting_script, @r#" + #!/usr/bin/env -S uv run --script + # + # /// script + # requires-python = ">=3.12" + # dependencies = [] + # /// + + print("Hello, world!") + "# + ); + + Ok(()) +} + /// Run `uv init --lib` with an existing py.typed file #[test] fn init_py_typed_exists() -> Result<()> { diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index 4387d348a..5851022b8 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -4535,6 +4535,70 @@ fn lock_requires_python_exact() -> Result<()> { Ok(()) } +/// Lock a requirement from PyPI with a compatible release Python bound. 
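// Aside: a hand-rolled restatement of the PEP 440 compatible-release expansion
// that the warning in the next test prints (illustration only, not uv's
// implementation): `~=` drops the final release segment and bumps the one
// before it.
fn expand_tilde(release: &[u64]) -> (String, String) {
    let lower = release
        .iter()
        .map(u64::to_string)
        .collect::<Vec<_>>()
        .join(".");
    let mut upper = release[..release.len() - 1].to_vec();
    *upper.last_mut().expect("at least two release segments") += 1;
    let upper = upper
        .iter()
        .map(u64::to_string)
        .collect::<Vec<_>>()
        .join(".");
    (format!(">={lower}"), format!("<{upper}"))
}
// expand_tilde(&[3, 13])    == (">=3.13",   "<4")    -- pins only the major!
// expand_tilde(&[3, 13, 0]) == (">=3.13.0", "<3.14") -- pins the minor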
+#[cfg(feature = "python-patch")] +#[test] +fn lock_requires_python_compatible_specifier() -> Result<()> { + let context = TestContext::new("3.13.0"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "warehouse" + version = "1.0.0" + requires-python = "~=3.13" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: The `requires-python` specifier (`~=3.13`) in `warehouse` uses the tilde specifier (`~=`) without a patch version. This will be interpreted as `>=3.13, <4`. Did you mean `~=3.13.0` to constrain the version as `>=3.13.0, <3.14`? We recommend only using the tilde specifier with a patch version to avoid ambiguity. + Resolved 1 package in [TIME] + "); + + pyproject_toml.write_str( + r#" + [project] + name = "warehouse" + version = "1.0.0" + requires-python = "~=3.13, <3.14" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "); + + pyproject_toml.write_str( + r#" + [project] + name = "warehouse" + version = "1.0.0" + requires-python = "~=3.13.0" + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "); + Ok(()) +} + /// Fork, even with a single dependency, if the minimum Python version is increased. #[test] fn lock_requires_python_fork() -> Result<()> { @@ -4731,15 +4795,16 @@ fn lock_requires_python_wheels() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version == '3.12.*'` vs `python_full_version == '3.11.*'` Resolved 2 packages in [TIME] - "###); + "); let lock = fs_err::read_to_string(&lockfile).unwrap(); @@ -4948,14 +5013,14 @@ fn lock_requires_python_not_equal() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Resolved 2 packages in [TIME] - "###); + "); let lock = fs_err::read_to_string(&lockfile).unwrap(); @@ -5259,16 +5324,16 @@ fn lock_requires_python_disjoint() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: The workspace contains conflicting Python requirements: - - `child`: `==3.10` - - `project`: `>=3.12` - "###); + error: Found conflicting Python requirements: + - child: ==3.10 + - project: >=3.12 + "); Ok(()) } @@ -15478,7 +15543,7 @@ fn lock_trailing_slash() -> Result<()> { [[package]] name = "anyio" version = "3.7.0" - source = { registry = "https://pypi.org/simple/" } + source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, @@ -15491,7 +15556,7 @@ fn lock_trailing_slash() -> Result<()> { [[package]] name = "idna" version = "3.6" - source = { registry = "https://pypi.org/simple/" } + source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, @@ -15511,7 +15576,7 @@ fn lock_trailing_slash() -> Result<()> { [[package]] name = "sniffio" version = "1.3.1" - source = { registry = "https://pypi.org/simple/" } + source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, @@ -18298,29 +18363,30 @@ fn lock_request_requires_python() -> Result<()> { )?; // Request a version that conflicts with `--requires-python`. - uv_snapshot!(context.filters(), context.lock().arg("--python").arg("3.12"), @r###" + uv_snapshot!(context.filters(), context.lock().arg("--python").arg("3.12"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - error: The requested interpreter resolved to Python 3.12.[X], which is incompatible with the project's Python requirement: `>=3.8, <=3.10` - "###); + error: The requested interpreter resolved to Python 3.12.[X], which is incompatible with the project's Python requirement: `>=3.8, <=3.10` (from `project.requires-python`) + "); // Add a `.python-version` file that conflicts. let python_version = context.temp_dir.child(".python-version"); python_version.write_str("3.12")?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - error: The Python request from `.python-version` resolved to Python 3.12.[X], which is incompatible with the project's Python requirement: `>=3.8, <=3.10`. Use `uv python pin` to update the `.python-version` file to a compatible version. 
- "###); + error: The Python request from `.python-version` resolved to Python 3.12.[X], which is incompatible with the project's Python requirement: `>=3.8, <=3.10` (from `project.requires-python`) + Use `uv python pin` to update the `.python-version` file to a compatible version + "); Ok(()) } @@ -20660,6 +20726,465 @@ fn lock_group_include() -> Result<()> { Ok(()) } +#[test] +fn lock_group_requires_python() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["typing-extensions"] + + [dependency-groups] + foo = ["idna"] + bar = ["sortedcontainers", "sniffio"] + + [tool.uv.dependency-groups] + bar = { requires-python = ">=3.13" } + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", + ] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." 
} + dependencies = [ + { name = "typing-extensions" }, + ] + + [package.dev-dependencies] + bar = [ + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + foo = [ + { name = "idna" }, + ] + + [package.metadata] + requires-dist = [{ name = "typing-extensions" }] + + [package.metadata.requires-dev] + bar = [ + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + foo = [{ name = "idna" }] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, + ] + + [[package]] + name = "typing-extensions" + version = "4.10.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558, upload-time = "2024-02-25T22:12:49.693Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926, upload-time = "2024-02-25T22:12:47.72Z" }, + ] + "# + ); + }); + + Ok(()) +} + +#[test] +fn lock_group_includes_requires_python() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["typing-extensions"] + + [dependency-groups] + foo = ["idna", {include-group = "bar"}] + bar = ["sortedcontainers", "sniffio"] + baz = ["idna", {include-group = "bar"}] + blargh = ["idna", {include-group = "bar"}] + + [tool.uv.dependency-groups] + bar = { requires-python = ">=3.13" } + baz = { requires-python = ">=3.13.1" } + blargh = { requires-python = ">=3.12.1" } + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 
+ ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + resolution-markers = [ + "python_full_version >= '3.13.1'", + "python_full_version >= '3.13' and python_full_version < '3.13.1'", + "python_full_version >= '3.12.[X]' and python_full_version < '3.13'", + "python_full_version < '3.12.[X]'", + ] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "typing-extensions" }, + ] + + [package.dev-dependencies] + bar = [ + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + baz = [ + { name = "idna", marker = "python_full_version >= '3.13.1'" }, + { name = "sniffio", marker = "python_full_version >= '3.13.1'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13.1'" }, + ] + blargh = [ + { name = "idna", marker = "python_full_version >= '3.12.[X]'" }, + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + foo = [ + { name = "idna" }, + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + + [package.metadata] + requires-dist = [{ name = "typing-extensions" }] + + [package.metadata.requires-dev] + bar = [ + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + baz = [ + { name = "idna", marker = "python_full_version >= '3.13.1'" }, + { name = "sniffio", marker = "python_full_version >= '3.13.1'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13.1'" }, + ] + blargh = [ + { name = "idna", marker = "python_full_version >= '3.12.[X]'" }, + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + foo = [ + { name = "idna" }, + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, + ] + + [[package]] + name = "typing-extensions" + version = "4.10.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558, upload-time = "2024-02-25T22:12:49.693Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926, upload-time = "2024-02-25T22:12:47.72Z" }, + ] + "# + ); + }); + + Ok(()) +} + +/// Referring to a dependency-group with group-requires-python that does not exist +#[test] +fn lock_group_requires_undefined_group() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "myproject" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["typing-extensions"] + + [dependency-groups] + bar = ["sortedcontainers"] + + [tool.uv.dependency-groups] + foo = { requires-python = ">=3.13" } + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Project `myproject` has malformed dependency groups + Caused by: Failed to find group `foo` specified in `[tool.uv.dependency-groups]` + "); + Ok(()) +} + +/// The legacy dev-dependencies cannot be referred to by group-requires-python +#[test] +fn lock_group_requires_dev_dep() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "myproject" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["typing-extensions"] + + [tool.uv] + dev-dependencies = ["sortedcontainers"] + + [tool.uv.dependency-groups] + dev = { requires-python = ">=3.13" } + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Project `myproject` has malformed dependency groups + Caused by: `[tool.uv.dependency-groups]` specifies the `dev` group, but only `tool.uv.dev-dependencies` was found. 
To reference the `dev` group, remove the `tool.uv.dev-dependencies` section and add any development dependencies to the `dev` entry in the `[dependency-groups]` table instead. + "); + Ok(()) +} + +#[test] +fn lock_group_includes_requires_python_contradiction() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["typing-extensions"] + + [dependency-groups] + foo = ["idna", {include-group = "bar"}] + bar = ["sortedcontainers", "sniffio"] + + [tool.uv.dependency-groups] + bar = { requires-python = ">=3.13" } + foo = { requires-python = "<3.13" } + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", + ] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." 
} + dependencies = [ + { name = "typing-extensions" }, + ] + + [package.dev-dependencies] + bar = [ + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + foo = [ + { name = "idna", marker = "python_full_version < '3.13'" }, + ] + + [package.metadata] + requires-dist = [{ name = "typing-extensions" }] + + [package.metadata.requires-dev] + bar = [ + { name = "sniffio", marker = "python_full_version >= '3.13'" }, + { name = "sortedcontainers", marker = "python_full_version >= '3.13'" }, + ] + foo = [ + { name = "idna", marker = "python_full_version < '3.13'" }, + { name = "sniffio", marker = "python_version < '0'" }, + { name = "sortedcontainers", marker = "python_version < '0'" }, + ] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + + [[package]] + name = "sortedcontainers" + version = "2.4.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" }, + ] + + [[package]] + name = "typing-extensions" + version = "4.10.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558, upload-time = "2024-02-25T22:12:49.693Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926, upload-time = "2024-02-25T22:12:47.72Z" }, + ] + "# + ); + }); + + Ok(()) +} + #[test] fn lock_group_include_cycle() -> Result<()> { let context = TestContext::new("3.12"); @@ -20680,15 +21205,15 @@ fn lock_group_include_cycle() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `project @ file://[TEMP_DIR]/` - ╰─▶ Detected a cycle in `dependency-groups`: `bar` -> `foobar` -> `foo` -> `bar` - "###); + error: Project `project` has malformed dependency 
groups + Caused by: Detected a cycle in `dependency-groups`: `bar` -> `foobar` -> `foo` -> `bar` + "); Ok(()) } @@ -20714,15 +21239,15 @@ fn lock_group_include_dev() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r#" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `project @ file://[TEMP_DIR]/` - ╰─▶ Group `foo` includes the `dev` group (`include = "dev"`), but only `tool.uv.dev-dependencies` was found. To reference the `dev` group via an `include`, remove the `tool.uv.dev-dependencies` section and add any development dependencies to the `dev` entry in the `[dependency-groups]` table instead. - "###); + error: Project `project` has malformed dependency groups + Caused by: Group `foo` includes the `dev` group (`include = "dev"`), but only `tool.uv.dev-dependencies` was found. To reference the `dev` group via an `include`, remove the `tool.uv.dev-dependencies` section and add any development dependencies to the `dev` entry in the `[dependency-groups]` table instead. + "#); Ok(()) } @@ -20745,15 +21270,15 @@ fn lock_group_include_missing() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `project @ file://[TEMP_DIR]/` - ╰─▶ Failed to find group `bar` included by `foo` - "###); + error: Project `project` has malformed dependency groups + Caused by: Failed to find group `bar` included by `foo` + "); Ok(()) } @@ -20776,31 +21301,31 @@ fn lock_group_invalid_entry_package() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.lock(), @r###" + uv_snapshot!(context.filters(), context.lock(), @r#" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `project @ file://[TEMP_DIR]/` - ├─▶ Failed to parse entry in group `foo`: `invalid!` - ╰─▶ no such comparison operator "!", must be one of ~= == != <= >= < > === - invalid! - ^ - "###); + error: Project `project` has malformed dependency groups + Caused by: Failed to parse entry in group `foo`: `invalid!` + Caused by: no such comparison operator "!", must be one of ~= == != <= >= < > === + invalid! + ^ + "#); - uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--group").arg("foo"), @r#" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `project @ file://[TEMP_DIR]/` - ├─▶ Failed to parse entry in group `foo`: `invalid!` - ╰─▶ no such comparison operator "!", must be one of ~= == != <= >= < > === - invalid! - ^ - "###); + error: Project `project` has malformed dependency groups + Caused by: Failed to parse entry in group `foo`: `invalid!` + Caused by: no such comparison operator "!", must be one of ~= == != <= >= < > === + invalid! 
+             ^
+    "#);
 
     Ok(())
 }
@@ -20897,12 +21422,12 @@ fn lock_group_invalid_entry_table() -> Result<()> {
 
     uv_snapshot!(context.filters(), context.lock(), @r#"
     success: false
-    exit_code: 1
+    exit_code: 2
     ----- stdout -----
 
     ----- stderr -----
-      × Failed to build `project @ file://[TEMP_DIR]/`
-      ╰─▶ Group `foo` contains an unknown dependency object specifier: {"bar": "unknown"}
+    error: Project `project` has malformed dependency groups
+      Caused by: Group `foo` contains an unknown dependency object specifier: {"bar": "unknown"}
     "#);
 
     Ok(())
@@ -23092,10 +23617,7 @@ fn lock_derivation_chain_prod() -> Result<()> {
     let filters = context
         .filters()
         .into_iter()
-        .chain([
-            (r"exit code: 1", "exit status: 1"),
-            (r"/.*/src", "/[TMP]/src"),
-        ])
+        .chain([(r"/.*/src", "/[TMP]/src")])
         .collect::<Vec<_>>();
 
     uv_snapshot!(filters, context.lock(), @r###"
@@ -23152,10 +23674,7 @@ fn lock_derivation_chain_extra() -> Result<()> {
     let filters = context
         .filters()
         .into_iter()
-        .chain([
-            (r"exit code: 1", "exit status: 1"),
-            (r"/.*/src", "/[TMP]/src"),
-        ])
+        .chain([(r"/.*/src", "/[TMP]/src")])
         .collect::<Vec<_>>();
 
     uv_snapshot!(filters, context.lock(), @r###"
@@ -23214,10 +23733,7 @@ fn lock_derivation_chain_group() -> Result<()> {
     let filters = context
         .filters()
        .into_iter()
-        .chain([
-            (r"exit code: 1", "exit status: 1"),
-            (r"/.*/src", "/[TMP]/src"),
-        ])
+        .chain([(r"/.*/src", "/[TMP]/src")])
         .collect::<Vec<_>>();
 
     uv_snapshot!(filters, context.lock(), @r###"
@@ -23287,10 +23803,7 @@ fn lock_derivation_chain_extended() -> Result<()> {
     let filters = context
         .filters()
         .into_iter()
-        .chain([
-            (r"exit code: 1", "exit status: 1"),
-            (r"/.*/src", "/[TMP]/src"),
-        ])
+        .chain([(r"/.*/src", "/[TMP]/src")])
         .collect::<Vec<_>>();
 
     uv_snapshot!(filters, context.lock(), @r###"
@@ -26997,7 +27510,7 @@ fn windows_arm() -> Result<()> {
             lock, @r#"
             version = 1
             revision = 2
-            requires-python = ">=3.12.[X], <3.13"
+            requires-python = "==3.12.*"
             resolution-markers = [
                 "platform_machine == 'x86_64' and sys_platform == 'linux'",
                 "platform_machine == 'AMD64' and sys_platform == 'win32'",
@@ -27035,6 +27548,81 @@
     Ok(())
 }
 
+#[test]
+fn windows_amd64_required() -> Result<()> {
+    let context = TestContext::new("3.12").with_exclude_newer("2025-01-30T00:00:00Z");
+
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(
+        r#"
+        [project]
+        name = "pywin32-prj"
+        version = "0.1.0"
+        requires-python = "~=3.12.0"
+        dependencies = ["pywin32; sys_platform == 'win32'"]
+
+        [tool.uv]
+        required-environments = [
+            "sys_platform == 'win32' and platform_machine == 'x86'",
+            "sys_platform == 'win32' and platform_machine == 'AMD64'",
+        ]
+        "#,
+    )?;
+
+    uv_snapshot!(context.filters(), context.lock(), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Resolved 2 packages in [TIME]
+    "###);
+
+    let lock = context.read("uv.lock");
+
+    insta::with_settings!({
+        filters => context.filters(),
+    }, {
+        assert_snapshot!(
+            lock, @r#"
+            version = 1
+            revision = 2
+            requires-python = "==3.12.*"
+            required-markers = [
+                "platform_machine == 'x86' and sys_platform == 'win32'",
+                "platform_machine == 'AMD64' and sys_platform == 'win32'",
+            ]
+
+            [options]
+            exclude-newer = "2025-01-30T00:00:00Z"
+
+            [[package]]
+            name = "pywin32"
+            version = "308"
+            source = { registry = "https://pypi.org/simple" }
+            wheels = [
+                { url = "https://files.pythonhosted.org/packages/00/7c/d00d6bdd96de4344e06c4afbf218bc86b54436a94c01c71a8701f613aa56/pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897", size = 5939729, upload-time = "2024-10-12T20:42:12.001Z" },
+                { url = "https://files.pythonhosted.org/packages/21/27/0c8811fbc3ca188f93b5354e7c286eb91f80a53afa4e11007ef661afa746/pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47", size = 6543015, upload-time = "2024-10-12T20:42:14.044Z" },
+                { url = "https://files.pythonhosted.org/packages/9d/0f/d40f8373608caed2255781a3ad9a51d03a594a1248cd632d6a298daca693/pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091", size = 7976033, upload-time = "2024-10-12T20:42:16.215Z" },
+            ]
+
+            [[package]]
+            name = "pywin32-prj"
+            version = "0.1.0"
+            source = { virtual = "." }
+            dependencies = [
+                { name = "pywin32", marker = "sys_platform == 'win32'" },
+            ]
+
+            [package.metadata]
+            requires-dist = [{ name = "pywin32", marker = "sys_platform == 'win32'" }]
+            "#
+        );
+    });
+
+    Ok(())
+}
+
 #[test]
 fn lock_empty_extra() -> Result<()> {
     let context = TestContext::new("3.12");
@@ -27485,6 +28073,170 @@ fn lock_conflict_for_disjoint_python_version() -> Result<()> {
     Ok(())
 }
 
+/// Check that an existing lockfile is discarded when its fork markers are
+/// disjoint with an updated `requires-python`.
+#[cfg(feature = "python-patch")]
+#[test]
+fn lock_requires_python_empty_lock_file() -> Result<()> {
+    // N.B. These versions were selected based on what was
+    // in `.python-versions` at the time of writing (2025-06-16).
+    let (v1, v2) = ("3.13.0", "3.13.2");
+    let context = TestContext::new_with_versions(&[v1, v2]);
+
+    let pyproject_toml = context.temp_dir.child("pyproject.toml");
+    pyproject_toml.write_str(&format!(
+        r#"
+        [project]
+        name = "renovate-bug-repro"
+        version = "0.1.0"
+        requires-python = "=={v1}"
+        dependencies = ["opencv-python-headless>=4.8"]
+        "#,
+    ))?;
+
+    uv_snapshot!(context.filters(), context.lock(), @r"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+
+    ----- stderr -----
+    Using CPython 3.13.0 interpreter at: [PYTHON-3.13.0]
+    Resolved 3 packages in [TIME]
+    ");
+
+    let lock = context.read("uv.lock");
+    insta::with_settings!({
+        filters => context.filters(),
+    }, {
+        assert_snapshot!(
+            lock, @r#"
+            version = 1
+            revision = 2
+            requires-python = "==3.13.0"
+            resolution-markers = [
+                "sys_platform == 'darwin'",
+                "platform_machine == 'aarch64' and sys_platform == 'linux'",
+                "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')",
+            ]
+
+            [options]
+            exclude-newer = "2024-03-25T00:00:00Z"
+
+            [[package]]
+            name = "numpy"
+            version = "1.26.4"
+            source = { registry = "https://pypi.org/simple" }
+            sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" }
+
+            [[package]]
+            name = "opencv-python-headless"
+            version = "4.9.0.80"
+            source = { registry = "https://pypi.org/simple" }
+            dependencies = [
+                { name = "numpy" },
+            ]
+            sdist = { url = "https://files.pythonhosted.org/packages/3d/b2/c308bc696bf5d75304175c62222ec8af9a6d5cfe36c14f19f15ea9d1a132/opencv-python-headless-4.9.0.80.tar.gz", hash = "sha256:71a4cd8cf7c37122901d8e81295db7fb188730e33a0e40039a4e59c1030b0958", size = 92910044, upload-time = "2023-12-31T13:34:50.518Z" }
+            wheels = [
+                { url =
"https://files.pythonhosted.org/packages/ac/42/da433fca5733a3ce7e88dd0d4018f70dcffaf48770b5142250815f4faddb/opencv_python_headless-4.9.0.80-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:2ea8a2edc4db87841991b2fbab55fc07b97ecb602e0f47d5d485bd75cee17c1a", size = 55689478, upload-time = "2023-12-31T14:31:30.476Z" }, + { url = "https://files.pythonhosted.org/packages/32/0c/a59f2a40d6058ee8126668dc5dff6977c913f6ecd21dbd15b41563409a18/opencv_python_headless-4.9.0.80-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e0ee54e27be493e8f7850847edae3128e18b540dac1d7b2e4001b8944e11e1c6", size = 35354670, upload-time = "2023-12-31T16:38:31.588Z" }, + { url = "https://files.pythonhosted.org/packages/36/37/225a1f8be42610ffecf677558311ab0f9dfdc63537c250a2bce76762a380/opencv_python_headless-4.9.0.80-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57ce2865e8fec431c6f97a81e9faaf23fa5be61011d0a75ccf47a3c0d65fa73d", size = 28954368, upload-time = "2023-12-31T16:40:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/71/19/3c65483a80a1d062d46ae20faf5404712d25cb1dfdcaf371efbd67c38544/opencv_python_headless-4.9.0.80-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:976656362d68d9f40a5c66f83901430538002465f7db59142784f3893918f3df", size = 49591873, upload-time = "2023-12-31T13:34:44.316Z" }, + { url = "https://files.pythonhosted.org/packages/10/98/300382ff6ddff3a487e808c8a76362e430f5016002fcbefb3b3117aad32b/opencv_python_headless-4.9.0.80-cp37-abi3-win32.whl", hash = "sha256:11e3849d83e6651d4e7699aadda9ec7ed7c38957cbbcb99db074f2a2d2de9670", size = 28488841, upload-time = "2023-12-31T13:34:31.974Z" }, + { url = "https://files.pythonhosted.org/packages/20/44/458a0a135866f5e08266566b32ad9a182a7a059a894effe6c41a9c841ff1/opencv_python_headless-4.9.0.80-cp37-abi3-win_amd64.whl", hash = "sha256:a8056c2cb37cd65dfcdf4153ca16f7362afcf3a50d600d6bb69c660fc61ee29c", size = 38536073, upload-time = "2023-12-31T13:34:39.675Z" }, + ] + + [[package]] + name = "renovate-bug-repro" + version = "0.1.0" + source = { virtual = "." 
} + dependencies = [ + { name = "opencv-python-headless" }, + ] + + [package.metadata] + requires-dist = [{ name = "opencv-python-headless", specifier = ">=4.8" }] + "# + ); + }); + + pyproject_toml.write_str(&format!( + r#" + [project] + name = "renovate-bug-repro" + version = "0.1.0" + requires-python = "=={v2}" + dependencies = ["opencv-python-headless>=4.8"] + "#, + ))?; + + uv_snapshot!(context.filters(), context.lock().arg("--upgrade-package=python"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.13.2 interpreter at: [PYTHON-3.13.2] + warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version == '3.13.0'` vs `python_full_version == '3.13.2'` + Resolved 3 packages in [TIME] + "); + + let lock = context.read("uv.lock"); + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = "==3.13.2" + resolution-markers = [ + "sys_platform == 'darwin'", + "platform_machine == 'aarch64' and sys_platform == 'linux'", + "(platform_machine != 'aarch64' and sys_platform == 'linux') or (sys_platform != 'darwin' and sys_platform != 'linux')", + ] + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "numpy" + version = "1.26.4" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } + + [[package]] + name = "opencv-python-headless" + version = "4.9.0.80" + source = { registry = "https://pypi.org/simple" } + dependencies = [ + { name = "numpy" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/3d/b2/c308bc696bf5d75304175c62222ec8af9a6d5cfe36c14f19f15ea9d1a132/opencv-python-headless-4.9.0.80.tar.gz", hash = "sha256:71a4cd8cf7c37122901d8e81295db7fb188730e33a0e40039a4e59c1030b0958", size = 92910044, upload-time = "2023-12-31T13:34:50.518Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/ac/42/da433fca5733a3ce7e88dd0d4018f70dcffaf48770b5142250815f4faddb/opencv_python_headless-4.9.0.80-cp37-abi3-macosx_10_16_x86_64.whl", hash = "sha256:2ea8a2edc4db87841991b2fbab55fc07b97ecb602e0f47d5d485bd75cee17c1a", size = 55689478, upload-time = "2023-12-31T14:31:30.476Z" }, + { url = "https://files.pythonhosted.org/packages/32/0c/a59f2a40d6058ee8126668dc5dff6977c913f6ecd21dbd15b41563409a18/opencv_python_headless-4.9.0.80-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:e0ee54e27be493e8f7850847edae3128e18b540dac1d7b2e4001b8944e11e1c6", size = 35354670, upload-time = "2023-12-31T16:38:31.588Z" }, + { url = "https://files.pythonhosted.org/packages/36/37/225a1f8be42610ffecf677558311ab0f9dfdc63537c250a2bce76762a380/opencv_python_headless-4.9.0.80-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57ce2865e8fec431c6f97a81e9faaf23fa5be61011d0a75ccf47a3c0d65fa73d", size = 28954368, upload-time = "2023-12-31T16:40:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/71/19/3c65483a80a1d062d46ae20faf5404712d25cb1dfdcaf371efbd67c38544/opencv_python_headless-4.9.0.80-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:976656362d68d9f40a5c66f83901430538002465f7db59142784f3893918f3df", size = 49591873, upload-time = "2023-12-31T13:34:44.316Z" }, + { 
url = "https://files.pythonhosted.org/packages/10/98/300382ff6ddff3a487e808c8a76362e430f5016002fcbefb3b3117aad32b/opencv_python_headless-4.9.0.80-cp37-abi3-win32.whl", hash = "sha256:11e3849d83e6651d4e7699aadda9ec7ed7c38957cbbcb99db074f2a2d2de9670", size = 28488841, upload-time = "2023-12-31T13:34:31.974Z" }, + { url = "https://files.pythonhosted.org/packages/20/44/458a0a135866f5e08266566b32ad9a182a7a059a894effe6c41a9c841ff1/opencv_python_headless-4.9.0.80-cp37-abi3-win_amd64.whl", hash = "sha256:a8056c2cb37cd65dfcdf4153ca16f7362afcf3a50d600d6bb69c660fc61ee29c", size = 38536073, upload-time = "2023-12-31T13:34:39.675Z" }, + ] + + [[package]] + name = "renovate-bug-repro" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "opencv-python-headless" }, + ] + + [package.metadata] + requires-dist = [{ name = "opencv-python-headless", specifier = ">=4.8" }] + "# + ); + }); + + Ok(()) +} + /// Check that we hint if the resolution failed for a different platform. #[test] fn lock_conflict_for_disjoint_platform() -> Result<()> { @@ -27557,3 +28309,438 @@ fn lock_conflict_for_disjoint_platform() -> Result<()> { Ok(()) } + +/// Add a package with an `--index` URL with no trailing slash. Run `uv lock --locked` +/// with a `pyproject.toml` with that same URL but with a trailing slash. +#[test] +fn lock_with_inconsistent_trailing_slash() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = [] + + [[tool.uv.index]] + name = "pypi-proxy" + url = "https://pypi-proxy.fly.dev/simple/" + "#, + )?; + + let no_trailing_slash_url = "https://pypi-proxy.fly.dev/simple"; + + uv_snapshot!(context.filters(), context.add().arg("anyio").arg("--index").arg(no_trailing_slash_url), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "); + + let lock = context.read("uv.lock"); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi-proxy.fly.dev/simple" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi-proxy.fly.dev/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = 
"2023-11-25T15:40:54.902Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", specifier = ">=4.3.0" }] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi-proxy.fly.dev/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + ); + }); + + // Re-run with `--locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + "); + + Ok(()) +} + +/// Run `uv lock --locked` with a lockfile with trailing slashes on index URLs. +#[test] +fn lock_with_index_trailing_slashes_in_lockfile() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["anyio"] + + [[tool.uv.index]] + name = "pypi-proxy" + url = "https://pypi-proxy.fly.dev/simple" + + [tool.uv.sources] + anyio = { index = "pypi-proxy" } + "#, + )?; + + let lock = context.temp_dir.child("uv.lock"); + lock.write_str(r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi-proxy.fly.dev/simple/" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi-proxy.fly.dev/simple/" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", index = "https://pypi-proxy.fly.dev/simple/" }] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi-proxy.fly.dev/simple/" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + )?; + + // Run `uv lock --locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + "); + + Ok(()) +} + +/// Run `uv lock --locked` with a lockfile with trailing slashes on index URLs. +#[test] +fn lock_with_index_trailing_slashes_in_pyproject_toml() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["anyio"] + + [[tool.uv.index]] + name = "pypi-proxy" + url = "https://pypi-proxy.fly.dev/simple/" + + [tool.uv.sources] + anyio = { index = "pypi-proxy" } + "#, + )?; + + let lock = context.temp_dir.child("uv.lock"); + lock.write_str(r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi-proxy.fly.dev/simple" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi-proxy.fly.dev/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", index = "https://pypi-proxy.fly.dev/simple" }] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi-proxy.fly.dev/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + )?; + + // Run `uv lock --locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + "); + + Ok(()) +} + +/// Run `uv lock --locked` with a lockfile with trailing slashes on index URLs. +#[test] +fn lock_with_index_trailing_slashes_in_lockfile_and_pyproject_toml() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["anyio"] + + [[tool.uv.index]] + name = "pypi-proxy" + url = "https://pypi-proxy.fly.dev/simple/" + + [tool.uv.sources] + anyio = { index = "pypi-proxy" } + "#, + )?; + + let lock = context.temp_dir.child("uv.lock"); + lock.write_str(r#" + version = 1 + revision = 2 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi-proxy.fly.dev/simple/" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi-proxy.fly.dev/simple/" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" } + wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "anyio" }, + ] + + [package.metadata] + requires-dist = [{ name = "anyio", index = "https://pypi-proxy.fly.dev/simple/" }] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi-proxy.fly.dev/simple/" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + ] + "# + )?; + + // Run `uv lock --locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 4 packages in [TIME] + "); + + Ok(()) +} + +#[test] +fn lock_prefix_match() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["anyio==5.4.*"] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because only anyio<=4.3.0 is available and your project depends on anyio==5.4.*, we can conclude that your project's requirements are unsatisfiable. + "); + + Ok(()) +} + +/// Regression test for . 
+#[test] +fn test_tilde_equals_python_version() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "debug" + version = "0.1.0" + requires-python = ">=3.9" + dependencies = [ + "anyio==4.2.0; python_full_version >= '3.11'", + "anyio==4.3.0; python_full_version ~= '3.10.0'", + ] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 7 packages in [TIME] + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/lock_scenarios.rs b/crates/uv/tests/it/lock_scenarios.rs index 3be986ad1..801214fa5 100644 --- a/crates/uv/tests/it/lock_scenarios.rs +++ b/crates/uv/tests/it/lock_scenarios.rs @@ -158,7 +158,7 @@ fn wrong_backtracking_basic() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_basic_a-1.0.0.tar.gz", hash = "sha256:5251a827291d4e5b7ca11c742df3aa26802cc55442e3f5fc307ff3423b8f9295" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_basic_a-1.0.0-py3-none-any.whl", hash = "sha256:d9a7ee79b176cd36c9db03e36bc3325856dd4fb061aefc6159eecad6e8776e88" }, @@ -167,7 +167,7 @@ fn wrong_backtracking_basic() -> Result<()> { [[package]] name = "package-b" version = "2.0.9" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-a" }, ] @@ -340,7 +340,7 @@ fn wrong_backtracking_indirect() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_a-2.0.0.tar.gz", hash = "sha256:5891b5a45aac67b3afb90f66913d7ced2ada7cad1676fe427136b7324935bb1e" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_a-2.0.0-py3-none-any.whl", hash = "sha256:68cb37193f4b2277630ad083522f59ac0449cb1c59e943884d04cc0e2a04cba7" }, @@ -349,7 +349,7 @@ fn wrong_backtracking_indirect() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-b-inner" }, ] @@ -361,7 +361,7 @@ fn wrong_backtracking_indirect() -> Result<()> { [[package]] name = "package-b-inner" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-too-old" }, ] @@ -373,7 +373,7 @@ fn wrong_backtracking_indirect() -> Result<()> { [[package]] name = "package-too-old" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = 
"https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_too_old-1.0.0.tar.gz", hash = "sha256:1b674a931c34e29d20f22e9b92206b648769fa9e35770ab680466dbaa1335090" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_too_old-1.0.0-py3-none-any.whl", hash = "sha256:15f8fe39323691c883c3088f8873220944428210a74db080f60a61a74c1fc6b0" }, @@ -477,7 +477,7 @@ fn fork_allows_non_conflicting_non_overlapping_dependencies() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_non_overlapping_dependencies_a-1.0.0.tar.gz", hash = "sha256:dd40a6bd59fbeefbf9f4936aec3df6fb6017e57d334f85f482ae5dd03ae353b9" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_non_overlapping_dependencies_a-1.0.0-py3-none-any.whl", hash = "sha256:8111e996c2a4e04c7a7cf91cf6f8338f5195c22ecf2303d899c4ef4e718a8175" }, @@ -592,7 +592,7 @@ fn fork_allows_non_conflicting_repeated_dependencies() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_repeated_dependencies_a-1.0.0.tar.gz", hash = "sha256:45ca30f1f66eaf6790198fad279b6448719092f2128f23b99f2ede0d6dde613b" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_repeated_dependencies_a-1.0.0-py3-none-any.whl", hash = "sha256:fc3f6d2fab10d1bb4f52bd9a7de69dc97ed1792506706ca78bdc9e95d6641a6b" }, @@ -699,7 +699,7 @@ fn fork_basic() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -711,7 +711,7 @@ fn fork_basic() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -725,8 +725,8 @@ fn fork_basic() -> Result<()> { version = "0.1.0" source = { virtual = "." 
@@ -725,8 +725,8 @@ fn fork_basic() -> Result<()> {
         version = "0.1.0"
         source = { virtual = "." }
         dependencies = [
-            { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" },
-            { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" },
+            { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" },
+            { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" },
         ]

         [package.metadata]
@@ -1002,7 +1002,7 @@ fn fork_filter_sibling_dependencies() -> Result<()> {
         [[package]]
         name = "package-a"
         version = "4.3.0"
-        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
+        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }
         resolution-markers = [
             "sys_platform == 'darwin'",
         ]
@@ -1014,7 +1014,7 @@ fn fork_filter_sibling_dependencies() -> Result<()> {
         [[package]]
         name = "package-a"
         version = "4.4.0"
-        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
+        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }
         resolution-markers = [
             "sys_platform == 'linux'",
         ]
@@ -1026,9 +1026,9 @@ fn fork_filter_sibling_dependencies() -> Result<()> {
         [[package]]
         name = "package-b"
         version = "1.0.0"
-        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
+        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }
         dependencies = [
-            { name = "package-d", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" },
+            { name = "package-d", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" },
         ]
         sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_filter_sibling_dependencies_b-1.0.0.tar.gz", hash = "sha256:af3f861d6df9a2bbad55bae02acf17384ea2efa1abbf19206ac56cb021814613" }
         wheels = [
@@ -1038,9 +1038,9 @@ fn fork_filter_sibling_dependencies() -> Result<()> {
         [[package]]
         name = "package-c"
         version = "1.0.0"
-        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
+        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }
         dependencies = [
-            { name = "package-d", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" },
+            { name = "package-d", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" },
         ]
         sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_filter_sibling_dependencies_c-1.0.0.tar.gz", hash = "sha256:c03742ca6e81c2a5d7d8cb72d1214bf03b2925e63858a19097f17d3e1a750192" }
         wheels = [
@@ -1050,7 +1050,7 @@ fn fork_filter_sibling_dependencies() -> Result<()> {
         [[package]]
         name = "package-d"
         version = "1.0.0"
-        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
+        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }
         resolution-markers = [
             "sys_platform == 
'linux'", ] @@ -1062,7 +1062,7 @@ fn fork_filter_sibling_dependencies() -> Result<()> { [[package]] name = "package-d" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -1076,8 +1076,8 @@ fn fork_filter_sibling_dependencies() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "4.3.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "4.4.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "4.3.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "4.4.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, { name = "package-b", marker = "sys_platform == 'linux'" }, { name = "package-c", marker = "sys_platform == 'darwin'" }, ] @@ -1180,7 +1180,7 @@ fn fork_upgrade() -> Result<()> { [[package]] name = "package-bar" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_upgrade_bar-2.0.0.tar.gz", hash = "sha256:2e7b5370d7be19b5af56092a8364a2718a7b8516142a12a95656b82d1b9c8cbc" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_upgrade_bar-2.0.0-py3-none-any.whl", hash = "sha256:d8ce562bf363e849fbf4add170a519b5412ab63e378fb4b7ea290183c77616fc" }, @@ -1189,7 +1189,7 @@ fn fork_upgrade() -> Result<()> { [[package]] name = "package-foo" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-bar" }, ] @@ -1310,7 +1310,7 @@ fn fork_incomplete_markers() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "python_full_version < '3.10'", ] @@ -1322,7 +1322,7 @@ fn fork_incomplete_markers() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "python_full_version >= '3.11'", ] @@ -1334,7 +1334,7 @@ fn fork_incomplete_markers() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "python_full_version == '3.10.*'" }, ] @@ -1346,7 +1346,7 @@ fn fork_incomplete_markers() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { 
registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_incomplete_markers_c-1.0.0.tar.gz", hash = "sha256:ecc02ea1cc8d3b561c8dcb9d2ba1abcdae2dd32de608bf8e8ed2878118426022" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_incomplete_markers_c-1.0.0-py3-none-any.whl", hash = "sha256:03fa287aa4cb78457211cb3df7459b99ba1ee2259aae24bc745eaab45e7eaaee" }, @@ -1357,8 +1357,8 @@ fn fork_incomplete_markers() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "python_full_version < '3.10'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "python_full_version >= '3.11'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "python_full_version < '3.10'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "python_full_version >= '3.11'" }, { name = "package-b" }, ] @@ -1462,7 +1462,7 @@ fn fork_marker_accrue() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "sys_platform == 'linux'" }, ] @@ -1474,7 +1474,7 @@ fn fork_marker_accrue() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "sys_platform == 'darwin'" }, ] @@ -1486,7 +1486,7 @@ fn fork_marker_accrue() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_accrue_c-1.0.0.tar.gz", hash = "sha256:a3e09ac3dc8e787a08ebe8d5d6072e09720c76cbbcb76a6645d6f59652742015" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_accrue_c-1.0.0-py3-none-any.whl", hash = "sha256:b0c8719d38c91b2a8548bd065b1d2153fbe031b37775ed244e76fe5bdfbb502e" }, @@ -1680,15 +1680,15 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'pypy' and sys_platform == 'darwin'", "implementation_name == 'cpython' and sys_platform == 'darwin'", "implementation_name != 'cpython' and implementation_name != 'pypy' and sys_platform == 'darwin'", ] dependencies = [ - { name = "package-b", version = "1.0.0", source = { registry = 
"https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "implementation_name == 'pypy' and sys_platform == 'darwin'" }, - { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "implementation_name == 'cpython' and sys_platform == 'darwin'" }, + { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "implementation_name == 'pypy' and sys_platform == 'darwin'" }, + { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "implementation_name == 'cpython' and sys_platform == 'darwin'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_combined_allowed_a-1.0.0.tar.gz", hash = "sha256:c7232306e8597d46c3fe53a3b1472f99b8ff36b3169f335ba0a5b625e193f7d4" } wheels = [ @@ -1698,7 +1698,7 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -1710,7 +1710,7 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'pypy' and sys_platform == 'darwin'", ] @@ -1725,7 +1725,7 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> { [[package]] name = "package-b" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'cpython' and sys_platform == 'darwin'", ] @@ -1737,7 +1737,7 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_combined_allowed_c-1.0.0.tar.gz", hash = "sha256:7ce8efca029cfa952e64f55c2d47fe33975c7f77ec689384bda11cbc3b7ef1db" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_combined_allowed_c-1.0.0-py3-none-any.whl", hash = "sha256:6a6b776dedabceb6a6c4f54a5d932076fa3fed1380310491999ca2d31e13b41c" }, @@ -1748,8 +1748,8 @@ fn fork_marker_inherit_combined_allowed() -> Result<()> { version = "0.1.0" source = { virtual = "." 
} dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -1866,15 +1866,15 @@ fn fork_marker_inherit_combined_disallowed() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'pypy' and sys_platform == 'darwin'", "implementation_name == 'cpython' and sys_platform == 'darwin'", "implementation_name != 'cpython' and implementation_name != 'pypy' and sys_platform == 'darwin'", ] dependencies = [ - { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "implementation_name == 'pypy' and sys_platform == 'darwin'" }, - { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "implementation_name == 'cpython' and sys_platform == 'darwin'" }, + { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "implementation_name == 'pypy' and sys_platform == 'darwin'" }, + { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "implementation_name == 'cpython' and sys_platform == 'darwin'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_combined_disallowed_a-1.0.0.tar.gz", hash = "sha256:92081d91570582f3a94ed156f203de53baca5b3fdc350aa1c831c7c42723e798" } wheels = [ @@ -1884,7 +1884,7 @@ fn fork_marker_inherit_combined_disallowed() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -1896,7 +1896,7 @@ fn fork_marker_inherit_combined_disallowed() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'pypy' and sys_platform == 'darwin'", ] @@ -1908,7 +1908,7 @@ fn fork_marker_inherit_combined_disallowed() -> Result<()> { [[package]] name = "package-b" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'cpython' and sys_platform == 'darwin'", ] @@ -1922,8 +1922,8 @@ fn 
fork_marker_inherit_combined_disallowed() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -2041,15 +2041,15 @@ fn fork_marker_inherit_combined() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'pypy' and sys_platform == 'darwin'", "implementation_name == 'cpython' and sys_platform == 'darwin'", "implementation_name != 'cpython' and implementation_name != 'pypy' and sys_platform == 'darwin'", ] dependencies = [ - { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "implementation_name == 'pypy' and sys_platform == 'darwin'" }, - { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "implementation_name == 'cpython' and sys_platform == 'darwin'" }, + { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "implementation_name == 'pypy' and sys_platform == 'darwin'" }, + { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "implementation_name == 'cpython' and sys_platform == 'darwin'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_combined_a-1.0.0.tar.gz", hash = "sha256:2ec4c9dbb7078227d996c344b9e0c1b365ed0000de9527b2ba5b616233636f07" } wheels = [ @@ -2059,7 +2059,7 @@ fn fork_marker_inherit_combined() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -2071,7 +2071,7 @@ fn fork_marker_inherit_combined() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'pypy' and sys_platform == 'darwin'", ] @@ -2083,7 +2083,7 @@ fn fork_marker_inherit_combined() -> Result<()> { [[package]] name = "package-b" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "implementation_name == 'cpython' and 
sys_platform == 'darwin'", ] @@ -2097,8 +2097,8 @@ fn fork_marker_inherit_combined() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -2205,7 +2205,7 @@ fn fork_marker_inherit_isolated() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -2217,7 +2217,7 @@ fn fork_marker_inherit_isolated() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -2232,7 +2232,7 @@ fn fork_marker_inherit_isolated() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_isolated_b-1.0.0.tar.gz", hash = "sha256:96f8c3cabc5795e08a064c89ec76a4bfba8afe3c13d647161b4a1568b4584ced" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_isolated_b-1.0.0-py3-none-any.whl", hash = "sha256:c8affc2f13f9bcd08b3d1601a21a1781ea14d52a8cddc708b29428c9c3d53ea5" }, @@ -2243,8 +2243,8 @@ fn fork_marker_inherit_isolated() -> Result<()> { version = "0.1.0" source = { virtual = "." 
} dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -2359,7 +2359,7 @@ fn fork_marker_inherit_transitive() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -2374,7 +2374,7 @@ fn fork_marker_inherit_transitive() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -2386,7 +2386,7 @@ fn fork_marker_inherit_transitive() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "sys_platform == 'darwin'" }, ] @@ -2398,7 +2398,7 @@ fn fork_marker_inherit_transitive() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_transitive_c-1.0.0.tar.gz", hash = "sha256:58bb788896b2297f2948f51a27fc48cfe44057c687a3c0c4d686b107975f7f32" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_inherit_transitive_c-1.0.0-py3-none-any.whl", hash = "sha256:ad2cbb0582ec6f4dc9549d1726d2aae66cd1fdf0e355acc70cd720cf65ae4d86" }, @@ -2409,8 +2409,8 @@ fn fork_marker_inherit_transitive() -> Result<()> { version = "0.1.0" source = { virtual = "." 
} dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -2519,7 +2519,7 @@ fn fork_marker_inherit() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -2531,7 +2531,7 @@ fn fork_marker_inherit() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -2545,8 +2545,8 @@ fn fork_marker_inherit() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -2662,7 +2662,7 @@ fn fork_marker_limited_inherit() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -2674,7 +2674,7 @@ fn fork_marker_limited_inherit() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -2686,7 +2686,7 @@ fn fork_marker_limited_inherit() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "sys_platform == 'linux'" }, ] @@ -2698,7 +2698,7 @@ fn fork_marker_limited_inherit() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = 
{ registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_limited_inherit_c-1.0.0.tar.gz", hash = "sha256:8dcb05f5dff09fec52ab507b215ff367fe815848319a17929db997ad3afe88ae" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_limited_inherit_c-1.0.0-py3-none-any.whl", hash = "sha256:877a87a4987ad795ddaded3e7266ed7defdd3cfbe07a29500cb6047637db4065" }, @@ -2709,8 +2709,8 @@ fn fork_marker_limited_inherit() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, { name = "package-b" }, ] @@ -2822,7 +2822,7 @@ fn fork_marker_selection() -> Result<()> { [[package]] name = "package-a" version = "0.1.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_selection_a-0.1.0.tar.gz", hash = "sha256:ece83ba864a62d5d747439f79a0bf36aa4c18d15bca96aab855ffc2e94a8eef7" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_selection_a-0.1.0-py3-none-any.whl", hash = "sha256:a3b9d6e46cc226d20994cc60653fd59d81d96527749f971a6f59ef8cbcbc7c01" }, @@ -2831,7 +2831,7 @@ fn fork_marker_selection() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -2843,7 +2843,7 @@ fn fork_marker_selection() -> Result<()> { [[package]] name = "package-b" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -2858,8 +2858,8 @@ fn fork_marker_selection() -> Result<()> { source = { virtual = "." 
} dependencies = [ { name = "package-a" }, - { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -2985,7 +2985,7 @@ fn fork_marker_track() -> Result<()> { [[package]] name = "package-a" version = "1.3.1" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "implementation_name == 'iron'" }, ] @@ -2997,7 +2997,7 @@ fn fork_marker_track() -> Result<()> { [[package]] name = "package-b" version = "2.7" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'darwin'", ] @@ -3009,7 +3009,7 @@ fn fork_marker_track() -> Result<()> { [[package]] name = "package-b" version = "2.8" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -3021,7 +3021,7 @@ fn fork_marker_track() -> Result<()> { [[package]] name = "package-c" version = "1.10" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_track_c-1.10.tar.gz", hash = "sha256:c89006d893254790b0fcdd1b33520241c8ff66ab950c6752b745e006bdeff144" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_marker_track_c-1.10-py3-none-any.whl", hash = "sha256:cedcb8fbcdd9fbde4eea76612e57536c8b56507a9d7f7a92e483cb56b18c57a3" }, @@ -3033,8 +3033,8 @@ fn fork_marker_track() -> Result<()> { source = { virtual = "." 
} dependencies = [ { name = "package-a" }, - { name = "package-b", version = "2.7", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" }, - { name = "package-b", version = "2.8", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-b", version = "2.7", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'darwin'" }, + { name = "package-b", version = "2.8", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, ] [package.metadata] @@ -3137,7 +3137,7 @@ fn fork_non_fork_marker_transitive() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "sys_platform == 'linux'" }, ] @@ -3149,7 +3149,7 @@ fn fork_non_fork_marker_transitive() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-c", marker = "sys_platform == 'darwin'" }, ] @@ -3161,7 +3161,7 @@ fn fork_non_fork_marker_transitive() -> Result<()> { [[package]] name = "package-c" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_non_fork_marker_transitive_c-2.0.0.tar.gz", hash = "sha256:ffab9124854f64c8b5059ccaed481547f54abac868ba98aa6a454c0163cdb1c7" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_non_fork_marker_transitive_c-2.0.0-py3-none-any.whl", hash = "sha256:2b72d6af81967e1c55f30d920d6a7b913fce6ad0a0658ec79972a3d1a054e85f" }, @@ -3453,7 +3453,7 @@ fn fork_overlapping_markers_basic() -> Result<()> { [[package]] name = "package-a" version = "1.2.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_overlapping_markers_basic_a-1.2.0.tar.gz", hash = "sha256:f8c2058d80430d62b15c87fd66040a6c0dd23d32e7f144a932899c0c74bdff2a" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_overlapping_markers_basic_a-1.2.0-py3-none-any.whl", hash = "sha256:04293ed42eb3620c9ddf56e380a8408a30733d5d38f321a35c024d03e7116083" }, @@ -3636,11 +3636,11 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-cleaver" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-fork-if-not-forked", version = "3.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-fork-if-not-forked", version = "3.0.0", 
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, { name = "package-fork-if-not-forked-proxy", marker = "sys_platform != 'linux'" }, - { name = "package-reject-cleaver1", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-reject-cleaver1", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, { name = "package-reject-cleaver1-proxy" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_bistable_cleaver-1.0.0.tar.gz", hash = "sha256:64e5ee0c81d6a51fb71ed517fd04cc26c656908ad05073270e67c2f9b92194c5" } @@ -3651,7 +3651,7 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-fork-if-not-forked" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform != 'linux'", ] @@ -3663,7 +3663,7 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-fork-if-not-forked" version = "3.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -3675,9 +3675,9 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-fork-if-not-forked-proxy" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-fork-if-not-forked", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-fork-if-not-forked", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_bistable_fork_if_not_forked_proxy-1.0.0.tar.gz", hash = "sha256:0ed00a7c8280348225835fadc76db8ecc6b4a9ee11351a6c432c475f8d1579de" } wheels = [ @@ -3687,7 +3687,7 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-reject-cleaver1" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -3699,7 +3699,7 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-reject-cleaver1" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform != 'linux'", ] @@ -3711,9 +3711,9 @@ fn preferences_dependent_forking_bistable() -> Result<()> { [[package]] name = "package-reject-cleaver1-proxy" version = "1.0.0" - source = { registry = 
"https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-reject-cleaver1", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-reject-cleaver1", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_bistable_reject_cleaver1_proxy-1.0.0.tar.gz", hash = "sha256:6b6eaa229d55de992e36084521d2f62dce35120a866e20354d0e5617e16e00ce" } wheels = [ @@ -4048,7 +4048,7 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-bar" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform != 'linux'", ] @@ -4064,7 +4064,7 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-bar" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -4076,7 +4076,7 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-c" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -4088,7 +4088,7 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-c" version = "3.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform != 'linux'", ] @@ -4100,9 +4100,9 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-cleaver" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, { name = "package-foo", marker = "sys_platform == 'linux'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_tristable_cleaver-1.0.0.tar.gz", hash = "sha256:49ec5779d0722586652e3ceb4ca2bf053a79dc3fa2d7ccd428a359bcc885a248" } @@ -4113,9 +4113,9 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-d" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name 
= "package-c", version = "3.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-c", version = "3.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_tristable_d-1.0.0.tar.gz", hash = "sha256:690b69acb46d0ebfb11a81f401d2ea2e2e6a8ae97f199d345715e9bd40a7ceba" } wheels = [ @@ -4125,10 +4125,10 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-foo" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-c", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, - { name = "package-c", version = "3.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-c", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, + { name = "package-c", version = "3.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, { name = "package-reject-cleaver-1" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_tristable_foo-1.0.0.tar.gz", hash = "sha256:7c1a2ca51dd2156cf36c3400e38595e11b09442052f4bd1d6b3d53eb5b2acf32" } @@ -4139,10 +4139,10 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-reject-cleaver-1" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-unrelated-dep2", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, - { name = "package-unrelated-dep2", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-unrelated-dep2", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, + { name = "package-unrelated-dep2", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_tristable_reject_cleaver_1-1.0.0.tar.gz", hash = "sha256:6ef93ca22db3a054559cb34f574ffa3789951f2f82b213c5502d0e9ff746f15e" } wheels = [ @@ -4152,7 +4152,7 @@ fn preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-unrelated-dep2" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -4164,7 +4164,7 @@ fn 
preferences_dependent_forking_tristable() -> Result<()> { [[package]] name = "package-unrelated-dep2" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform != 'linux'", ] @@ -4178,8 +4178,8 @@ fn preferences_dependent_forking_tristable() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, - { name = "package-bar", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, + { name = "package-bar", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, { name = "package-cleaver" }, { name = "package-foo" }, ] @@ -4342,7 +4342,7 @@ fn preferences_dependent_forking() -> Result<()> { [[package]] name = "package-bar" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform != 'linux'", ] @@ -4354,7 +4354,7 @@ fn preferences_dependent_forking() -> Result<()> { [[package]] name = "package-bar" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'linux'", ] @@ -4366,9 +4366,9 @@ fn preferences_dependent_forking() -> Result<()> { [[package]] name = "package-cleaver" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ - { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, + { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, { name = "package-foo", marker = "sys_platform == 'linux'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_cleaver-1.0.0.tar.gz", hash = "sha256:0347b927fdf7731758ea53e1594309fc6311ca6983f36553bc11654a264062b2" } @@ -4379,7 +4379,7 @@ fn preferences_dependent_forking() -> Result<()> { [[package]] name = "package-foo" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_foo-1.0.0.tar.gz", hash = "sha256:abf1c0ac825ee5961e683067634916f98c6651a6d4473ff87d8b57c17af8fed2" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/preferences_dependent_forking_foo-1.0.0-py3-none-any.whl", hash 
= "sha256:85348e8df4892b9f297560c16abcf231828f538dc07339ed121197a00a0626a5" }, @@ -4390,8 +4390,8 @@ fn preferences_dependent_forking() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform != 'linux'" }, - { name = "package-bar", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" }, + { name = "package-bar", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform != 'linux'" }, + { name = "package-bar", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'linux'" }, { name = "package-cleaver" }, { name = "package-foo" }, ] @@ -4525,15 +4525,15 @@ fn fork_remaining_universe_partitioning() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "os_name == 'darwin' and sys_platform == 'illumos'", "os_name == 'linux' and sys_platform == 'illumos'", "os_name != 'darwin' and os_name != 'linux' and sys_platform == 'illumos'", ] dependencies = [ - { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "os_name == 'darwin' and sys_platform == 'illumos'" }, - { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "os_name == 'linux' and sys_platform == 'illumos'" }, + { name = "package-b", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "os_name == 'darwin' and sys_platform == 'illumos'" }, + { name = "package-b", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "os_name == 'linux' and sys_platform == 'illumos'" }, ] sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_remaining_universe_partitioning_a-1.0.0.tar.gz", hash = "sha256:d5be0af9a1958ec08ca2827b47bfd507efc26cab03ecf7ddf204e18e8a3a18ae" } wheels = [ @@ -4543,7 +4543,7 @@ fn fork_remaining_universe_partitioning() -> Result<()> { [[package]] name = "package-a" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "sys_platform == 'windows'", ] @@ -4555,7 +4555,7 @@ fn fork_remaining_universe_partitioning() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "os_name == 'darwin' and sys_platform == 'illumos'", ] @@ -4567,7 +4567,7 @@ fn fork_remaining_universe_partitioning() -> Result<()> { [[package]] name = "package-b" version = "2.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = 
"https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } resolution-markers = [ "os_name == 'linux' and sys_platform == 'illumos'", ] @@ -4581,8 +4581,8 @@ fn fork_remaining_universe_partitioning() -> Result<()> { version = "0.1.0" source = { virtual = "." } dependencies = [ - { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'illumos'" }, - { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'windows'" }, + { name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'illumos'" }, + { name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" }, marker = "sys_platform == 'windows'" }, ] [package.metadata] @@ -4845,7 +4845,7 @@ fn fork_requires_python_patch_overlap() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_requires_python_patch_overlap_a-1.0.0.tar.gz", hash = "sha256:ac2820ee4808788674295192d79a709e3259aa4eef5b155e77f719ad4eaa324d" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_requires_python_patch_overlap_a-1.0.0-py3-none-any.whl", hash = "sha256:43a750ba4eaab749d608d70e94d3d51e083cc21f5a52ac99b5967b26486d5ef1" }, @@ -5031,7 +5031,7 @@ fn requires_python_wheels() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/requires_python_wheels_a-1.0.0.tar.gz", hash = "sha256:9a11ff73fdc513c4dab0d3e137f4145a00ef0dfc95154360c8f503eed62a03c9" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/requires_python_wheels_a-1.0.0-cp310-cp310-any.whl", hash = "sha256:b979494a0d7dc825b84d6c516ac407143915f6d2840d229ee2a36b3d06deb61d" }, @@ -5130,7 +5130,7 @@ fn unreachable_package() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_package_a-1.0.0.tar.gz", hash = "sha256:308f0b6772e99dcb33acee38003b176e3acffbe01c3c511585db9a7d7ec008f7" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_package_a-1.0.0-py3-none-any.whl", hash = "sha256:cc472ded9f3b260e6cda0e633fa407a13607e190422cb455f02beebd32d6751f" }, @@ -5241,7 +5241,7 @@ fn unreachable_wheels() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_wheels_a-1.0.0.tar.gz", hash = 
"sha256:91c6619d1cfa227f3662c0c062b1c0c16efe11e589db2f1836e809e2c6d9961e" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_wheels_a-1.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:e9fb30c5eb114114f9031d0ad2238614c2dcce203c5992848305ccda8f38a53e" }, @@ -5250,7 +5250,7 @@ fn unreachable_wheels() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_wheels_b-1.0.0.tar.gz", hash = "sha256:253ae69b963651cd5ac16601a445e2e179db9eac552e8cfc37aadf73a88931ed" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_wheels_b-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3de2212ca86f1137324965899ce7f48640ed8db94578f4078d641520b77e13e" }, @@ -5260,7 +5260,7 @@ fn unreachable_wheels() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_wheels_c-1.0.0.tar.gz", hash = "sha256:5c4783e85f0fa57b720fd02b5c7e0ff8bc98121546fe2cce435710efe4a34b28" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/unreachable_wheels_c-1.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:4b846c5b1646b04828a2bef6c9d180ff7cfd725866013dcec8933de7fb5f9e8d" }, @@ -5362,7 +5362,7 @@ fn marker_variants_have_different_extras() -> Result<()> { [[package]] name = "package-psycopg" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-tzdata", marker = "sys_platform == 'win32'" }, ] @@ -5379,7 +5379,7 @@ fn marker_variants_have_different_extras() -> Result<()> { [[package]] name = "package-psycopg-binary" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/marker_variants_have_different_extras_psycopg_binary-1.0.0.tar.gz", hash = "sha256:9939771dfe78d76e3583492aaec576719780f744b36198b1f18bb16bb5048995" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/marker_variants_have_different_extras_psycopg_binary-1.0.0-py3-none-any.whl", hash = "sha256:4fb0aef60e76bc7e339d60dc919f3b6e27e49184ffdef9fb2c3f6902b23b6bd2" }, @@ -5388,7 +5388,7 @@ fn marker_variants_have_different_extras() -> Result<()> { [[package]] name = "package-tzdata" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/marker_variants_have_different_extras_tzdata-1.0.0.tar.gz", hash = "sha256:5aa31d0aec969afbc13584c3209ca2380107bdab68578f881eb2da543ac2ee8e" } wheels = [ { url = 
"https://astral-sh.github.io/packse/PACKSE_VERSION/files/marker_variants_have_different_extras_tzdata-1.0.0-py3-none-any.whl", hash = "sha256:7466eec7ed202434492e7c09a4a7327517aec6d549aaca0436dcc100f9fcb6a5" }, @@ -5515,7 +5515,7 @@ fn virtual_package_extra_priorities() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-b" }, ] @@ -5527,7 +5527,7 @@ fn virtual_package_extra_priorities() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/virtual_package_extra_priorities_b-1.0.0.tar.gz", hash = "sha256:79a54df14eb28687678447f5270f578f73b325f8234e620d375a87708fd7345c" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/virtual_package_extra_priorities_b-1.0.0-py3-none-any.whl", hash = "sha256:2aab1a3b90f215cb55b9bfde55b3c3617225ca0da726e8c9543c0727734f1df9" }, @@ -5635,7 +5635,7 @@ fn specific_architecture() -> Result<()> { [[package]] name = "package-a" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } dependencies = [ { name = "package-b", marker = "platform_machine == 'x86_64'" }, { name = "package-c", marker = "platform_machine == 'aarch64'" }, @@ -5649,7 +5649,7 @@ fn specific_architecture() -> Result<()> { [[package]] name = "package-b" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/specific_architecture_b-1.0.0-cp313-cp313-freebsd_13_aarch64.whl", hash = "sha256:4ce70a68440d4aaa31cc1c6174b83b741e9b8f3074ad0f3ef41c572795378999" }, { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/specific_architecture_b-1.0.0-cp313-cp313-freebsd_13_x86_64.whl", hash = "sha256:4ce70a68440d4aaa31cc1c6174b83b741e9b8f3074ad0f3ef41c572795378999" }, @@ -5660,7 +5660,7 @@ fn specific_architecture() -> Result<()> { [[package]] name = "package-c" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } wheels = [ { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/specific_architecture_c-1.0.0-cp313-cp313-freebsd_13_aarch64.whl", hash = "sha256:b028c88fe496724cea4a7d95eb789a000b7f000067f95c922b09461be2746a3d" }, { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/specific_architecture_c-1.0.0-cp313-cp313-freebsd_13_x86_64.whl", hash = "sha256:b028c88fe496724cea4a7d95eb789a000b7f000067f95c922b09461be2746a3d" }, @@ -5671,7 +5671,7 @@ fn specific_architecture() -> Result<()> { [[package]] name = "package-d" version = "1.0.0" - source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" } + source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html" } wheels = [ { url = 
"https://astral-sh.github.io/packse/PACKSE_VERSION/files/specific_architecture_d-1.0.0-cp313-cp313-freebsd_13_aarch64.whl", hash = "sha256:842864c1348694fab33199eb05921602c2abfc77844a81085a55db02edd30da4" }, { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/specific_architecture_d-1.0.0-cp313-cp313-freebsd_13_x86_64.whl", hash = "sha256:842864c1348694fab33199eb05921602c2abfc77844a81085a55db02edd30da4" }, diff --git a/crates/uv/tests/it/main.rs b/crates/uv/tests/it/main.rs index 7835fa461..872c88d4b 100644 --- a/crates/uv/tests/it/main.rs +++ b/crates/uv/tests/it/main.rs @@ -84,6 +84,9 @@ mod python_install; #[cfg(feature = "python")] mod python_pin; +#[cfg(feature = "python-managed")] +mod python_upgrade; + #[cfg(all(feature = "python", feature = "pypi"))] mod run; diff --git a/crates/uv/tests/it/network.rs b/crates/uv/tests/it/network.rs index fba24afe1..1a5805970 100644 --- a/crates/uv/tests/it/network.rs +++ b/crates/uv/tests/it/network.rs @@ -1,6 +1,6 @@ -use std::env; +use std::{env, io}; -use assert_fs::fixture::{FileWriteStr, PathChild}; +use assert_fs::fixture::{ChildPath, FileWriteStr, PathChild}; use http::StatusCode; use serde_json::json; use wiremock::matchers::method; @@ -8,31 +8,81 @@ use wiremock::{Mock, MockServer, ResponseTemplate}; use crate::common::{TestContext, uv_snapshot}; -/// Check the simple index error message when the server returns HTTP status 500, a retryable error. -#[tokio::test] -async fn simple_http_500() { - let context = TestContext::new("3.12"); +fn connection_reset(_request: &wiremock::Request) -> io::Error { + io::Error::new(io::ErrorKind::ConnectionReset, "Connection reset by peer") +} +/// Answers with a retryable HTTP status 500. +async fn http_error_server() -> (MockServer, String) { let server = MockServer::start().await; Mock::given(method("GET")) .respond_with(ResponseTemplate::new(StatusCode::INTERNAL_SERVER_ERROR)) .mount(&server) .await; + let mock_server_uri = server.uri(); + (server, mock_server_uri) +} + +/// Answers with a retryable connection reset IO error. +async fn io_error_server() -> (MockServer, String) { + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with_err(connection_reset) + .mount(&server) + .await; + + let mock_server_uri = server.uri(); + (server, mock_server_uri) +} + +/// Check the simple index error message when the server returns HTTP status 500, a retryable error. +#[tokio::test] +async fn simple_http_500() { + let context = TestContext::new("3.12"); + + let (_server_drop_guard, mock_server_uri) = http_error_server().await; let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; uv_snapshot!(filters, context .pip_install() .arg("tqdm") .arg("--index-url") - .arg(server.uri()), @r" + .arg(&mock_server_uri), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Request failed after 3 retries + Caused by: Failed to fetch: `[SERVER]/tqdm/` + Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/tqdm/) + "); +} + +/// Check the simple index error message when the server returns a retryable IO error. 
+#[tokio::test] +async fn simple_io_err() { + let context = TestContext::new("3.12"); + + let (_server_drop_guard, mock_server_uri) = io_error_server().await; + + let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; + uv_snapshot!(filters, context + .pip_install() + .arg("tqdm") + .arg("--index-url") + .arg(&mock_server_uri), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Failed to fetch: `[SERVER]/tqdm/` - Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/tqdm/) + Caused by: Request failed after 3 retries + Caused by: error sending request for url ([SERVER]/tqdm/) + Caused by: client error (SendRequest) + Caused by: connection closed before message completed "); } @@ -41,12 +91,7 @@ async fn simple_http_500() { async fn find_links_http_500() { let context = TestContext::new("3.12"); - let server = MockServer::start().await; - Mock::given(method("GET")) - .respond_with(ResponseTemplate::new(StatusCode::INTERNAL_SERVER_ERROR)) - .mount(&server) - .await; - let mock_server_uri = server.uri(); + let (_server_drop_guard, mock_server_uri) = http_error_server().await; let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; uv_snapshot!(filters, context @@ -54,7 +99,33 @@ async fn find_links_http_500() { .arg("tqdm") .arg("--no-index") .arg("--find-links") - .arg(server.uri()), @r" + .arg(&mock_server_uri), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to read `--find-links` URL: [SERVER]/ + Caused by: Request failed after 3 retries + Caused by: Failed to fetch: `[SERVER]/` + Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/) + "); +} + +/// Check the find links error message when the server returns a retryable IO error. 
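One detail worth noting in the callers above: the helpers return the `MockServer` alongside its URI, and each test binds the server to `_server_drop_guard`. A `MockServer` shuts down when dropped, so the binding must outlive the requests; a bare `let _ = …` would drop it immediately. A sketch of that distinction (assuming `wiremock` and `tokio`):

```rust
use wiremock::MockServer;

#[tokio::main]
async fn main() {
    let (guard, uri) = {
        let server = MockServer::start().await;
        let uri = server.uri();
        // Return the server itself alongside the URI: returning only the URI
        // would drop the server at the end of this block and shut it down.
        (server, uri)
    };

    // `guard` is still alive, so `uri` still points at a running server.
    println!("mock server listening at {uri}");
    drop(guard); // the server shuts down when the guard is dropped
}
```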
+#[tokio::test] +async fn find_links_io_error() { + let context = TestContext::new("3.12"); + + let (_server_drop_guard, mock_server_uri) = io_error_server().await; + + let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; + uv_snapshot!(filters, context + .pip_install() + .arg("tqdm") + .arg("--no-index") + .arg("--find-links") + .arg(&mock_server_uri), @r" success: false exit_code: 2 ----- stdout ----- @@ -62,7 +133,10 @@ async fn find_links_http_500() { ----- stderr ----- error: Failed to read `--find-links` URL: [SERVER]/ Caused by: Failed to fetch: `[SERVER]/` - Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/) + Caused by: Request failed after 3 retries + Caused by: error sending request for url ([SERVER]/) + Caused by: client error (SendRequest) + Caused by: connection closed before message completed "); } @@ -72,12 +146,33 @@ async fn find_links_http_500() { async fn direct_url_http_500() { let context = TestContext::new("3.12"); - let server = MockServer::start().await; - Mock::given(method("GET")) - .respond_with(ResponseTemplate::new(StatusCode::INTERNAL_SERVER_ERROR)) - .mount(&server) - .await; - let mock_server_uri = server.uri(); + let (_server_drop_guard, mock_server_uri) = http_error_server().await; + + let tqdm_url = format!( + "{mock_server_uri}/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl" + ); + let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; + uv_snapshot!(filters, context + .pip_install() + .arg(format!("tqdm @ {tqdm_url}")), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × Failed to download `tqdm @ [SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl` + ├─▶ Request failed after 3 retries + ├─▶ Failed to fetch: `[SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl` + ╰─▶ HTTP status server error (500 Internal Server Error) for url ([SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl) + "); +} + +/// Check the direct package URL error message when the server returns a retryable IO error. +#[tokio::test] +async fn direct_url_io_error() { + let context = TestContext::new("3.12"); + + let (_server_drop_guard, mock_server_uri) = io_error_server().await; let tqdm_url = format!( "{mock_server_uri}/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl" @@ -93,26 +188,14 @@ async fn direct_url_http_500() { ----- stderr ----- × Failed to download `tqdm @ [SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl` ├─▶ Failed to fetch: `[SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl` - ╰─▶ HTTP status server error (500 Internal Server Error) for url ([SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl) + ├─▶ Request failed after 3 retries + ├─▶ error sending request for url ([SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl) + ├─▶ client error (SendRequest) + ╰─▶ connection closed before message completed "); } -/// Check the Python install error message when the server returns HTTP status 500, a retryable -/// error. 
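The snapshots above show the two styles uv uses to render the same failure: the `error:`/`Caused by:` prefix chain and the `×`/`├─▶`/`╰─▶` tree, both of which walk the chain of underlying causes. A sketch of how such a `Caused by:` chain can be built and printed (assuming the `anyhow` crate; the messages are copied from the snapshots, not produced by uv itself):

```rust
use anyhow::anyhow;

fn main() {
    // Build a three-level chain similar to the ones asserted above.
    let err = anyhow!("connection closed before message completed")
        .context("error sending request for url ([SERVER]/tqdm/)")
        .context("Request failed after 3 retries");

    // Print the outermost message, then walk the causes.
    let mut chain = err.chain();
    if let Some(top) = chain.next() {
        eprintln!("error: {top}");
    }
    for cause in chain {
        eprintln!("  Caused by: {cause}");
    }
}
```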
-#[tokio::test] -async fn python_install_http_500() { - let context = TestContext::new("3.12") - .with_filtered_python_keys() - .with_filtered_exe_suffix() - .with_managed_python_dirs(); - - let server = MockServer::start().await; - Mock::given(method("GET")) - .respond_with(ResponseTemplate::new(StatusCode::INTERNAL_SERVER_ERROR)) - .mount(&server) - .await; - let mock_server_uri = server.uri(); - +fn write_python_downloads_json(context: &TestContext, mock_server_uri: &String) -> ChildPath { let python_downloads_json = context.temp_dir.child("python_downloads.json"); let interpreter = json!({ "cpython-3.10.0-darwin-aarch64-none": { @@ -135,6 +218,51 @@ async fn python_install_http_500() { python_downloads_json .write_str(&serde_json::to_string(&interpreter).unwrap()) .unwrap(); + python_downloads_json +} + +/// Check the Python install error message when the server returns HTTP status 500, a retryable +/// error. +#[tokio::test] +async fn python_install_http_500() { + let context = TestContext::new("3.12") + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + let (_server_drop_guard, mock_server_uri) = http_error_server().await; + + let python_downloads_json = write_python_downloads_json(&context, &mock_server_uri); + + let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; + uv_snapshot!(filters, context + .python_install() + .arg("cpython-3.10.0-darwin-aarch64-none") + .arg("--python-downloads-json-url") + .arg(python_downloads_json.path()), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + error: Failed to install cpython-3.10.0-macos-aarch64-none + Caused by: Request failed after 3 retries + Caused by: Failed to download [SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst + Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst) + "); +} + +/// Check the Python install error message when the server returns a retryable IO error. 
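The `write_python_downloads_json` helper factored out above builds a downloads manifest that points at the mock server and writes it to a temp file, so both Python-install tests can share it. A sketch of that fixture-writing pattern (assuming `assert_fs` and `serde_json` as dev-dependencies; the field names here are illustrative, not the exact python-build-standalone schema):

```rust
use assert_fs::TempDir;
use assert_fs::prelude::*;
use serde_json::json;

fn main() {
    let temp_dir = TempDir::new().unwrap();
    let mock_server_uri = "http://127.0.0.1:9999"; // stand-in for `server.uri()`

    // Point the download manifest at the mock server so the install path
    // exercises the error handling under test.
    let manifest = json!({
        "cpython-3.10.0-darwin-aarch64-none": {
            "url": format!("{mock_server_uri}/cpython-3.10.0.tar.zst"),
        }
    });

    let file = temp_dir.child("python_downloads.json");
    file.write_str(&serde_json::to_string(&manifest).unwrap())
        .unwrap();
    // `file.path()` is what gets passed to `--python-downloads-json-url`.
}
```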
+#[tokio::test] +async fn python_install_io_error() { + let context = TestContext::new("3.12") + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + let (_server_drop_guard, mock_server_uri) = io_error_server().await; + + let python_downloads_json = write_python_downloads_json(&context, &mock_server_uri); let filters = vec![(mock_server_uri.as_str(), "[SERVER]")]; uv_snapshot!(filters, context @@ -149,6 +277,9 @@ async fn python_install_http_500() { ----- stderr ----- error: Failed to install cpython-3.10.0-macos-aarch64-none Caused by: Failed to download [SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst - Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst) + Caused by: Request failed after 3 retries + Caused by: error sending request for url ([SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst) + Caused by: client error (SendRequest) + Caused by: connection closed before message completed "); } diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index efb51e47d..b99be1296 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -3,9 +3,8 @@ use std::env::current_dir; use std::fs; use std::io::Cursor; -use std::path::PathBuf; -use anyhow::{Context, Result, bail}; +use anyhow::Result; use assert_fs::prelude::*; use flate2::write::GzEncoder; use fs_err::File; @@ -2910,16 +2909,16 @@ fn incompatible_narrowed_url_dependency() -> Result<()> { "})?; uv_snapshot!(context.filters(), context.pip_compile() - .arg("requirements.in"), @r###" + .arg("requirements.in"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Requirements contain conflicting URLs for package `uv-public-pypackage`: - - git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389 - git+https://github.com/astral-test/uv-public-pypackage@test-branch - "### + - git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389 + " ); Ok(()) @@ -4803,97 +4802,6 @@ fn compile_editable_url_requirement() -> Result<()> { Ok(()) } -#[test] -#[ignore] -fn cache_errors_are_non_fatal() -> Result<()> { - let context = TestContext::new("3.12"); - let requirements_in = context.temp_dir.child("requirements.in"); - // No git dep, git has its own locking strategy - requirements_in.write_str(indoc! {r" - # pypi wheel - pandas - # url wheel - flask @ https://files.pythonhosted.org/packages/36/42/015c23096649b908c809c69388a805a571a3bea44362fe87e33fc3afa01f/flask-3.0.0-py3-none-any.whl - # url source dist - werkzeug @ https://files.pythonhosted.org/packages/0d/cc/ff1904eb5eb4b455e442834dabf9427331ac0fa02853bf83db817a7dd53d/werkzeug-3.0.1.tar.gz - " - })?; - - // Pick a file from each kind of cache - let interpreter_cache = context - .cache_dir - .path() - .join("interpreter-v0") - .read_dir()? - .next() - .context("Expected a python interpreter cache file")?? 
- .path(); - let cache_files = [ - PathBuf::from("simple-v0/pypi/numpy.msgpack"), - PathBuf::from( - "wheels-v0/pypi/python-dateutil/python_dateutil-2.8.2-py2.py3-none-any.msgpack", - ), - PathBuf::from("wheels-v0/url/4b8be67c801a7ecb/flask/flask-3.0.0-py3-none-any.msgpack"), - PathBuf::from("built-wheels-v0/url/6781bd6440ae72c2/werkzeug/metadata.msgpack"), - interpreter_cache, - ]; - - let check = || { - uv_snapshot!(context.filters(), context.pip_compile() - .arg("pip") - .arg("compile") - .arg(requirements_in.path()) - // It's sufficient to check that we resolve to a fix number of packages - .stdout(std::process::Stdio::null()), @r###" - success: true - exit_code: 0 - ----- stdout ----- - - ----- stderr ----- - Resolved 13 packages in [TIME] - "### - ); - }; - - insta::allow_duplicates! { - check(); - - // Replace some cache files with invalid contents - for file in &cache_files { - let file = context.cache_dir.join(file); - if !file.is_file() { - bail!("Missing cache file {}", file.user_display()); - } - fs_err::write(file, "I borken you cache")?; - } - - check(); - - #[cfg(unix)] - { - use fs_err::os::unix::fs::OpenOptionsExt; - - // Make some files unreadable, so that the read instead of the deserialization will fail - for file in cache_files { - let file = context.cache_dir.join(file); - if !file.is_file() { - bail!("Missing cache file {}", file.user_display()); - } - - fs_err::OpenOptions::new() - .create(true) - .write(true) - .mode(0o000) - .open(file)?; - } - } - - check(); - - Ok(()) - } -} - /// Resolve a distribution from an HTML-only registry. #[test] #[cfg(not(target_env = "musl"))] // No musllinux wheels in the torch index @@ -12806,28 +12714,34 @@ fn emit_index_annotation_multiple_indexes() -> Result<()> { let context = TestContext::new("3.12"); let requirements_in = context.temp_dir.child("requirements.in"); - requirements_in.write_str("uv\nrequests")?; + requirements_in.write_str("httpcore\nrequests")?; uv_snapshot!(context.filters(), context.pip_compile() .arg("requirements.in") .arg("--extra-index-url") .arg("https://test.pypi.org/simple") - .arg("--emit-index-annotation"), @r###" + .arg("--emit-index-annotation"), @r" success: true exit_code: 0 ----- stdout ----- # This file was autogenerated by uv via the following command: # uv pip compile --cache-dir [CACHE_DIR] requirements.in --emit-index-annotation + certifi==2016.8.8 + # via httpcore + # from https://test.pypi.org/simple + h11==0.14.0 + # via httpcore + # from https://pypi.org/simple + httpcore==1.0.4 + # via -r requirements.in + # from https://pypi.org/simple requests==2.5.4.1 # via -r requirements.in # from https://test.pypi.org/simple - uv==0.1.24 - # via -r requirements.in - # from https://pypi.org/simple ----- stderr ----- - Resolved 2 packages in [TIME] - "### + Resolved 4 packages in [TIME] + " ); Ok(()) @@ -14765,10 +14679,7 @@ fn compile_derivation_chain() -> Result<()> { let filters = context .filters() .into_iter() - .chain([ - (r"exit code: 1", "exit status: 1"), - (r"/.*/src", "/[TMP]/src"), - ]) + .chain([(r"/.*/src", "/[TMP]/src")]) .collect::<Vec<_>>(); uv_snapshot!(filters, context.pip_compile().arg("pyproject.toml"), @r###" @@ -15783,7 +15694,106 @@ fn invalid_group() -> Result<()> { } #[test] -fn project_and_group() -> Result<()> { +fn project_and_group_workspace_inherit() -> Result<()> { + // Checking that --project is handled properly with --group + fn new_context() -> Result<TestContext> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + 
pyproject_toml.write_str( + r#" + [project] + name = "myproject" + version = "0.1.0" + requires-python = ">=3.12" + + [tool.uv.workspace] + members = ["packages/*"] + + [tool.uv.sources] + pytest = { workspace = true } + "#, + )?; + + let subdir = context.temp_dir.child("packages"); + subdir.create_dir_all()?; + + let pytest_dir = subdir.child("pytest"); + pytest_dir.create_dir_all()?; + let pytest_toml = pytest_dir.child("pyproject.toml"); + pytest_toml.write_str( + r#" + [project] + name = "pytest" + version = "4.0.0" + requires-python = ">=3.12" + "#, + )?; + + let sniffio_dir = subdir.child("sniffio"); + sniffio_dir.create_dir_all()?; + let sniffio_toml = sniffio_dir.child("pyproject.toml"); + sniffio_toml.write_str( + r#" + [project] + name = "sniffio" + version = "1.3.1" + requires-python = ">=3.12" + "#, + )?; + + let subproject_dir = subdir.child("mysubproject"); + subproject_dir.create_dir_all()?; + let subproject_toml = subproject_dir.child("pyproject.toml"); + subproject_toml.write_str( + r#" + [project] + name = "mysubproject" + version = "0.1.0" + requires-python = ">=3.12" + + [tool.uv.sources] + sniffio = { workspace = true } + + [dependency-groups] + foo = ["iniconfig", "anyio", "sniffio", "pytest"] + "#, + )?; + + Ok(context) + } + + // Check that the workspace's sources are discovered and consulted + let context = new_context()?; + uv_snapshot!(context.filters(), context.pip_compile() + .arg("--group").arg("packages/mysubproject/pyproject.toml:foo"), @r" + success: true + exit_code: 0 + ----- stdout ----- + # This file was autogenerated by uv via the following command: + # uv pip compile --cache-dir [CACHE_DIR] --group packages/mysubproject/pyproject.toml:foo + anyio==4.3.0 + # via mysubproject (packages/mysubproject/pyproject.toml:foo) + idna==3.6 + # via anyio + iniconfig==2.0.0 + # via mysubproject (packages/mysubproject/pyproject.toml:foo) + pytest @ file://[TEMP_DIR]/packages/pytest + # via mysubproject (packages/mysubproject/pyproject.toml:foo) + sniffio @ file://[TEMP_DIR]/packages/sniffio + # via + # mysubproject (packages/mysubproject/pyproject.toml:foo) + # anyio + + ----- stderr ----- + Resolved 5 packages in [TIME] + "); + + Ok(()) +} + +#[test] +fn project_and_group_workspace() -> Result<()> { // Checking that --project is handled properly with --group fn new_context() -> Result<TestContext> { let context = TestContext::new("3.12"); @@ -16332,7 +16342,7 @@ fn pep_751_compile_registry_wheel() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "iniconfig" @@ -16381,7 +16391,7 @@ fn pep_751_compile_registry_sdist() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "source-distribution" @@ -16465,7 +16475,7 @@ fn pep_751_compile_directory() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -16536,7 +16546,7 @@ fn pep_751_compile_git() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = 
"uv-public-pypackage" @@ -16586,7 +16596,7 @@ fn pep_751_compile_url_wheel() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -16650,7 +16660,7 @@ fn pep_751_compile_url_sdist() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -16719,7 +16729,7 @@ fn pep_751_compile_path_wheel() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "iniconfig" @@ -16757,7 +16767,7 @@ fn pep_751_compile_path_wheel() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "iniconfig" @@ -16798,7 +16808,7 @@ fn pep_751_compile_path_sdist() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "iniconfig" @@ -16837,7 +16847,7 @@ fn pep_751_compile_path_sdist() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o nested/pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "iniconfig" @@ -16874,7 +16884,7 @@ fn pep_751_compile_preferences() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -16915,7 +16925,7 @@ fn pep_751_compile_preferences() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -16955,7 +16965,7 @@ fn pep_751_compile_preferences() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -16994,7 +17004,7 @@ fn pep_751_compile_preferences() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -17042,7 +17052,7 @@ fn pep_751_compile_warn() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --emit-index-url lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "iniconfig" @@ -17255,7 +17265,7 @@ fn pep_751_compile_no_emit_package() -> Result<()> { # uv pip compile --cache-dir [CACHE_DIR] requirements.txt --universal -o pylock.toml --no-emit-package idna lock-version = "1.0" created-by = "uv" - requires-python = ">=3.12.[X]" + requires-python = ">=3.12" [[packages]] name = "anyio" @@ -17516,3 +17526,80 @@ 
fn pubgrub_panic_double_self_dependency_extra() -> Result<()> { Ok(()) } + +/// Sync a Git repository that depends on a package within the same repository via a `path` source. +/// +/// See: +#[test] +#[cfg(feature = "git")] +fn git_path_transitive_dependency() -> Result<()> { + let context = TestContext::new("3.13"); + + let requirements_in = context.temp_dir.child("requirements.in"); + requirements_in.write_str( + r" + git+https://git@github.com/astral-sh/uv-path-dependency-test.git#subdirectory=package2 + ", + )?; + + uv_snapshot!(context.filters(), context.pip_compile().arg("requirements.in"), @r" + success: true + exit_code: 0 + ----- stdout ----- + # This file was autogenerated by uv via the following command: + # uv pip compile --cache-dir [CACHE_DIR] requirements.in + package1 @ git+https://git@github.com/astral-sh/uv-path-dependency-test.git@28781b32cf1f260cdb2c8040628079eb265202bd#subdirectory=package1 + # via package2 + package2 @ git+https://git@github.com/astral-sh/uv-path-dependency-test.git@28781b32cf1f260cdb2c8040628079eb265202bd#subdirectory=package2 + # via -r requirements.in + + ----- stderr ----- + Resolved 2 packages in [TIME] + "); + + Ok(()) +} + +/// Check how the Python patch version is reflected in resolution markers with and without `--universal`. +#[test] +fn omit_python_patch_universal() -> Result<()> { + let context = TestContext::new("3.11"); + + let requirements_in = context.temp_dir.child("requirements.in"); + requirements_in.write_str("redis")?; + + uv_snapshot!(context.filters(), context.pip_compile() + .arg("requirements.in"), @r" + success: true + exit_code: 0 + ----- stdout ----- + # This file was autogenerated by uv via the following command: + # uv pip compile --cache-dir [CACHE_DIR] requirements.in + redis==5.0.3 + # via -r requirements.in + + ----- stderr ----- + Resolved 1 package in [TIME] + " + ); + + uv_snapshot!(context.filters(), context.pip_compile() + .arg("requirements.in") + .arg("--universal"), @r" + success: true + exit_code: 0 + ----- stdout ----- + # This file was autogenerated by uv via the following command: + # uv pip compile --cache-dir [CACHE_DIR] requirements.in --universal + async-timeout==4.0.3 ; python_full_version < '3.11.[X]' + # via redis + redis==5.0.3 + # via -r requirements.in + + ----- stderr ----- + Resolved 2 packages in [TIME] + " + ); + + Ok(()) +} diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index fa823cce0..a33e08d90 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -19,7 +19,7 @@ use wiremock::{ use crate::common::{self, decode_token}; use crate::common::{ DEFAULT_PYTHON_VERSION, TestContext, build_vendor_links_url, download_to_disk, get_bin, - uv_snapshot, venv_bin_path, venv_to_interpreter, + uv_snapshot, venv_bin_path, }; use uv_fs::Simplified; use uv_static::EnvVars; @@ -342,10 +342,7 @@ dependencies = ["flask==1.0.x"] let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str("./path_dep")?; - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::<Vec<_>>(); - uv_snapshot!(filters, context.pip_install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.txt"), @r###" success: false @@ -564,11 +561,6 @@ fn install_requirements_txt() -> Result<()> { #[tokio::test] async fn install_remote_requirements_txt() -> Result<()> { let context = TestContext::new("3.12"); - let filters = context - .filters() - .into_iter() - 
.chain([(r"127\.0\.0\.1[^\r\n]*", "[LOCALHOST]")]) - .collect::>(); let username = "user"; let password = "password"; @@ -579,17 +571,17 @@ async fn install_remote_requirements_txt() -> Result<()> { let mut requirements_url = Url::parse(&format!("{}/requirements.txt", &server_url))?; // Should fail without credentials - uv_snapshot!(filters, context.pip_install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg(requirements_url.as_str()) - .arg("--strict"), @r###" + .arg("--strict"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Error while accessing remote requirements file: `http://[LOCALHOST] - "### + error: Error while accessing remote requirements file: `http://[LOCALHOST]/requirements.txt` + " ); let _ = requirements_url.set_username(username); @@ -1520,16 +1512,16 @@ fn install_editable_incompatible_constraint_url() -> Result<()> { .arg("-e") .arg(context.workspace_root.join("scripts/packages/black_editable")) .arg("--constraint") - .arg("constraints.txt"), @r###" + .arg("constraints.txt"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- error: Requirements contain conflicting URLs for package `black`: - - [WORKSPACE]/scripts/packages/black_editable + - file://[WORKSPACE]/scripts/packages/black_editable (editable) - https://files.pythonhosted.org/packages/0f/89/294c9a6b6c75a08da55e9d05321d0707e9418735e3062b12ef0f54c33474/black-24.4.2-py3-none-any.whl - "### + " ); Ok(()) @@ -2071,6 +2063,64 @@ fn install_git_public_https_missing_branch_or_tag() { "###); } +#[tokio::test] +#[cfg(feature = "git")] +async fn install_git_public_rate_limited_by_github_rest_api_403_response() { + let context = TestContext::new("3.12"); + + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with(ResponseTemplate::new(403)) + .expect(1) + .mount(&server) + .await; + + uv_snapshot!(context.filters(), context + .pip_install() + .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage") + .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389) + "); +} + +#[tokio::test] +#[cfg(feature = "git")] +async fn install_git_public_rate_limited_by_github_rest_api_429_response() { + use uv_client::DEFAULT_RETRIES; + + let context = TestContext::new("3.12"); + + let server = MockServer::start().await; + Mock::given(method("GET")) + .respond_with(ResponseTemplate::new(429)) + .expect(1 + u64::from(DEFAULT_RETRIES)) // Middleware retries on 429 by default + .mount(&server) + .await; + + uv_snapshot!(context.filters(), context + .pip_install() + .arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage") + .env("UV_GITHUB_FAST_PATH_URL", server.uri()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389) + "); +} + /// Install a package from a public GitHub repository at a ref that does not exist #[test] #[cfg(feature = "git")] @@ -2291,7 +2341,7 @@ fn install_git_private_https_pat_at_ref() { /// See: . 
#[test] #[cfg(feature = "git")] -#[ignore] +#[ignore = "Modifies the user's keyring"] fn install_git_private_https_pat_and_username() { let context = TestContext::new(DEFAULT_PYTHON_VERSION); let token = decode_token(common::READ_ONLY_GITHUB_TOKEN); @@ -4877,10 +4927,7 @@ fn no_build_isolation() -> Result<()> { requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?; // We expect the build to fail, because `setuptools` is not installed. - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::<Vec<_>>(); - uv_snapshot!(filters, context.pip_install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.in") .arg("--no-build-isolation"), @r###" @@ -4948,10 +4995,7 @@ fn respect_no_build_isolation_env_var() -> Result<()> { requirements_in.write_str("anyio @ https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz")?; // We expect the build to fail, because `setuptools` is not installed. - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::<Vec<_>>(); - uv_snapshot!(filters, context.pip_install() + uv_snapshot!(context.filters(), context.pip_install() .arg("-r") .arg("requirements.in") .env(EnvVars::UV_NO_BUILD_ISOLATION, "yes"), @r###" @@ -8548,10 +8592,7 @@ fn install_build_isolation_package() -> Result<()> { )?; // Running `uv pip install` should fail for iniconfig. - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::<Vec<_>>(); - uv_snapshot!(filters, context.pip_install() + uv_snapshot!(context.filters(), context.pip_install() .arg("--no-build-isolation-package") .arg("iniconfig") .arg(package.path()), @r###" @@ -8878,10 +8919,7 @@ fn missing_top_level() { fn sklearn() { let context = TestContext::new("3.12"); - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::<Vec<_>>(); - uv_snapshot!(filters, context.pip_install().arg("sklearn"), @r###" + uv_snapshot!(context.filters(), context.pip_install().arg("sklearn"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -8931,10 +8969,7 @@ fn resolve_derivation_chain() -> Result<()> { let filters = context .filters() .into_iter() - .chain([ - (r"exit code: 1", "exit status: 1"), - (r"/.*/src", "/[TMP]/src"), - ]) + .chain([(r"/.*/src", "/[TMP]/src")]) .collect::<Vec<_>>(); uv_snapshot!(filters, context.pip_install() @@ -9088,8 +9123,7 @@ fn build_tag() { ); // Ensure that we choose the highest build tag (5). 
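The hunk below swaps an ad-hoc interpreter invocation for `context.python_command()`. Based on the removed call, the helper presumably wraps something like the following: resolve the venv's interpreter and pass `-B` so the check does not litter the venv with `.pyc` files. A sketch under those assumptions (the Unix-only path handling is illustrative):

```rust
use std::path::Path;
use std::process::Command;

fn python_command(venv: &Path) -> Command {
    // Unix layout only, for brevity; Windows uses `Scripts/python.exe`.
    let interpreter = venv.join("bin").join("python");
    let mut command = Command::new(interpreter);
    command.arg("-B"); // don't write `.pyc` bytecode caches into the venv
    command
}

fn main() {
    let status = python_command(Path::new(".venv"))
        .arg("-c")
        .arg("import build_tag; build_tag.main()")
        .status();
    println!("{status:?}");
}
```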
- uv_snapshot!(Command::new(venv_to_interpreter(&context.venv)) - .arg("-B") + uv_snapshot!(context.python_command() .arg("-c") .arg("import build_tag; build_tag.main()") .current_dir(&context.temp_dir), @r###" @@ -9637,6 +9671,43 @@ fn dependency_group() -> Result<()> { Ok(()) } +#[test] +fn virtual_dependency_group() -> Result<()> { + // testing basic `uv pip install --group` functionality + // when the pyproject.toml is virtual + fn new_context() -> Result<TestContext> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [dependency-groups] + foo = ["sortedcontainers"] + bar = ["iniconfig"] + dev = ["sniffio"] + "#, + )?; + Ok(context) + } + + // 'bar' using path sugar + let context = new_context()?; + uv_snapshot!(context.filters(), context.pip_install() + .arg("--group").arg("bar"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "); + + Ok(()) +} + #[test] fn many_pyproject_group() -> Result<()> { // `uv pip install --group` tests with multiple projects @@ -11397,3 +11468,25 @@ fn pep_751_dependency() -> Result<()> { Ok(()) } + +/// Test that we show an error instead of panicking for conflicting arguments in different levels, +/// which are not caught by clap. +#[test] +fn conflicting_flags_clap_bug() { + let context = TestContext::new("3.12"); + + uv_snapshot!(context.filters(), context.command() + .arg("pip") + .arg("--offline") + .arg("install") + .arg("--no-offline") + .arg("tqdm"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: `--offline` and `--no-offline` cannot be used together. Boolean flags on different levels are currently not supported (https://github.com/clap-rs/clap/issues/6049) + " + ); +} diff --git a/crates/uv/tests/it/pip_install_scenarios.rs b/crates/uv/tests/it/pip_install_scenarios.rs index 1a95b1caa..153d5a8fb 100644 --- a/crates/uv/tests/it/pip_install_scenarios.rs +++ b/crates/uv/tests/it/pip_install_scenarios.rs @@ -5,52 +5,20 @@ //! #![cfg(all(feature = "python", feature = "pypi", unix))] -use std::path::Path; use std::process::Command; -use assert_cmd::assert::Assert; -use assert_cmd::prelude::*; - use uv_static::EnvVars; -use crate::common::{ - TestContext, build_vendor_links_url, get_bin, packse_index_url, uv_snapshot, - venv_to_interpreter, -}; - -fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert { - Command::new(venv_to_interpreter(venv)) - .arg("-c") - .arg(command) - .current_dir(temp_dir) - .assert() -} - -fn assert_installed(venv: &Path, package: &'static str, version: &'static str, temp_dir: &Path) { - assert_command( - venv, - format!("import {package} as package; print(package.__version__, end='')").as_str(), - temp_dir, - ) - .success() - .stdout(version); -} - -fn assert_not_installed(venv: &Path, package: &'static str, temp_dir: &Path) { - assert_command(venv, format!("import {package}").as_str(), temp_dir).failure(); -} +use crate::common::{TestContext, build_vendor_links_url, packse_index_url, uv_snapshot}; /// Create a `pip install` command with options shared across all scenarios. 
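The rewritten `command` helper that follows keeps the flags every scenario needs in one place and strips `UV_EXCLUDE_NEWER`, presumably because the shared test context pins that variable while the packse scenarios need to see all published versions. A generic sketch of the same fixture pattern (the binary path and names are illustrative):

```rust
use std::process::Command;

fn scenario_command(index_url: &str, find_links: &str) -> Command {
    let mut command = Command::new("uv");
    command
        .arg("pip")
        .arg("install")
        .arg("--index-url")
        .arg(index_url)
        .arg("--find-links")
        .arg(find_links)
        // The scenarios must observe every published version, so drop any
        // inherited cutoff from the environment.
        .env_remove("UV_EXCLUDE_NEWER");
    command
}

fn main() {
    let mut command =
        scenario_command("https://example.invalid/simple", "https://example.invalid/links");
    command.arg("package-a==2.0.0"); // per-test arguments layer on top
    println!("{command:?}");
}
```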
fn command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); + let mut command = context.pip_install(); command - .arg("pip") - .arg("install") .arg("--index-url") .arg(packse_index_url()) .arg("--find-links") .arg(build_vendor_links_url()); - context.add_shared_options(&mut command, true); command.env_remove(EnvVars::UV_EXCLUDE_NEWER); command } @@ -88,11 +56,7 @@ fn requires_exact_version_does_not_exist() { ╰─▶ Because there is no version of package-a==2.0.0 and you require package-a==2.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "requires_exact_version_does_not_exist_a", - &context.temp_dir, - ); + context.assert_not_installed("requires_exact_version_does_not_exist_a"); } /// The user requires a version of `a` greater than `1.0.0` but only smaller or equal versions exist @@ -130,11 +94,7 @@ fn requires_greater_version_does_not_exist() { ╰─▶ Because only package-a<=1.0.0 is available and you require package-a>1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "requires_greater_version_does_not_exist_a", - &context.temp_dir, - ); + context.assert_not_installed("requires_greater_version_does_not_exist_a"); } /// The user requires a version of `a` less than `1.0.0` but only larger versions exist @@ -174,11 +134,7 @@ fn requires_less_version_does_not_exist() { ╰─▶ Because only package-a>=2.0.0 is available and you require package-a<2.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "requires_less_version_does_not_exist_a", - &context.temp_dir, - ); + context.assert_not_installed("requires_less_version_does_not_exist_a"); } /// The user requires any version of package `a` which does not exist. @@ -211,11 +167,7 @@ fn requires_package_does_not_exist() { ╰─▶ Because package-a was not found in the package registry and you require package-a, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "requires_package_does_not_exist_a", - &context.temp_dir, - ); + context.assert_not_installed("requires_package_does_not_exist_a"); } /// The user requires package `a` but `a` requires package `b` which does not exist @@ -254,11 +206,7 @@ fn transitive_requires_package_does_not_exist() { And because only package-a==1.0.0 is available and you require package-a, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "transitive_requires_package_does_not_exist_a", - &context.temp_dir, - ); + context.assert_not_installed("transitive_requires_package_does_not_exist_a"); } /// There is a non-contiguous range of compatible versions for the requested package `a`, but another dependency `c` excludes the range. This is the same as `dependency-excludes-range-of-compatible-versions` but some of the versions of `a` are incompatible for another reason e.g. dependency on non-existent package `d`. @@ -376,21 +324,12 @@ fn dependency_excludes_non_contiguous_range_of_compatible_versions() { "); // Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`, but all available versions of `c` exclude that range of `a` so resolution fails. 
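From here on, the scenario tests call `context.assert_installed(...)` and `context.assert_not_installed(...)` instead of the free functions removed above. Based on those removed functions, the methods plausibly reduce to the following: import the package in the venv interpreter and check the reported version. A sketch assuming `assert_cmd`; `venv_python` and the Unix-only layout are illustrative:

```rust
use std::path::{Path, PathBuf};
use std::process::Command;

use assert_cmd::prelude::*;

fn venv_python(venv: &Path) -> PathBuf {
    venv.join("bin").join("python") // Unix layout only, for brevity
}

fn assert_installed(venv: &Path, package: &'static str, version: &'static str) {
    // Import the package in the venv interpreter and compare the version.
    Command::new(venv_python(venv))
        .arg("-c")
        .arg(format!(
            "import {package} as package; print(package.__version__, end='')"
        ))
        .assert()
        .success()
        .stdout(version);
}

fn assert_not_installed(venv: &Path, package: &'static str) {
    // A failing import is the signal that the package is absent.
    Command::new(venv_python(venv))
        .arg("-c")
        .arg(format!("import {package}"))
        .assert()
        .failure();
}
```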
- assert_not_installed( - &context.venv, - "dependency_excludes_non_contiguous_range_of_compatible_versions_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "dependency_excludes_non_contiguous_range_of_compatible_versions_b", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "dependency_excludes_non_contiguous_range_of_compatible_versions_c", - &context.temp_dir, - ); + context + .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_a"); + context + .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_b"); + context + .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_c"); } /// There is a range of compatible versions for the requested package `a`, but another dependency `c` excludes that range. @@ -499,21 +438,9 @@ fn dependency_excludes_range_of_compatible_versions() { "); // Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`, but all available versions of `c` exclude that range of `a` so resolution fails. - assert_not_installed( - &context.venv, - "dependency_excludes_range_of_compatible_versions_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "dependency_excludes_range_of_compatible_versions_b", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "dependency_excludes_range_of_compatible_versions_c", - &context.temp_dir, - ); + context.assert_not_installed("dependency_excludes_range_of_compatible_versions_a"); + context.assert_not_installed("dependency_excludes_range_of_compatible_versions_b"); + context.assert_not_installed("dependency_excludes_range_of_compatible_versions_c"); } /// Only one version of the requested package `a` is compatible, but the user has banned that version. @@ -586,16 +513,8 @@ fn excluded_only_compatible_version() { "); // Only `a==1.2.0` is available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`. The user has excluded that version of `a` so resolution fails. - assert_not_installed( - &context.venv, - "excluded_only_compatible_version_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "excluded_only_compatible_version_b", - &context.temp_dir, - ); + context.assert_not_installed("excluded_only_compatible_version_a"); + context.assert_not_installed("excluded_only_compatible_version_b"); } /// Only one version of the requested package is available, but the user has banned that version. @@ -635,7 +554,7 @@ fn excluded_only_version() { "); // Only `a==1.0.0` is available but the user excluded it. - assert_not_installed(&context.venv, "excluded_only_version_a", &context.temp_dir); + context.assert_not_installed("excluded_only_version_a"); } /// Multiple optional dependencies are requested for the package via an 'all' extra. 
@@ -701,24 +620,9 @@ fn all_extras_required() { + package-c==1.0.0 "); - assert_installed( - &context.venv, - "all_extras_required_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "all_extras_required_b", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "all_extras_required_c", - "1.0.0", - &context.temp_dir, - ); + context.assert_installed("all_extras_required_a", "1.0.0"); + context.assert_installed("all_extras_required_b", "1.0.0"); + context.assert_installed("all_extras_required_c", "1.0.0"); } /// Optional dependencies are requested for the package, the extra is only available on an older version. @@ -771,12 +675,7 @@ fn extra_does_not_exist_backtrack() { "); // The resolver should not backtrack to `a==1.0.0` because missing extras are allowed during resolution. `b` should not be installed. - assert_installed( - &context.venv, - "extra_does_not_exist_backtrack_a", - "3.0.0", - &context.temp_dir, - ); + context.assert_installed("extra_does_not_exist_backtrack_a", "3.0.0"); } /// One of two incompatible optional dependencies are requested for the package. @@ -829,18 +728,8 @@ fn extra_incompatible_with_extra_not_requested() { "); // Because the user does not request both extras, it is okay that one is incompatible with the other. - assert_installed( - &context.venv, - "extra_incompatible_with_extra_not_requested_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "extra_incompatible_with_extra_not_requested_b", - "2.0.0", - &context.temp_dir, - ); + context.assert_installed("extra_incompatible_with_extra_not_requested_a", "1.0.0"); + context.assert_installed("extra_incompatible_with_extra_not_requested_b", "2.0.0"); } /// Multiple optional dependencies are requested for the package, but they have conflicting requirements with each other. @@ -892,11 +781,7 @@ fn extra_incompatible_with_extra() { "); // Because both `extra_b` and `extra_c` are requested and they require incompatible versions of `b`, `a` cannot be installed. - assert_not_installed( - &context.venv, - "extra_incompatible_with_extra_a", - &context.temp_dir, - ); + context.assert_not_installed("extra_incompatible_with_extra_a"); } /// Optional dependencies are requested for the package, but the extra is not compatible with other requested versions. @@ -946,16 +831,8 @@ fn extra_incompatible_with_root() { "); // Because the user requested `b==2.0.0` but the requested extra requires `b==1.0.0`, the dependencies cannot be satisfied. - assert_not_installed( - &context.venv, - "extra_incompatible_with_root_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "extra_incompatible_with_root_b", - &context.temp_dir, - ); + context.assert_not_installed("extra_incompatible_with_root_a"); + context.assert_not_installed("extra_incompatible_with_root_b"); } /// Optional dependencies are requested for the package. @@ -1001,18 +878,8 @@ fn extra_required() { + package-b==1.0.0 "); - assert_installed( - &context.venv, - "extra_required_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "extra_required_b", - "1.0.0", - &context.temp_dir, - ); + context.assert_installed("extra_required_a", "1.0.0"); + context.assert_installed("extra_required_b", "1.0.0"); } /// Optional dependencies are requested for the package, but the extra does not exist. @@ -1052,7 +919,7 @@ fn missing_extra() { "); // Missing extras are ignored during resolution. 
- assert_installed(&context.venv, "missing_extra_a", "1.0.0", &context.temp_dir); + context.assert_installed("missing_extra_a", "1.0.0"); } /// Multiple optional dependencies are requested for the package. @@ -1106,24 +973,9 @@ fn multiple_extras_required() { + package-c==1.0.0 "); - assert_installed( - &context.venv, - "multiple_extras_required_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "multiple_extras_required_b", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "multiple_extras_required_c", - "1.0.0", - &context.temp_dir, - ); + context.assert_installed("multiple_extras_required_a", "1.0.0"); + context.assert_installed("multiple_extras_required_b", "1.0.0"); + context.assert_installed("multiple_extras_required_c", "1.0.0"); } /// The user requires two incompatible, existing versions of package `a` @@ -1164,16 +1016,8 @@ fn direct_incompatible_versions() { ╰─▶ Because you require package-a==1.0.0 and package-a==2.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "direct_incompatible_versions_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "direct_incompatible_versions_a", - &context.temp_dir, - ); + context.assert_not_installed("direct_incompatible_versions_a"); + context.assert_not_installed("direct_incompatible_versions_a"); } /// The user requires `a`, which requires two incompatible, existing versions of package `b` @@ -1214,11 +1058,7 @@ fn transitive_incompatible_versions() { And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "transitive_incompatible_versions_a", - &context.temp_dir, - ); + context.assert_not_installed("transitive_incompatible_versions_a"); } /// The user requires packages `a` and `b` but `a` requires a different version of `b` @@ -1265,16 +1105,8 @@ fn transitive_incompatible_with_root_version() { And because you require package-a and package-b==1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "transitive_incompatible_with_root_version_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "transitive_incompatible_with_root_version_b", - &context.temp_dir, - ); + context.assert_not_installed("transitive_incompatible_with_root_version_a"); + context.assert_not_installed("transitive_incompatible_with_root_version_b"); } /// The user requires package `a` and `b`; `a` and `b` require different versions of `c` @@ -1327,16 +1159,8 @@ fn transitive_incompatible_with_transitive() { And because you require package-a and package-b, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "transitive_incompatible_with_transitive_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "transitive_incompatible_with_transitive_b", - &context.temp_dir, - ); + context.assert_not_installed("transitive_incompatible_with_transitive_a"); + context.assert_not_installed("transitive_incompatible_with_transitive_b"); } /// A local version should be included in inclusive ordered comparisons. @@ -1378,12 +1202,7 @@ fn local_greater_than_or_equal() { "); // The version '1.2.3+foo' satisfies the constraint '>=1.2.3'. 
- assert_installed( - &context.venv, - "local_greater_than_or_equal_a", - "1.2.3+foo", - &context.temp_dir, - ); + context.assert_installed("local_greater_than_or_equal_a", "1.2.3+foo"); } /// A local version should be excluded in exclusive ordered comparisons. @@ -1419,7 +1238,7 @@ fn local_greater_than() { ╰─▶ Because only package-a==1.2.3+foo is available and you require package-a>1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed(&context.venv, "local_greater_than_a", &context.temp_dir); + context.assert_not_installed("local_greater_than_a"); } /// A local version should be included in inclusive ordered comparisons. @@ -1461,12 +1280,7 @@ fn local_less_than_or_equal() { "); // The version '1.2.3+foo' satisfies the constraint '<=1.2.3'. - assert_installed( - &context.venv, - "local_less_than_or_equal_a", - "1.2.3+foo", - &context.temp_dir, - ); + context.assert_installed("local_less_than_or_equal_a", "1.2.3+foo"); } /// A local version should be excluded in exclusive ordered comparisons. @@ -1502,7 +1316,7 @@ fn local_less_than() { ╰─▶ Because only package-a==1.2.3+foo is available and you require package-a<1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed(&context.venv, "local_less_than_a", &context.temp_dir); + context.assert_not_installed("local_less_than_a"); } /// Tests that we can select an older version with a local segment when newer versions are incompatible. @@ -1546,12 +1360,7 @@ fn local_not_latest() { + package-a==1.2.1+foo "); - assert_installed( - &context.venv, - "local_not_latest_a", - "1.2.1+foo", - &context.temp_dir, - ); + context.assert_installed("local_not_latest_a", "1.2.1+foo"); } /// If there is a 1.2.3 version with an sdist published and no compatible wheels, then the sdist will be used. @@ -1593,12 +1402,7 @@ fn local_not_used_with_sdist() { "); // The version '1.2.3' with an sdist satisfies the constraint '==1.2.3'. - assert_installed( - &context.venv, - "local_not_used_with_sdist_a", - "1.2.3+foo", - &context.temp_dir, - ); + context.assert_installed("local_not_used_with_sdist_a", "1.2.3+foo"); } /// A simple version constraint should not exclude published versions with local segments. @@ -1640,12 +1444,7 @@ fn local_simple() { "); // The version '1.2.3+foo' satisfies the constraint '==1.2.3'. - assert_installed( - &context.venv, - "local_simple_a", - "1.2.3+foo", - &context.temp_dir, - ); + context.assert_installed("local_simple_a", "1.2.3+foo"); } /// A dependency depends on a conflicting local version of a direct dependency, but we can backtrack to a compatible version. @@ -1701,18 +1500,8 @@ fn local_transitive_backtrack() { "); // Backtracking to '1.0.0' gives us compatible local versions of b. - assert_installed( - &context.venv, - "local_transitive_backtrack_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "local_transitive_backtrack_b", - "2.0.0+foo", - &context.temp_dir, - ); + context.assert_installed("local_transitive_backtrack_a", "1.0.0"); + context.assert_installed("local_transitive_backtrack_b", "2.0.0+foo"); } /// A dependency depends on a conflicting local version of a direct dependency. @@ -1759,16 +1548,8 @@ fn local_transitive_conflicting() { And because you require package-a and package-b==2.0.0+foo, we can conclude that your requirements are unsatisfiable. 
"); - assert_not_installed( - &context.venv, - "local_transitive_conflicting_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "local_transitive_conflicting_b", - &context.temp_dir, - ); + context.assert_not_installed("local_transitive_conflicting_a"); + context.assert_not_installed("local_transitive_conflicting_b"); } /// A transitive dependency has both a non-local and local version published, but the non-local version is unusable. @@ -1819,18 +1600,8 @@ fn local_transitive_confounding() { "); // The version '2.0.0+foo' satisfies the constraint '==2.0.0'. - assert_installed( - &context.venv, - "local_transitive_confounding_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "local_transitive_confounding_b", - "2.0.0+foo", - &context.temp_dir, - ); + context.assert_installed("local_transitive_confounding_a", "1.0.0"); + context.assert_installed("local_transitive_confounding_b", "2.0.0+foo"); } /// A transitive constraint on a local version should match an inclusive ordered operator. @@ -1881,18 +1652,8 @@ fn local_transitive_greater_than_or_equal() { "); // The version '2.0.0+foo' satisfies both >=2.0.0 and ==2.0.0+foo. - assert_installed( - &context.venv, - "local_transitive_greater_than_or_equal_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "local_transitive_greater_than_or_equal_b", - "2.0.0+foo", - &context.temp_dir, - ); + context.assert_installed("local_transitive_greater_than_or_equal_a", "1.0.0"); + context.assert_installed("local_transitive_greater_than_or_equal_b", "2.0.0+foo"); } /// A transitive constraint on a local version should not match an exclusive ordered operator. @@ -1939,16 +1700,8 @@ fn local_transitive_greater_than() { And because you require package-a and package-b==2.0.0+foo, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "local_transitive_greater_than_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "local_transitive_greater_than_b", - &context.temp_dir, - ); + context.assert_not_installed("local_transitive_greater_than_a"); + context.assert_not_installed("local_transitive_greater_than_b"); } /// A transitive constraint on a local version should match an inclusive ordered operator. @@ -1999,18 +1752,8 @@ fn local_transitive_less_than_or_equal() { "); // The version '2.0.0+foo' satisfies both <=2.0.0 and ==2.0.0+foo. - assert_installed( - &context.venv, - "local_transitive_less_than_or_equal_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "local_transitive_less_than_or_equal_b", - "2.0.0+foo", - &context.temp_dir, - ); + context.assert_installed("local_transitive_less_than_or_equal_a", "1.0.0"); + context.assert_installed("local_transitive_less_than_or_equal_b", "2.0.0+foo"); } /// A transitive constraint on a local version should not match an exclusive ordered operator. @@ -2057,16 +1800,8 @@ fn local_transitive_less_than() { And because you require package-a and package-b==2.0.0+foo, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "local_transitive_less_than_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "local_transitive_less_than_b", - &context.temp_dir, - ); + context.assert_not_installed("local_transitive_less_than_a"); + context.assert_not_installed("local_transitive_less_than_b"); } /// A simple version constraint should not exclude published versions with local segments. 
@@ -2117,18 +1852,8 @@ fn local_transitive() { "); // The version '2.0.0+foo' satisfies both ==2.0.0 and ==2.0.0+foo. - assert_installed( - &context.venv, - "local_transitive_a", - "1.0.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "local_transitive_b", - "2.0.0+foo", - &context.temp_dir, - ); + context.assert_installed("local_transitive_a", "1.0.0"); + context.assert_installed("local_transitive_b", "2.0.0+foo"); } /// Even if there is a 1.2.3 version published, if it is unavailable for some reason (no sdist and no compatible wheels in this case), a 1.2.3 version with a local segment should be usable instead. @@ -2170,12 +1895,7 @@ fn local_used_without_sdist() { "); // The version '1.2.3+foo' satisfies the constraint '==1.2.3'. - assert_installed( - &context.venv, - "local_used_without_sdist_a", - "1.2.3+foo", - &context.temp_dir, - ); + context.assert_installed("local_used_without_sdist_a", "1.2.3+foo"); } /// An equal version constraint should match a post-release version if the post-release version is available. @@ -2216,12 +1936,7 @@ fn post_equal_available() { "); // The version '1.2.3.post0' satisfies the constraint '==1.2.3.post0'. - assert_installed( - &context.venv, - "post_equal_available_a", - "1.2.3.post0", - &context.temp_dir, - ); + context.assert_installed("post_equal_available_a", "1.2.3.post0"); } /// An equal version constraint should not match a post-release version if the post-release version is not available. @@ -2259,11 +1974,7 @@ fn post_equal_not_available() { ╰─▶ Because there is no version of package-a==1.2.3.post0 and you require package-a==1.2.3.post0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "post_equal_not_available_a", - &context.temp_dir, - ); + context.assert_not_installed("post_equal_not_available_a"); } /// A greater-than-or-equal version constraint should match a post-release version if the constraint is itself a post-release version. @@ -2305,12 +2016,7 @@ fn post_greater_than_or_equal_post() { "); // The version '1.2.3.post1' satisfies the constraint '>=1.2.3.post0'. - assert_installed( - &context.venv, - "post_greater_than_or_equal_post_a", - "1.2.3.post1", - &context.temp_dir, - ); + context.assert_installed("post_greater_than_or_equal_post_a", "1.2.3.post1"); } /// A greater-than-or-equal version constraint should match a post-release version. @@ -2349,12 +2055,7 @@ fn post_greater_than_or_equal() { "); // The version '1.2.3.post1' satisfies the constraint '>=1.2.3'. - assert_installed( - &context.venv, - "post_greater_than_or_equal_a", - "1.2.3.post1", - &context.temp_dir, - ); + context.assert_installed("post_greater_than_or_equal_a", "1.2.3.post1"); } /// A greater-than version constraint should not match a post-release version if the post-release version is not available. @@ -2394,11 +2095,7 @@ fn post_greater_than_post_not_available() { ╰─▶ Because only package-a<=1.2.3.post1 is available and you require package-a>=1.2.3.post3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "post_greater_than_post_not_available_a", - &context.temp_dir, - ); + context.assert_not_installed("post_greater_than_post_not_available_a"); } /// A greater-than version constraint should match a post-release version if the constraint is itself a post-release version. @@ -2439,12 +2136,7 @@ fn post_greater_than_post() { "); // The version '1.2.3.post1' satisfies the constraint '>1.2.3.post0'. 
- assert_installed( - &context.venv, - "post_greater_than_post_a", - "1.2.3.post1", - &context.temp_dir, - ); + context.assert_installed("post_greater_than_post_a", "1.2.3.post1"); } /// A greater-than version constraint should not match a post-release version. @@ -2480,7 +2172,7 @@ fn post_greater_than() { ╰─▶ Because only package-a==1.2.3.post1 is available and you require package-a>1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed(&context.venv, "post_greater_than_a", &context.temp_dir); + context.assert_not_installed("post_greater_than_a"); } /// A less-than-or-equal version constraint should not match a post-release version. @@ -2516,11 +2208,7 @@ fn post_less_than_or_equal() { ╰─▶ Because only package-a==1.2.3.post1 is available and you require package-a<=1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "post_less_than_or_equal_a", - &context.temp_dir, - ); + context.assert_not_installed("post_less_than_or_equal_a"); } /// A less-than version constraint should not match a post-release version. @@ -2556,7 +2244,7 @@ fn post_less_than() { ╰─▶ Because only package-a==1.2.3.post1 is available and you require package-a<1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed(&context.venv, "post_less_than_a", &context.temp_dir); + context.assert_not_installed("post_less_than_a"); } /// A greater-than version constraint should not match a post-release version with a local version identifier. @@ -2594,11 +2282,7 @@ fn post_local_greater_than_post() { ╰─▶ Because only package-a<=1.2.3.post1+local is available and you require package-a>=1.2.3.post2, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "post_local_greater_than_post_a", - &context.temp_dir, - ); + context.assert_not_installed("post_local_greater_than_post_a"); } /// A greater-than version constraint should not match a post-release version with a local version identifier. @@ -2636,11 +2320,7 @@ fn post_local_greater_than() { ╰─▶ Because only package-a<=1.2.3.post1+local is available and you require package-a>1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "post_local_greater_than_a", - &context.temp_dir, - ); + context.assert_not_installed("post_local_greater_than_a"); } /// A simple version constraint should not match a post-release version. @@ -2676,7 +2356,7 @@ fn post_simple() { ╰─▶ Because there is no version of package-a==1.2.3 and you require package-a==1.2.3, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed(&context.venv, "post_simple_a", &context.temp_dir); + context.assert_not_installed("post_simple_a"); } /// The user requires `a` which has multiple prereleases available with different labels. @@ -2721,12 +2401,7 @@ fn package_multiple_prereleases_kinds() { "); // Release candidates should be the highest precedence prerelease kind. - assert_installed( - &context.venv, - "package_multiple_prereleases_kinds_a", - "1.0.0rc1", - &context.temp_dir, - ); + context.assert_installed("package_multiple_prereleases_kinds_a", "1.0.0rc1"); } /// The user requires `a` which has multiple alphas available. @@ -2771,12 +2446,7 @@ fn package_multiple_prereleases_numbers() { "); // The latest alpha version should be selected. 
- assert_installed( - &context.venv, - "package_multiple_prereleases_numbers_a", - "1.0.0a3", - &context.temp_dir, - ); + context.assert_installed("package_multiple_prereleases_numbers_a", "1.0.0a3"); } /// The user requires a non-prerelease version of `a` which only has prerelease versions available. There are pre-releases on the boundary of their range. @@ -2819,12 +2489,7 @@ fn package_only_prereleases_boundary() { "); // Since there are only prerelease versions of `a` available, a prerelease is allowed. Since the user did not explicitly request a pre-release, pre-releases at the boundary should not be selected. - assert_installed( - &context.venv, - "package_only_prereleases_boundary_a", - "0.1.0a1", - &context.temp_dir, - ); + context.assert_installed("package_only_prereleases_boundary_a", "0.1.0a1"); } /// The user requires a version of package `a` which only matches prerelease versions but they did not include a prerelease specifier. @@ -2865,11 +2530,7 @@ fn package_only_prereleases_in_range() { "); // Since there are stable versions of `a` available, prerelease versions should not be selected without explicit opt-in. - assert_not_installed( - &context.venv, - "package_only_prereleases_in_range_a", - &context.temp_dir, - ); + context.assert_not_installed("package_only_prereleases_in_range_a"); } /// The user requires any version of package `a` which only has prerelease versions available. @@ -2908,12 +2569,7 @@ fn package_only_prereleases() { "); // Since there are only prerelease versions of `a` available, it should be installed even though the user did not include a prerelease specifier. - assert_installed( - &context.venv, - "package_only_prereleases_a", - "1.0.0a1", - &context.temp_dir, - ); + context.assert_installed("package_only_prereleases_a", "1.0.0a1"); } /// The user requires a version of `a` with a prerelease specifier and both prerelease and stable releases are available. @@ -2961,12 +2617,7 @@ fn package_prerelease_specified_mixed_available() { "); // Since the user provided a prerelease specifier, the latest prerelease version should be selected. - assert_installed( - &context.venv, - "package_prerelease_specified_mixed_available_a", - "1.0.0a1", - &context.temp_dir, - ); + context.assert_installed("package_prerelease_specified_mixed_available_a", "1.0.0a1"); } /// The user requires a version of `a` with a prerelease specifier and only stable releases are available. @@ -3014,11 +2665,9 @@ fn package_prerelease_specified_only_final_available() { "); // The latest stable version should be selected. - assert_installed( - &context.venv, + context.assert_installed( "package_prerelease_specified_only_final_available_a", "0.3.0", - &context.temp_dir, ); } @@ -3067,11 +2716,9 @@ fn package_prerelease_specified_only_prerelease_available() { "); // The latest prerelease version should be selected. - assert_installed( - &context.venv, + context.assert_installed( "package_prerelease_specified_only_prerelease_available_a", "0.3.0a1", - &context.temp_dir, ); } @@ -3116,12 +2763,7 @@ fn package_prereleases_boundary() { "); // Since the user did not use a pre-release specifier, pre-releases at the boundary should not be selected even though pre-releases are allowed. - assert_installed( - &context.venv, - "package_prereleases_boundary_a", - "0.1.0", - &context.temp_dir, - ); + context.assert_installed("package_prereleases_boundary_a", "0.1.0"); } /// The user requires a non-prerelease version of `a` but has enabled pre-releases. There are pre-releases on the boundary of their range. 
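The two `package_multiple_prereleases_*` scenarios above pin down prerelease ordering ("release candidates should be the highest precedence prerelease kind"; "the latest alpha version should be selected"). For reference, the precedence they rely on, sketched with `uv_pep440::Version` (again an assumption about the helper crate, not part of the patch):

use std::str::FromStr;
use uv_pep440::Version;

// Within a kind, higher numbers win; across kinds,
// alpha < beta < release candidate < final release.
let a1 = Version::from_str("1.0.0a1").unwrap();
let a3 = Version::from_str("1.0.0a3").unwrap();
let b1 = Version::from_str("1.0.0b1").unwrap();
let rc1 = Version::from_str("1.0.0rc1").unwrap();
let stable = Version::from_str("1.0.0").unwrap();
assert!(a1 < a3 && a3 < b1 && b1 < rc1 && rc1 < stable);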
@@ -3165,12 +2807,7 @@ fn package_prereleases_global_boundary() { "); // Since the user did not use a pre-release specifier, pre-releases at the boundary should not be selected even though pre-releases are allowed. - assert_installed( - &context.venv, - "package_prereleases_global_boundary_a", - "0.1.0", - &context.temp_dir, - ); + context.assert_installed("package_prereleases_global_boundary_a", "0.1.0"); } /// The user requires a prerelease version of `a`. There are pre-releases on the boundary of their range. @@ -3220,12 +2857,7 @@ fn package_prereleases_specifier_boundary() { "); // Since the user used a pre-release specifier, pre-releases at the boundary should be selected. - assert_installed( - &context.venv, - "package_prereleases_specifier_boundary_a", - "0.2.0a1", - &context.temp_dir, - ); + context.assert_installed("package_prereleases_specifier_boundary_a", "0.2.0a1"); } /// The user requires a version of package `a` which only matches prerelease versions. They did not include a prerelease specifier for the package, but they opted into prereleases globally. @@ -3269,11 +2901,9 @@ fn requires_package_only_prereleases_in_range_global_opt_in() { + package-a==1.0.0a1 "); - assert_installed( - &context.venv, + context.assert_installed( "requires_package_only_prereleases_in_range_global_opt_in_a", "1.0.0a1", - &context.temp_dir, ); } @@ -3315,12 +2945,7 @@ fn requires_package_prerelease_and_final_any() { "); // Since the user did not provide a prerelease specifier, the older stable version should be selected. - assert_installed( - &context.venv, - "requires_package_prerelease_and_final_any_a", - "0.1.0", - &context.temp_dir, - ); + context.assert_installed("requires_package_prerelease_and_final_any_a", "0.1.0"); } /// The user requires package `a` which has a dependency on a package which only matches prerelease versions; the user has opted into allowing prereleases in `b` explicitly. @@ -3374,17 +2999,13 @@ fn transitive_package_only_prereleases_in_range_opt_in() { "); // Since the user included a dependency on `b` with a prerelease specifier, a prerelease version can be selected. - assert_installed( - &context.venv, + context.assert_installed( "transitive_package_only_prereleases_in_range_opt_in_a", "0.1.0", - &context.temp_dir, ); - assert_installed( - &context.venv, + context.assert_installed( "transitive_package_only_prereleases_in_range_opt_in_b", "1.0.0a1", - &context.temp_dir, ); } @@ -3432,11 +3053,7 @@ fn transitive_package_only_prereleases_in_range() { "); // Since there are stable versions of `b` available, the prerelease version should not be selected without explicit opt-in. The available version is excluded by the range requested by the user. - assert_not_installed( - &context.venv, - "transitive_package_only_prereleases_in_range_a", - &context.temp_dir, - ); + context.assert_not_installed("transitive_package_only_prereleases_in_range_a"); } /// The user requires any version of package `a` which requires `b` which only has prerelease versions available. @@ -3481,18 +3098,8 @@ fn transitive_package_only_prereleases() { "); // Since there are only prerelease versions of `b` available, it should be selected even though the user did not opt-in to prereleases. 
- assert_installed( - &context.venv, - "transitive_package_only_prereleases_a", - "0.1.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "transitive_package_only_prereleases_b", - "1.0.0a1", - &context.temp_dir, - ); + context.assert_installed("transitive_package_only_prereleases_a", "0.1.0"); + context.assert_installed("transitive_package_only_prereleases_b", "1.0.0a1"); } /// A transitive dependency has both a prerelease and a stable selector, but can only be satisfied by a prerelease. There are many prerelease versions and some are excluded. @@ -3605,16 +3212,10 @@ fn transitive_prerelease_and_stable_dependency_many_versions_holes() { "); // Since the user did not explicitly opt-in to a prerelease, it cannot be selected. - assert_not_installed( - &context.venv, - "transitive_prerelease_and_stable_dependency_many_versions_holes_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "transitive_prerelease_and_stable_dependency_many_versions_holes_b", - &context.temp_dir, - ); + context + .assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_holes_a"); + context + .assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_holes_b"); } /// A transitive dependency has both a prerelease and a stable selector, but can only be satisfied by a prerelease. There are many prerelease versions. @@ -3716,16 +3317,8 @@ fn transitive_prerelease_and_stable_dependency_many_versions() { "); // Since the user did not explicitly opt-in to a prerelease, it cannot be selected. - assert_not_installed( - &context.venv, - "transitive_prerelease_and_stable_dependency_many_versions_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "transitive_prerelease_and_stable_dependency_many_versions_b", - &context.temp_dir, - ); + context.assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_a"); + context.assert_not_installed("transitive_prerelease_and_stable_dependency_many_versions_b"); } /// A transitive dependency has both a prerelease and a stable selector, but can only be satisfied by a prerelease. The user includes an opt-in to prereleases of the transitive dependency. @@ -3788,23 +3381,17 @@ fn transitive_prerelease_and_stable_dependency_opt_in() { "); // Since the user explicitly opted-in to a prerelease for `c`, it can be installed. - assert_installed( - &context.venv, + context.assert_installed( "transitive_prerelease_and_stable_dependency_opt_in_a", "1.0.0", - &context.temp_dir, ); - assert_installed( - &context.venv, + context.assert_installed( "transitive_prerelease_and_stable_dependency_opt_in_b", "1.0.0", - &context.temp_dir, ); - assert_installed( - &context.venv, + context.assert_installed( "transitive_prerelease_and_stable_dependency_opt_in_c", "2.0.0b1", - &context.temp_dir, ); } @@ -3860,16 +3447,8 @@ fn transitive_prerelease_and_stable_dependency() { "); // Since the user did not explicitly opt-in to a prerelease, it cannot be selected. 
- assert_not_installed( - &context.venv, - "transitive_prerelease_and_stable_dependency_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "transitive_prerelease_and_stable_dependency_b", - &context.temp_dir, - ); + context.assert_not_installed("transitive_prerelease_and_stable_dependency_a"); + context.assert_not_installed("transitive_prerelease_and_stable_dependency_b"); } /// The user requires a package where recent versions require a Python version greater than the current version, but an older version is compatible. @@ -3915,12 +3494,7 @@ fn python_greater_than_current_backtrack() { + package-a==1.0.0 "); - assert_installed( - &context.venv, - "python_greater_than_current_backtrack_a", - "1.0.0", - &context.temp_dir, - ); + context.assert_installed("python_greater_than_current_backtrack_a", "1.0.0"); } /// The user requires a package where recent versions require a Python version greater than the current version, but an excluded older version is compatible. @@ -3975,11 +3549,7 @@ fn python_greater_than_current_excluded() { And because you require package-a>=2.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "python_greater_than_current_excluded_a", - &context.temp_dir, - ); + context.assert_not_installed("python_greater_than_current_excluded_a"); } /// The user requires a package which has many versions which all require a Python version greater than the current version @@ -4037,11 +3607,7 @@ fn python_greater_than_current_many() { ╰─▶ Because there is no version of package-a==1.0.0 and you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "python_greater_than_current_many_a", - &context.temp_dir, - ); + context.assert_not_installed("python_greater_than_current_many_a"); } /// The user requires a package which requires a Python version with a patch version greater than the current patch version @@ -4079,11 +3645,7 @@ fn python_greater_than_current_patch() { And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "python_greater_than_current_patch_a", - &context.temp_dir, - ); + context.assert_not_installed("python_greater_than_current_patch_a"); } /// The user requires a package which requires a Python version greater than the current version @@ -4120,11 +3682,7 @@ fn python_greater_than_current() { And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "python_greater_than_current_a", - &context.temp_dir, - ); + context.assert_not_installed("python_greater_than_current_a"); } /// The user requires a package which requires a Python version less than the current version @@ -4199,11 +3757,7 @@ fn python_version_does_not_exist() { And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable. "); - assert_not_installed( - &context.venv, - "python_version_does_not_exist_a", - &context.temp_dir, - ); + context.assert_not_installed("python_version_does_not_exist_a"); } /// Both wheels and source distributions are available, and the user has disabled binaries. 
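The `python_greater_than_current*` scenarios above all reduce to the same check: a distribution whose `Requires-Python` bound excludes the running interpreter is filtered out of resolution. A hedged sketch of that check with `uv_pep440` (the `>=3.14` bound is hypothetical, and the type names are assumptions):

use std::str::FromStr;
use uv_pep440::{Version, VersionSpecifiers};

// A hypothetical `Requires-Python: >=3.14` excludes a 3.12 interpreter,
// so the resolver must either backtrack to an older release or fail.
let requires_python = VersionSpecifiers::from_str(">=3.14").unwrap();
assert!(!requires_python.contains(&Version::from_str("3.12").unwrap()));
assert!(requires_python.contains(&Version::from_str("3.14").unwrap()));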
@@ -4323,11 +3877,7 @@ fn no_sdist_no_wheels_with_matching_abi() { hint: You require CPython 3.12 (`cp312`), but we only found wheels for `package-a` (v1.0.0) with the following Python ABI tag: `graalpy240_310_native` "); - assert_not_installed( - &context.venv, - "no_sdist_no_wheels_with_matching_abi_a", - &context.temp_dir, - ); + context.assert_not_installed("no_sdist_no_wheels_with_matching_abi_a"); } /// No wheels with matching platform tags are available, nor are any source distributions available @@ -4367,11 +3917,7 @@ fn no_sdist_no_wheels_with_matching_platform() { hint: Wheels are available for `package-a` (v1.0.0) on the following platform: `macosx_10_0_ppc64` "); - assert_not_installed( - &context.venv, - "no_sdist_no_wheels_with_matching_platform_a", - &context.temp_dir, - ); + context.assert_not_installed("no_sdist_no_wheels_with_matching_platform_a"); } /// No wheels with matching Python tags are available, nor are any source distributions available @@ -4411,11 +3957,7 @@ fn no_sdist_no_wheels_with_matching_python() { hint: You require CPython 3.12 (`cp312`), but we only found wheels for `package-a` (v1.0.0) with the following Python implementation tag: `graalpy310` "); - assert_not_installed( - &context.venv, - "no_sdist_no_wheels_with_matching_python_a", - &context.temp_dir, - ); + context.assert_not_installed("no_sdist_no_wheels_with_matching_python_a"); } /// No wheels are available, only source distributions but the user has disabled builds. @@ -4456,7 +3998,7 @@ fn no_wheels_no_build() { hint: Wheels are required for `package-a` because building from source is disabled for `package-a` (i.e., with `--no-build-package package-a`) "); - assert_not_installed(&context.venv, "no_wheels_no_build_a", &context.temp_dir); + context.assert_not_installed("no_wheels_no_build_a"); } /// No wheels with matching platform tags are available, just source distributions. @@ -4569,7 +4111,7 @@ fn only_wheels_no_binary() { hint: A source distribution is required for `package-a` because using pre-built wheels is disabled for `package-a` (i.e., with `--no-binary-package package-a`) "); - assert_not_installed(&context.venv, "only_wheels_no_binary_a", &context.temp_dir); + context.assert_not_installed("only_wheels_no_binary_a"); } /// No source distributions are available, only wheels. @@ -4684,11 +4226,7 @@ fn package_only_yanked_in_range() { "); // Since there are other versions of `a` available, yanked versions should not be selected without explicit opt-in. - assert_not_installed( - &context.venv, - "package_only_yanked_in_range_a", - &context.temp_dir, - ); + context.assert_not_installed("package_only_yanked_in_range_a"); } /// The user requires any version of package `a` which only has yanked versions available. @@ -4726,7 +4264,7 @@ fn package_only_yanked() { "); // Yanked versions should not be installed, even if they are the only one available. - assert_not_installed(&context.venv, "package_only_yanked_a", &context.temp_dir); + context.assert_not_installed("package_only_yanked_a"); } /// The user requires any version of `a` and both yanked and unyanked releases are available. @@ -4772,12 +4310,7 @@ fn package_yanked_specified_mixed_available() { "); // The latest unyanked version should be selected. 
- assert_installed( - &context.venv, - "package_yanked_specified_mixed_available_a", - "0.3.0", - &context.temp_dir, - ); + context.assert_installed("package_yanked_specified_mixed_available_a", "0.3.0"); } /// The user requires any version of package `a` which has a yanked version available and an older unyanked version. @@ -4818,12 +4351,7 @@ fn requires_package_yanked_and_unyanked_any() { "); // The unyanked version should be selected. - assert_installed( - &context.venv, - "requires_package_yanked_and_unyanked_any_a", - "0.1.0", - &context.temp_dir, - ); + context.assert_installed("requires_package_yanked_and_unyanked_any_a", "0.1.0"); } /// The user requires package `a` which has a dependency on a package which only matches yanked versions; the user has opted into allowing the yanked version of `b` explicitly. @@ -4877,18 +4405,8 @@ fn transitive_package_only_yanked_in_range_opt_in() { "#); // Since the user included a dependency on `b` with an exact specifier, the yanked version can be selected. - assert_installed( - &context.venv, - "transitive_package_only_yanked_in_range_opt_in_a", - "0.1.0", - &context.temp_dir, - ); - assert_installed( - &context.venv, - "transitive_package_only_yanked_in_range_opt_in_b", - "1.0.0", - &context.temp_dir, - ); + context.assert_installed("transitive_package_only_yanked_in_range_opt_in_a", "0.1.0"); + context.assert_installed("transitive_package_only_yanked_in_range_opt_in_b", "1.0.0"); } /// The user requires package `a` which has a dependency on a package which only matches yanked versions. @@ -4937,11 +4455,7 @@ fn transitive_package_only_yanked_in_range() { "); // Yanked versions should not be installed, even if they are the only valid version in a range. - assert_not_installed( - &context.venv, - "transitive_package_only_yanked_in_range_a", - &context.temp_dir, - ); + context.assert_not_installed("transitive_package_only_yanked_in_range_a"); } /// The user requires any version of package `a` which requires `b` which only has yanked versions available. @@ -4985,11 +4499,7 @@ fn transitive_package_only_yanked() { "); // Yanked versions should not be installed, even if they are the only one available. - assert_not_installed( - &context.venv, - "transitive_package_only_yanked_a", - &context.temp_dir, - ); + context.assert_not_installed("transitive_package_only_yanked_a"); } /// A transitive dependency has both a yanked and an unyanked version, but can only be satisfied by a yanked version. The user includes an opt-in to the yanked version of the transitive dependency. @@ -5052,23 +4562,17 @@ fn transitive_yanked_and_unyanked_dependency_opt_in() { "#); // Since the user explicitly selected the yanked version of `c`, it can be installed. - assert_installed( - &context.venv, + context.assert_installed( "transitive_yanked_and_unyanked_dependency_opt_in_a", "1.0.0", - &context.temp_dir, ); - assert_installed( - &context.venv, + context.assert_installed( "transitive_yanked_and_unyanked_dependency_opt_in_b", "1.0.0", - &context.temp_dir, ); - assert_installed( - &context.venv, + context.assert_installed( "transitive_yanked_and_unyanked_dependency_opt_in_c", "2.0.0", - &context.temp_dir, ); } @@ -5122,14 +4626,6 @@ fn transitive_yanked_and_unyanked_dependency() { "); // Since the user did not explicitly select the yanked version, it cannot be used.
- assert_not_installed( - &context.venv, - "transitive_yanked_and_unyanked_dependency_a", - &context.temp_dir, - ); - assert_not_installed( - &context.venv, - "transitive_yanked_and_unyanked_dependency_b", - &context.temp_dir, - ); + context.assert_not_installed("transitive_yanked_and_unyanked_dependency_a"); + context.assert_not_installed("transitive_yanked_and_unyanked_dependency_b"); } diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs index 32252396e..43cbc26c7 100644 --- a/crates/uv/tests/it/pip_sync.rs +++ b/crates/uv/tests/it/pip_sync.rs @@ -1,6 +1,4 @@ use std::env::consts::EXE_SUFFIX; -use std::path::Path; -use std::process::Command; use anyhow::Result; use assert_cmd::prelude::*; @@ -11,24 +9,10 @@ use indoc::indoc; use predicates::Predicate; use url::Url; -use crate::common::{ - TestContext, download_to_disk, site_packages_path, uv_snapshot, venv_to_interpreter, -}; +use crate::common::{TestContext, download_to_disk, site_packages_path, uv_snapshot}; use uv_fs::{Simplified, copy_dir_all}; use uv_static::EnvVars; -fn check_command(venv: &Path, command: &str, temp_dir: &Path) { - Command::new(venv_to_interpreter(venv)) - // Our tests change files in <1s, so we must disable CPython bytecode caching or we'll get stale files - // https://github.com/python/cpython/issues/75953 - .arg("-B") - .arg("-c") - .arg(command) - .current_dir(temp_dir) - .assert() - .success(); -} - #[test] fn missing_requirements_txt() { let context = TestContext::new("3.12"); @@ -463,7 +447,13 @@ fn link() -> Result<()> { "### ); - check_command(&context2.venv, "import iniconfig", &context2.temp_dir); + context2 + .python_command() + .arg("-c") + .arg("import iniconfig") + .current_dir(&context2.temp_dir) + .assert() + .success(); Ok(()) } @@ -5221,8 +5211,8 @@ fn target_built_distribution() -> Result<()> { context.assert_command("import iniconfig").failure(); // Ensure that we can import the package by augmenting the `PYTHONPATH`. - Command::new(venv_to_interpreter(&context.venv)) - .arg("-B") + context + .python_command() .arg("-c") .arg("import iniconfig") .env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path()) @@ -5326,8 +5316,8 @@ fn target_source_distribution() -> Result<()> { context.assert_command("import iniconfig").failure(); // Ensure that we can import the package by augmenting the `PYTHONPATH`. - Command::new(venv_to_interpreter(&context.venv)) - .arg("-B") + context + .python_command() .arg("-c") .arg("import iniconfig") .env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path()) @@ -5397,8 +5387,8 @@ fn target_no_build_isolation() -> Result<()> { context.assert_command("import wheel").failure(); // Ensure that we can import the package by augmenting the `PYTHONPATH`. - Command::new(venv_to_interpreter(&context.venv)) - .arg("-B") + context + .python_command() .arg("-c") .arg("import wheel") .env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path()) @@ -5474,8 +5464,8 @@ fn prefix() -> Result<()> { context.assert_command("import iniconfig").failure(); // Ensure that we can import the package by augmenting the `PYTHONPATH`. 
- Command::new(venv_to_interpreter(&context.venv)) - .arg("-B") + context + .python_command() .arg("-c") .arg("import iniconfig") .env( diff --git a/crates/uv/tests/it/pip_uninstall.rs b/crates/uv/tests/it/pip_uninstall.rs index 3c1c0d717..c72b92876 100644 --- a/crates/uv/tests/it/pip_uninstall.rs +++ b/crates/uv/tests/it/pip_uninstall.rs @@ -5,7 +5,7 @@ use assert_cmd::prelude::*; use assert_fs::fixture::ChildPath; use assert_fs::prelude::*; -use crate::common::{TestContext, get_bin, uv_snapshot, venv_to_interpreter}; +use crate::common::{TestContext, get_bin, uv_snapshot}; #[test] fn no_arguments() { @@ -113,12 +113,7 @@ fn uninstall() -> Result<()> { .assert() .success(); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import markupsafe") - .current_dir(&context.temp_dir) - .assert() - .success(); + context.assert_command("import markupsafe").success(); uv_snapshot!(context.pip_uninstall() .arg("MarkupSafe"), @r###" @@ -132,12 +127,7 @@ fn uninstall() -> Result<()> { "### ); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import markupsafe") - .current_dir(&context.temp_dir) - .assert() - .failure(); + context.assert_command("import markupsafe").failure(); Ok(()) } @@ -156,12 +146,7 @@ fn missing_record() -> Result<()> { .assert() .success(); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import markupsafe") - .current_dir(&context.temp_dir) - .assert() - .success(); + context.assert_command("import markupsafe").success(); // Delete the RECORD file. let dist_info = context.site_packages().join("MarkupSafe-2.1.3.dist-info"); @@ -191,7 +176,7 @@ fn uninstall_editable_by_name() -> Result<()> { "-e {}", context .workspace_root - .join("scripts/packages/poetry_editable") + .join("scripts/packages/flit_editable") .as_os_str() .to_str() .expect("Path is valid unicode") @@ -202,30 +187,22 @@ fn uninstall_editable_by_name() -> Result<()> { .assert() .success(); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import poetry_editable") - .assert() - .success(); + context.assert_command("import flit_editable").success(); // Uninstall the editable by name. uv_snapshot!(context.filters(), context.pip_uninstall() - .arg("poetry-editable"), @r###" + .arg("flit-editable"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Uninstalled 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/poetry_editable) + - flit-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/flit_editable) "### ); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import poetry_editable") - .assert() - .failure(); + context.assert_command("import flit_editable").failure(); Ok(()) } @@ -239,7 +216,7 @@ fn uninstall_by_path() -> Result<()> { requirements_txt.write_str( context .workspace_root - .join("scripts/packages/poetry_editable") + .join("scripts/packages/flit_editable") .as_os_str() .to_str() .expect("Path is valid unicode"), @@ -251,30 +228,22 @@ fn uninstall_by_path() -> Result<()> { .assert() .success(); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import poetry_editable") - .assert() - .success(); + context.assert_command("import flit_editable").success(); // Uninstall the editable by path. 
uv_snapshot!(context.filters(), context.pip_uninstall() - .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" + .arg(context.workspace_root.join("scripts/packages/flit_editable")), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Uninstalled 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/poetry_editable) + - flit-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/flit_editable) "### ); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import poetry_editable") - .assert() - .failure(); + context.assert_command("import flit_editable").failure(); Ok(()) } @@ -288,7 +257,7 @@ fn uninstall_duplicate_by_path() -> Result<()> { requirements_txt.write_str( context .workspace_root - .join("scripts/packages/poetry_editable") + .join("scripts/packages/flit_editable") .as_os_str() .to_str() .expect("Path is valid unicode"), @@ -300,31 +269,23 @@ fn uninstall_duplicate_by_path() -> Result<()> { .assert() .success(); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import poetry_editable") - .assert() - .success(); + context.assert_command("import flit_editable").success(); // Uninstall the editable by both path and name. uv_snapshot!(context.filters(), context.pip_uninstall() - .arg("poetry-editable") - .arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###" + .arg("flit-editable") + .arg(context.workspace_root.join("scripts/packages/flit_editable")), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Uninstalled 1 package in [TIME] - - poetry-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/poetry_editable) + - flit-editable==0.1.0 (from file://[WORKSPACE]/scripts/packages/flit_editable) "### ); - Command::new(venv_to_interpreter(&context.venv)) - .arg("-c") - .arg("import poetry_editable") - .assert() - .failure(); + context.assert_command("import flit_editable").failure(); Ok(()) } diff --git a/crates/uv/tests/it/python_find.rs b/crates/uv/tests/it/python_find.rs index 11bf421a3..f438e9b4d 100644 --- a/crates/uv/tests/it/python_find.rs +++ b/crates/uv/tests/it/python_find.rs @@ -318,15 +318,15 @@ fn python_find_project() { "###); // Unless explicitly requested - uv_snapshot!(context.filters(), context.python_find().arg("3.10"), @r###" + uv_snapshot!(context.filters(), context.python_find().arg("3.10"), @r" success: true exit_code: 0 ----- stdout ----- [PYTHON-3.10] ----- stderr ----- - warning: The requested interpreter resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` - "###); + warning: The requested interpreter resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` (from `project.requires-python`) + "); // Or `--no-project` is used uv_snapshot!(context.filters(), context.python_find().arg("--no-project"), @r###" @@ -367,15 +367,16 @@ fn python_find_project() { "###); // We should warn on subsequent uses, but respect the pinned version? - uv_snapshot!(context.filters(), context.python_find(), @r###" + uv_snapshot!(context.filters(), context.python_find(), @r" success: true exit_code: 0 ----- stdout ----- [PYTHON-3.10] ----- stderr ----- - warning: The Python request from `.python-version` resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11`. Use `uv python pin` to update the `.python-version` file to a compatible version. 
- "###); + warning: The Python request from `.python-version` resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` (from `project.requires-python`) + Use `uv python pin` to update the `.python-version` file to a compatible version + "); // Unless the pin file is outside the project, in which case we should just ignore it let child_dir = context.temp_dir.child("child"); diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index 913711c7c..7fd596cd8 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -1,3 +1,6 @@ +#[cfg(windows)] +use std::path::PathBuf; + use std::{env, path::Path, process::Command}; use crate::common::{TestContext, uv_snapshot}; @@ -8,6 +11,7 @@ use assert_fs::{ use indoc::indoc; use predicates::prelude::predicate; use tracing::debug; + use uv_fs::Simplified; use uv_static::EnvVars; @@ -351,6 +355,25 @@ fn python_install_preview() { #[cfg(unix)] bin_python.assert(predicate::path::is_symlink()); + // The link should be to a path containing a minor version symlink directory + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/bin/python3.13" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/python" + ); + }); + } + // The executable should "work" uv_snapshot!(context.filters(), Command::new(bin_python.as_os_str()) .arg("-c").arg("import subprocess; print('hello world')"), @r###" @@ -459,8 +482,53 @@ fn python_install_preview() { // The executable should be removed bin_python.assert(predicate::path::missing()); + // Install a minor version + uv_snapshot!(context.filters(), context.python_install().arg("3.11").arg("--preview"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.11.13 in [TIME] + + cpython-3.11.13-[PLATFORM] (python3.11) + "); + + let bin_python = context + .bin_dir + .child(format!("python3.11{}", std::env::consts::EXE_SUFFIX)); + + // The link should be to a path containing a minor version symlink directory + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11-[PLATFORM]/bin/python3.11" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11-[PLATFORM]/python" + ); + }); + } + + uv_snapshot!(context.filters(), context.python_uninstall().arg("3.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python versions matching: Python 3.11 + Uninstalled Python 3.11.13 in [TIME] + - cpython-3.11.13-[PLATFORM] (python3.11) + "); + // Install multiple patch versions - uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.8").arg("3.12.6"), @r###" + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.8").arg("3.12.6"), @r" success: true exit_code: 0 ----- stdout ----- @@ -469,27 +537,27 @@ fn python_install_preview() { Installed 2 versions in [TIME] + cpython-3.12.6-[PLATFORM] + cpython-3.12.8-[PLATFORM] (python3.12) - "###); + "); let bin_python = context .bin_dir 
.child(format!("python3.12{}", std::env::consts::EXE_SUFFIX)); - // The link should be for the newer patch version + // The link should resolve to the newer patch version if cfg!(unix) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/bin/python3.12" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/bin/python3.12" ); }); - } else { + } else if cfg!(windows) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/python" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/python" ); }); } @@ -517,6 +585,25 @@ fn python_install_preview_upgrade() { + cpython-3.12.5-[PLATFORM] (python3.12) "###); + // Installing with a patch version should cause the link to be to the patch installation. + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python" + ); + }); + } + // Installing 3.12.4 should not replace the executable, but also shouldn't fail uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.4"), @r###" success: true @@ -533,15 +620,15 @@ fn python_install_preview_upgrade() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12" ); }); - } else { + } else if cfg!(windows) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python" ); }); } @@ -562,15 +649,15 @@ fn python_install_preview_upgrade() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12" ); }); - } else { + } else if cfg!(windows) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python" ); }); } @@ -591,15 +678,15 @@ fn python_install_preview_upgrade() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.4-[PLATFORM]/bin/python3.12" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.4-[PLATFORM]/bin/python3.12" ); }); - } else { + } else if cfg!(windows) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.4-[PLATFORM]/python" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.4-[PLATFORM]/python" ); }); } @@ -620,15 +707,15 @@ fn 
python_install_preview_upgrade() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/bin/python3.12" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/bin/python3.12" ); }); - } else { + } else if cfg!(windows) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/python" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/python" ); }); } @@ -825,7 +912,7 @@ fn python_install_default() { bin_python_default.assert(predicate::path::missing()); // Install the latest version, i.e., a "default install" - uv_snapshot!(context.filters(), context.python_install().arg("--preview"), @r" + uv_snapshot!(context.filters(), context.python_install().arg("--default").arg("--preview"), @r" success: true exit_code: 0 ----- stdout ----- @@ -840,6 +927,75 @@ fn python_install_default() { bin_python_major.assert(predicate::path::exists()); bin_python_default.assert(predicate::path::exists()); + // And 3.13 should be the default + if cfg!(unix) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/bin/python3.13" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/bin/python3.13" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/bin/python3.13" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + ); + }); + } else if cfg!(windows) { + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/python" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/python" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + + insta::with_settings!({ + filters => context.filters(), + }, { + insta::assert_snapshot!( + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13-[PLATFORM]/python" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + ); + }); + } + // Uninstall again uv_snapshot!(context.filters(), context.python_uninstall().arg("3.13"), @r" success: true @@ -893,7 +1049,10 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_major), 
@"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12-[PLATFORM]/bin/python3.12" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" ); }); @@ -901,7 +1060,10 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + read_link(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12-[PLATFORM]/bin/python3.12" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" ); }); @@ -909,7 +1071,10 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12-[PLATFORM]/bin/python3.12" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" ); }); } else { @@ -917,7 +1082,10 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + read_link(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12-[PLATFORM]/python" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" ); }); @@ -925,7 +1093,10 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + read_link(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12-[PLATFORM]/python" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" ); }); @@ -933,7 +1104,10 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + read_link(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12-[PLATFORM]/python" + ); + insta::assert_snapshot!( + canonicalize_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" ); }); } @@ -961,7 +1135,7 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + canonicalize_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" ); }); @@ -969,7 +1143,7 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + canonicalize_link_path(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" ); }); @@ -977,7 +1151,7 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" + canonicalize_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/bin/python3.12" ); }); @@ -985,15 
+1159,15 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + canonicalize_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" ); }); - } else { + } else if cfg!(windows) { insta::with_settings!({ filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + canonicalize_link_path(&bin_python_major), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" ); }); @@ -1001,7 +1175,7 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + canonicalize_link_path(&bin_python_minor_13), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" ); }); @@ -1009,7 +1183,7 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" + canonicalize_link_path(&bin_python_minor_12), @"[TEMP_DIR]/managed/cpython-3.12.11-[PLATFORM]/python" ); }); @@ -1017,28 +1191,42 @@ fn python_install_default() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" + canonicalize_link_path(&bin_python_default), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/python" ); }); } } -fn read_link_path(path: &Path) -> String { - if cfg!(unix) { - path.read_link() - .unwrap_or_else(|_| panic!("{} should be readable", path.display())) - .simplified_display() - .to_string() - } else if cfg!(windows) { - let launcher = uv_trampoline_builder::Launcher::try_from_path(path) - .ok() - .unwrap_or_else(|| panic!("{} should be readable", path.display())) - .unwrap_or_else(|| panic!("{} should be a valid launcher", path.display())); +#[cfg(windows)] +fn launcher_path(path: &Path) -> PathBuf { + let launcher = uv_trampoline_builder::Launcher::try_from_path(path) + .unwrap_or_else(|_| panic!("{} should be readable", path.display())) + .unwrap_or_else(|| panic!("{} should be a valid launcher", path.display())); + launcher.python_path +} - launcher.python_path.simplified_display().to_string() - } else { - unreachable!() - } +fn canonicalize_link_path(path: &Path) -> String { + #[cfg(unix)] + let canonical_path = fs_err::canonicalize(path); + + #[cfg(windows)] + let canonical_path = dunce::canonicalize(launcher_path(path)); + + canonical_path + .unwrap_or_else(|_| panic!("{} should be readable", path.display())) + .simplified_display() + .to_string() +} + +fn read_link(path: &Path) -> String { + #[cfg(unix)] + let linked_path = + fs_err::read_link(path).unwrap_or_else(|_| panic!("{} should be readable", path.display())); + + #[cfg(windows)] + let linked_path = launcher_path(path); + + linked_path.simplified_display().to_string() } #[test] @@ -1101,7 +1289,7 @@ fn python_install_preview_broken_link() { filters => context.filters(), }, { insta::assert_snapshot!( - read_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" + canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.13.5-[PLATFORM]/bin/python3.13" ); }); } @@ -1256,8 +1444,8 @@ fn python_install_314() { ----- stdout ----- ----- stderr ----- - Installed Python 3.14.0b2 in [TIME] - + cpython-3.14.0b2-[PLATFORM] + 
Installed Python 3.14.0b3 in [TIME] + + cpython-3.14.0b3-[PLATFORM] "); // Install a specific pre-release @@ -1277,7 +1465,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.14.0b2-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.14.0b3-[PLATFORM]/[INSTALL-BIN]/python ----- stderr ----- "); @@ -1287,7 +1475,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.14.0b2-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.14.0b3-[PLATFORM]/[INSTALL-BIN]/python ----- stderr ----- "); @@ -1296,7 +1484,7 @@ fn python_install_314() { success: true exit_code: 0 ----- stdout ----- - [TEMP_DIR]/managed/cpython-3.14.0b2-[PLATFORM]/[INSTALL-BIN]/python + [TEMP_DIR]/managed/cpython-3.14.0b3-[PLATFORM]/[INSTALL-BIN]/python ----- stderr ----- "); @@ -1486,3 +1674,557 @@ fn python_install_emulated_macos() { ----- stderr ----- "); } + +// A virtual environment should track the latest patch version installed. +#[test] +fn install_transparent_patch_upgrade_uv_venv() { + let context = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + // Install a lower patch version. + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.9"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.9 in [TIME] + + cpython-3.12.9-[PLATFORM] (python3.12) + " + ); + + // Create a virtual environment. + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.12") + .arg(context.venv.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.9 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); + + // Install a higher patch version. + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + // Virtual environment should reflect higher version. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); + + // Install a lower patch version. + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.8"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.8 in [TIME] + + cpython-3.12.8-[PLATFORM] + " + ); + + // Virtual environment should reflect highest version. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); +} + +// When installing multiple patches simultaneously, a virtual environment on that +// minor version should point to the highest. 
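Both of these behaviors rest on the link chain that the new `read_link`/`canonicalize_link_path` helpers take apart: one hop lands on a stable minor-version path, and full resolution lands on whichever patch install that path currently forwards to. A Unix-only sketch of such a two-hop chain, using `std::fs` and illustrative directory names rather than uv's actual layout code:

```rust
use std::fs;
use std::os::unix::fs::symlink;

fn main() -> std::io::Result<()> {
    // Illustrative layout: a concrete patch install, a stable minor-version
    // alias that forwards to it, and a `bin` entry that links to the alias.
    fs::create_dir_all("managed/cpython-3.12.11/bin")?;
    fs::create_dir_all("managed/cpython-3.12/bin")?;
    fs::create_dir_all("bin")?;
    fs::write("managed/cpython-3.12.11/bin/python3.12", b"")?;
    symlink(
        "../../cpython-3.12.11/bin/python3.12",
        "managed/cpython-3.12/bin/python3.12",
    )?;
    symlink("../managed/cpython-3.12/bin/python3.12", "bin/python3.12")?;

    // One hop: the raw link target, i.e. the stable minor-version alias.
    println!("{}", fs::read_link("bin/python3.12")?.display());
    // Full resolution: the concrete patch install behind the alias.
    println!("{}", fs::canonicalize("bin/python3.12")?.display());
    Ok(())
}
```

Retargeting only the middle link is what lets every environment that goes through it pick up a new patch without itself being modified; the test below then checks that, when several patches are installed at once, the link lands on the highest.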
+#[test] +fn install_multiple_patches() { + let context = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + // Install 3.12 patches in ascending order + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.9").arg("3.12.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed 2 versions in [TIME] + + cpython-3.12.9-[PLATFORM] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + // Create a virtual environment. + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.12") + .arg(context.venv.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.11 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // Virtual environment should be on highest installed patch. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); + + // Remove the original virtual environment + fs_err::remove_dir_all(&context.venv).unwrap(); + + // Install 3.10 patches in descending order + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17").arg("3.10.16"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed 2 versions in [TIME] + + cpython-3.10.16-[PLATFORM] + + cpython-3.10.17-[PLATFORM] (python3.10) + " + ); + + // Create a virtual environment on 3.10. + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10") + .arg(context.venv.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // Virtual environment should be on highest installed patch. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); +} + +// After uninstalling the highest patch, a virtual environment should point to the + next highest.
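Stated compactly, the fallback rule the next test exercises is: among the installs that remain for a minor version, relink to the greatest patch. A toy version of that selection (illustrative only; uv's real implementation lives in its managed-installation code):

```rust
// Pick the highest remaining patch release of a given minor version.
fn highest_patch(installed: &[(u64, u64, u64)], minor: (u64, u64)) -> Option<(u64, u64, u64)> {
    installed
        .iter()
        .copied()
        .filter(|&(major, min, _)| (major, min) == minor)
        .max_by_key(|&(_, _, patch)| patch)
}

fn main() {
    let mut installed = vec![(3, 12, 8), (3, 12, 9), (3, 12, 11)];
    assert_eq!(highest_patch(&installed, (3, 12)), Some((3, 12, 11)));

    // After uninstalling 3.12.11, the link should fall back to 3.12.9.
    installed.retain(|&version| version != (3, 12, 11));
    assert_eq!(highest_patch(&installed, (3, 12)), Some((3, 12, 9)));
}
```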
+#[test] +fn uninstall_highest_patch() { + let context = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + // Install several 3.12 patch versions + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.11").arg("3.12.9").arg("3.12.8"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed 3 versions in [TIME] + + cpython-3.12.8-[PLATFORM] + + cpython-3.12.9-[PLATFORM] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.12") + .arg(context.venv.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.11 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); + + // Uninstall the highest patch version + uv_snapshot!(context.filters(), context.python_uninstall().arg("--preview").arg("3.12.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python versions matching: Python 3.12.11 + Uninstalled Python 3.12.11 in [TIME] + - cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + // Virtual environment should be on highest patch version remaining. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); +} + +// Virtual environments record a patch version when one is explicitly +requested: `uv venv -p 3.x.y` prevents the environment from tracking the +latest patch version installed. +#[test] +fn install_no_transparent_upgrade_with_venv_patch_specification() { + let context = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.9"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.9 in [TIME] + + cpython-3.12.9-[PLATFORM] (python3.12) + " + ); + + // Create a virtual environment with a patch version + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.12.9") + .arg(context.venv.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.9 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); + + // Install a higher patch version. + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + // The virtual environment Python version is not transparently upgraded.
+ uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); +} + +// A virtual environment created using the `venv` module should track +// the latest patch version installed. +#[test] +fn install_transparent_patch_upgrade_venv_module() { + let context = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + let bin_dir = context.temp_dir.child("bin"); + + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.9"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.9 in [TIME] + + cpython-3.12.9-[PLATFORM] (python3.12) + " + ); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); + + // Create a virtual environment using venv module. + uv_snapshot!(context.filters(), context.run().arg("python").arg("-m").arg("venv").arg(context.venv.as_os_str()).arg("--without-pip") + .env(EnvVars::PATH, bin_dir.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); + + // Install a higher patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + // Virtual environment should reflect highest patch version. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); +} + +// Automatically installing a lower patch version when running a command like +// `uv run` should not downgrade virtual environments. 
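Put differently, retargeting the shared minor-version link should be monotonic: only a strictly newer patch may move it, and an automatic install of an older patch leaves it alone. A toy guard expressing the invariant the next test checks (illustrative, not uv's code):

```rust
// Should a freshly installed patch retarget the shared minor-version link?
fn should_retarget(current: Option<(u64, u64, u64)>, installed: (u64, u64, u64)) -> bool {
    // Tuples compare lexicographically, so (3, 12, 9) < (3, 12, 11).
    current.map_or(true, |linked| installed > linked)
}

fn main() {
    // Auto-installing 3.12.9 must not displace an existing 3.12.11 link...
    assert!(!should_retarget(Some((3, 12, 11)), (3, 12, 9)));
    // ...while going from 3.12.9 to 3.12.11 should move it.
    assert!(should_retarget(Some((3, 12, 9)), (3, 12, 11)));
    // The first install of a minor version always creates the link.
    assert!(should_retarget(None, (3, 12, 9)));
}
```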
+#[test] +fn install_lower_patch_automatically() { + let context = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.12") + .arg(context.venv.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.11 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + uv_snapshot!(context.filters(), context.init().arg("-p").arg("3.12.9").arg("proj"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Initialized project `proj` at `[TEMP_DIR]/proj` + " + ); + + // Create a new virtual environment to trigger automatic installation of + // lower patch version + uv_snapshot!(context.filters(), context.venv() + .arg("--directory").arg("proj") + .arg("-p").arg("3.12.9"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.9 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + // Original virtual environment should still point to higher patch + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); +} + +#[test] +fn uninstall_last_patch() { + let context = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_virtualenv_bin(); + + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + " + ); + + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + uv_snapshot!(context.filters(), context.python_uninstall().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Searching for Python versions matching: Python 3.10.17 + Uninstalled Python 3.10.17 in [TIME] + - cpython-3.10.17-[PLATFORM] (python3.10) + " + ); + + let mut filters = context.filters(); + filters.push(("python3", "python")); + + #[cfg(unix)] + uv_snapshot!(filters, context.run().arg("python").arg("--version"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/python` + Caused by: Broken symlink at `.venv/[BIN]/python`, was the underlying Python interpreter removed? 
+ + hint: Consider recreating the environment (e.g., with `uv venv`) + " + ); + + #[cfg(windows)] + uv_snapshot!(filters, context.run().arg("python").arg("--version"), @r#" + success: false + exit_code: 103 + ----- stdout ----- + + ----- stderr ----- + No Python at '"[TEMP_DIR]/managed/cpython-3.10-[PLATFORM]/python' + "# + ); +} diff --git a/crates/uv/tests/it/python_upgrade.rs b/crates/uv/tests/it/python_upgrade.rs new file mode 100644 index 000000000..bf6d45e08 --- /dev/null +++ b/crates/uv/tests/it/python_upgrade.rs @@ -0,0 +1,703 @@ +use crate::common::{TestContext, uv_snapshot}; +use anyhow::Result; +use assert_fs::fixture::FileTouch; +use assert_fs::prelude::PathChild; + +use uv_static::EnvVars; + +#[test] +fn python_upgrade() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install an earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Don't accept patch version as argument to upgrade command + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10.17"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + error: `uv python upgrade` only accepts minor versions + "); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + "); + + // Should be a no-op when already upgraded + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); +} + +#[test] +fn python_upgrade_without_version() { + let context: TestContext = TestContext::new_with_versions(&[]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Should be a no-op when no versions have been installed + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + There are no installed versions to upgrade + "); + + // Install earlier patch versions for different minor versions + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.11.8").arg("3.12.8").arg("3.13.1"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed 3 versions in [TIME] + + cpython-3.11.8-[PLATFORM] (python3.11) + + cpython-3.12.8-[PLATFORM] (python3.12) + + cpython-3.13.1-[PLATFORM] (python3.13) + "); + + let mut filters = context.filters().clone(); + filters.push((r"3.13.\d+", "3.13.[X]")); + + // Upgrade one patch version + uv_snapshot!(filters, context.python_upgrade().arg("--preview").arg("3.13"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.13.[X] in [TIME] + + cpython-3.13.[X]-[PLATFORM] (python3.13) + "); + + // Providing no minor version to `uv python upgrade` should upgrade the rest + // of the patch versions + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- 
+ Installed 2 versions in [TIME] + + cpython-3.11.13-[PLATFORM] (python3.11) + + cpython-3.12.11-[PLATFORM] (python3.12) + "); + + // Should be a no-op when every version is already upgraded + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + All versions already on latest supported patch release + "); +} + +#[test] +fn python_upgrade_transparent_from_venv() { + let context: TestContext = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install an earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Create a virtual environment + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + let second_venv = ".venv2"; + + // Create a second virtual environment with minor version request + uv_snapshot!(context.filters(), context.venv().arg(second_venv).arg("-p").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv2 + Activate with: source .venv2/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version") + .env(EnvVars::VIRTUAL_ENV, second_venv), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + "); + + // First virtual environment should reflect upgraded patch + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.18 + + ----- stderr ----- + " + ); + + // Second virtual environment should reflect upgraded patch + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version") + .env(EnvVars::VIRTUAL_ENV, second_venv), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.18 + + ----- stderr ----- + " + ); +} + +// Installing Python in preview mode should not prevent virtual environments +// from transparently upgrading. 
+#[test] +fn python_upgrade_transparent_from_venv_preview() { + let context: TestContext = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install an earlier patch version using `--preview` + uv_snapshot!(context.filters(), context.python_install().arg("3.10.17").arg("--preview"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Create a virtual environment + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + "); + + // Virtual environment should reflect upgraded patch + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.18 + + ----- stderr ----- + " + ); +} + +#[test] +fn python_upgrade_ignored_with_python_pin() { + let context: TestContext = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install an earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Create a virtual environment + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + // Pin to older patch version + uv_snapshot!(context.filters(), context.python_pin().arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Pinned `.python-version` to `3.10.17` + + ----- stderr ----- + "); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + "); + + // Virtual environment should continue to respect pinned patch version + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); +} + +// Virtual environments record a patch version when one is explicitly +requested: `uv venv -p 3.x.y` prevents transparent upgrades.
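The opt-out falls out of which path the environment ends up linked against: a minor-only request goes through the retargetable minor-version directory, while an explicit patch request links straight at the patch install, matching the `cpython-3.12-[PLATFORM]` versus `cpython-3.12.5-[PLATFORM]` targets in the snapshots earlier in this diff. A sketch of that rule, with illustrative path names rather than uv's exact layout:

```rust
// Choose the link target for a new environment: the stable minor-version
// alias when only `3.x` was requested, or the exact patch directory when
// `3.x.y` was. Path shapes are illustrative, not uv's actual layout.
fn link_target(major: u64, minor: u64, patch: Option<u64>) -> String {
    match patch {
        None => format!("managed/cpython-{major}.{minor}-plat/bin/python{major}.{minor}"),
        Some(patch) => {
            format!("managed/cpython-{major}.{minor}.{patch}-plat/bin/python{major}.{minor}")
        }
    }
}

fn main() {
    // `uv venv -p 3.10` follows the alias, so later upgrades are transparent.
    assert_eq!(
        link_target(3, 10, None),
        "managed/cpython-3.10-plat/bin/python3.10"
    );
    // `uv venv -p 3.10.17` is pinned: upgrades never move this target.
    assert_eq!(
        link_target(3, 10, Some(17)),
        "managed/cpython-3.10.17-plat/bin/python3.10"
    );
}
```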
+#[test] +fn python_no_transparent_upgrade_with_venv_patch_specification() { + let context: TestContext = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + // Install an earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Create a virtual environment with a patch version + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + "); + + // The virtual environment Python version is not transparently upgraded. + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); +} + +// Transparent upgrades should work for virtual environments created within +// virtual environments. +#[test] +fn python_transparent_upgrade_venv_venv() { + let context: TestContext = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_filtered_virtualenv_bin() + .with_managed_python_dirs(); + + // Install an earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Create an initial virtual environment + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + let venv_python = if cfg!(windows) { + context.venv.child("Scripts/python.exe") + } else { + context.venv.child("bin/python") + }; + + let second_venv = ".venv2"; + + // Create a new virtual environment from within a virtual environment + uv_snapshot!(context.filters(), context.venv() + .arg(second_venv) + .arg("-p").arg(venv_python.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 interpreter at: .venv/[BIN]/python + Creating virtual environment at: .venv2 + Activate with: source .venv2/[BIN]/activate + "); + + // Check version from within second virtual environment + uv_snapshot!(context.filters(), context.run() + .arg("python").arg("--version") + .env(EnvVars::VIRTUAL_ENV, second_venv), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + // Upgrade patch version + uv_snapshot!(context.filters(),
context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + "); + + // Should have transparently upgraded in second virtual environment + uv_snapshot!(context.filters(), context.run() + .arg("python").arg("--version") + .env(EnvVars::VIRTUAL_ENV, second_venv), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.18 + + ----- stderr ----- + " + ); +} + +// Transparent upgrades should work for virtual environments created using +// the `venv` module. +#[test] +fn python_upgrade_transparent_from_venv_module() { + let context = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + let bin_dir = context.temp_dir.child("bin"); + + // Install earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.9"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.9 in [TIME] + + cpython-3.12.9-[PLATFORM] (python3.12) + "); + + // Create a virtual environment using venv module + uv_snapshot!(context.filters(), context.run().arg("python").arg("-m").arg("venv").arg(context.venv.as_os_str()).arg("--without-pip") + .env(EnvVars::PATH, bin_dir.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); + + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.9 + + ----- stderr ----- + " + ); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.12"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python3.12) + " + ); + + // Virtual environment should reflect upgraded patch + uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.12.11 + + ----- stderr ----- + " + ); +} + +// Transparent Python upgrades should work in environments created using +// the `venv` module within an existing virtual environment. +#[test] +fn python_upgrade_transparent_from_venv_module_in_venv() { + let context = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs() + .with_filtered_python_install_bin(); + + let bin_dir = context.temp_dir.child("bin"); + + // Install earlier patch version + uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.17 in [TIME] + + cpython-3.10.17-[PLATFORM] (python3.10) + "); + + // Create first virtual environment + uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.17 + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "); + + let second_venv = ".venv2"; + + // Create a virtual environment using the `venv` module from within the first virtual environment.
+ uv_snapshot!(context.filters(), context.run() + .arg("python").arg("-m").arg("venv").arg(second_venv).arg("--without-pip") + .env(EnvVars::PATH, bin_dir.as_os_str()), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); + + // Check version within second virtual environment + uv_snapshot!(context.filters(), context.run() + .env(EnvVars::VIRTUAL_ENV, second_venv) + .arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.17 + + ----- stderr ----- + " + ); + + // Upgrade patch version + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.10.18 in [TIME] + + cpython-3.10.18-[PLATFORM] (python3.10) + " + ); + + // Second virtual environment should reflect upgraded patch. + uv_snapshot!(context.filters(), context.run() + .env(EnvVars::VIRTUAL_ENV, second_venv) + .arg("python").arg("--version"), @r" + success: true + exit_code: 0 + ----- stdout ----- + Python 3.10.18 + + ----- stderr ----- + " + ); +} + +// Tests that `uv python upgrade 3.12` will warn if trying to install over non-managed +// interpreter. +#[test] +fn python_upgrade_force_install() -> Result<()> { + let context = TestContext::new_with_versions(&["3.13"]) + .with_filtered_python_keys() + .with_filtered_exe_suffix() + .with_managed_python_dirs(); + + context + .bin_dir + .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX)) + .touch()?; + + // Try to upgrade with a non-managed interpreter installed in `bin`. + uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.12"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + warning: Executable already exists at `[BIN]/python3.12` but is not managed by uv; use `uv python install 3.12 --force` to replace it + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] + "); + + // Force the `bin` install. + uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("--force").arg("--preview").arg("3.12"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Installed Python 3.12.11 in [TIME] + + cpython-3.12.11-[PLATFORM] (python3.12) + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/run.rs b/crates/uv/tests/it/run.rs index 121915ef0..98c2adbfe 100644 --- a/crates/uv/tests/it/run.rs +++ b/crates/uv/tests/it/run.rs @@ -133,7 +133,7 @@ fn run_with_python_version() -> Result<()> { ----- stderr ----- Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] - error: The requested interpreter resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.11, <4` + error: The requested interpreter resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.11, <4` (from `project.requires-python`) "); Ok(()) @@ -1344,7 +1344,7 @@ fn run_with_build_constraints() -> Result<()> { })?; // Installing requests with incompatible build constraints should fail. 
- uv_snapshot!(context.filters(), context.run().arg("--with").arg("requests==1.2").arg("main.py"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--with").arg("requests==1.2").arg("main.py"), @r" success: false exit_code: 1 ----- stdout ----- @@ -1358,12 +1358,11 @@ fn run_with_build_constraints() -> Result<()> { + idna==3.6 + sniffio==1.3.1 + typing-extensions==4.10.0 - Resolved 1 package in [TIME] × Failed to download and build `requests==1.2.0` ├─▶ Failed to resolve requirements from `setup.py` build ├─▶ No solution found when resolving: `setuptools>=40.8.0` ╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable. - "###); + "); // Change the build constraint to be compatible with `requests==1.2`. pyproject_toml.write_str(indoc! { r#" @@ -3136,25 +3135,27 @@ fn run_isolated_incompatible_python() -> Result<()> { })?; // We should reject Python 3.9... - uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###" + uv_snapshot!(context.filters(), context.run().arg("main.py"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] - error: The Python request from `.python-version` resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12`. Use `uv python pin` to update the `.python-version` file to a compatible version. - "###); + error: The Python request from `.python-version` resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `project.requires-python`) + Use `uv python pin` to update the `.python-version` file to a compatible version + "); // ...even if `--isolated` is provided. - uv_snapshot!(context.filters(), context.run().arg("--isolated").arg("main.py"), @r###" + uv_snapshot!(context.filters(), context.run().arg("--isolated").arg("main.py"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: The Python request from `.python-version` resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12`. Use `uv python pin` to update the `.python-version` file to a compatible version. 
- "###); + error: The Python request from `.python-version` resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `project.requires-python`) + Use `uv python pin` to update the `.python-version` file to a compatible version + "); Ok(()) } @@ -4598,6 +4599,265 @@ fn run_default_groups() -> Result<()> { Ok(()) } + +#[test] +fn run_groups_requires_python() -> Result<()> { + let context = + TestContext::new_with_versions(&["3.11", "3.12", "3.13"]).with_filtered_python_sources(); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.11" + dependencies = ["typing-extensions"] + + [dependency-groups] + foo = ["anyio"] + bar = ["iniconfig"] + dev = ["sniffio"] + + [tool.uv.dependency-groups] + foo = {requires-python=">=3.14"} + bar = {requires-python=">=3.13"} + dev = {requires-python=">=3.12"} + "#, + )?; + + context.lock().assert().success(); + + // With --no-default-groups only the main requires-python should be consulted + uv_snapshot!(context.filters(), context.run() + .arg("--no-default-groups") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Creating virtual environment at: .venv + Resolved 6 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + typing-extensions==4.10.0 + "); + + // The main requires-python and the default group's requires-python should be consulted + // (This should trigger a version bump) + uv_snapshot!(context.filters(), context.run() + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 6 packages in [TIME] + Prepared 1 package in [TIME] + Installed 2 packages in [TIME] + + sniffio==1.3.1 + + typing-extensions==4.10.0 + "); + + // The main requires-python and "dev" and "bar" requires-python should be consulted + // (This should trigger a version bump) + uv_snapshot!(context.filters(), context.run() + .arg("--group").arg("bar") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.13.[X] interpreter at: [PYTHON-3.13] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 6 packages in [TIME] + Prepared 1 package in [TIME] + Installed 3 packages in [TIME] + + iniconfig==2.0.0 + + sniffio==1.3.1 + + typing-extensions==4.10.0 + "); + + // TMP: Attempt to catch this flake with verbose output + // See https://github.com/astral-sh/uv/issues/14160 + let output = context + .run() + .arg("python") + .arg("-c") + .arg("import typing_extensions") + .arg("-vv") + .output()?; + let stderr = String::from_utf8_lossy(&output.stderr); + assert!( + !stderr.contains("Removed virtual environment"), + "{}", + stderr + ); + + // Going back to just "dev" we shouldn't churn the venv needlessly + uv_snapshot!(context.filters(), context.run() + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 6 packages in [TIME] + Audited 2 packages in [TIME] + "); + + // Explicitly requesting an 
in-range python can downgrade + uv_snapshot!(context.filters(), context.run() + .arg("-p").arg("3.12") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 6 packages in [TIME] + Installed 2 packages in [TIME] + + sniffio==1.3.1 + + typing-extensions==4.10.0 + "); + + // Explicitly requesting an out-of-range python fails + uv_snapshot!(context.filters(), context.run() + .arg("-p").arg("3.11") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + error: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `tool.uv.dependency-groups.dev.requires-python`). + "); + + // Enabling foo we can't find an interpreter + uv_snapshot!(context.filters(), context.run() + .arg("--group").arg("foo") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: No interpreter found for Python >=3.14 in [PYTHON SOURCES] + "); + + Ok(()) +} + +#[test] +fn run_groups_include_requires_python() -> Result<()> { + let context = TestContext::new_with_versions(&["3.11", "3.12", "3.13"]); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.11" + dependencies = ["typing-extensions"] + + [dependency-groups] + foo = ["anyio"] + bar = ["iniconfig"] + baz = ["iniconfig"] + dev = ["sniffio", {include-group = "foo"}, {include-group = "baz"}] + + + [tool.uv.dependency-groups] + foo = {requires-python="<3.13"} + bar = {requires-python=">=3.13"} + baz = {requires-python=">=3.12"} + "#, + )?; + + context.lock().assert().success(); + + // With --no-default-groups only the main requires-python should be consulted + uv_snapshot!(context.filters(), context.run() + .arg("--no-default-groups") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Creating virtual environment at: .venv + Resolved 6 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + typing-extensions==4.10.0 + "); + + // The main requires-python and the default group's requires-python should be consulted + // (This should trigger a version bump) + uv_snapshot!(context.filters(), context.run() + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 6 packages in [TIME] + Prepared 4 packages in [TIME] + Installed 5 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + iniconfig==2.0.0 + + sniffio==1.3.1 + + typing-extensions==4.10.0 + "); + + // The main requires-python and "dev" and "bar" requires-python should be consulted + // (This should trigger a conflict) + uv_snapshot!(context.filters(), context.run() + .arg("--group").arg("bar") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: false + 
exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + error: Found conflicting Python requirements: + - project: >=3.11 + - project:bar: >=3.13 + - project:dev: >=3.12, <3.13 + "); + + // Explicitly requesting an out-of-range python fails + uv_snapshot!(context.filters(), context.run() + .arg("-p").arg("3.13") + .arg("python").arg("-c").arg("import typing_extensions"), @r" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.13.[X] interpreter at: [PYTHON-3.13] + error: The requested interpreter resolved to Python 3.13.[X], which is incompatible with the project's Python requirement: `==3.12.*` (from `tool.uv.dependency-groups.dev.requires-python`). + "); + Ok(()) +} + /// Test that a signal n makes the process exit with code 128+n. #[cfg(unix)] #[test] @@ -4616,7 +4876,7 @@ fn exit_status_signal() -> Result<()> { #[test] fn run_repeated() -> Result<()> { - let context = TestContext::new_with_versions(&["3.13"]); + let context = TestContext::new_with_versions(&["3.13", "3.12"]); let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str(indoc! { r#" @@ -4663,22 +4923,25 @@ fn run_repeated() -> Result<()> { Resolved 1 package in [TIME] "###); - // Re-running as a tool shouldn't require reinstalling `typing-extensions`, since the environment is cached. + // Re-running as a tool does require reinstalling `typing-extensions`, since the base venv is + // different. uv_snapshot!( context.filters(), - context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r###" + context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#" success: false exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] + Installed 1 package in [TIME] + + typing-extensions==4.10.0 Traceback (most recent call last): File "", line 1, in import typing_extensions; import iniconfig ^^^^^^^^^^^^^^^^ ModuleNotFoundError: No module named 'iniconfig' - "###); + "#); Ok(()) } @@ -4719,22 +4982,25 @@ fn run_without_overlay() -> Result<()> { + typing-extensions==4.10.0 "###); - // Import `iniconfig` in the context of a `tool run` command, which should fail. + // Import `iniconfig` in the context of a `tool run` command, which should fail. Note that + // typing-extensions gets installed again, because the venv is not shared. uv_snapshot!( context.filters(), - context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r###" + context.tool_run().arg("--with").arg("typing-extensions").arg("python").arg("-c").arg("import typing_extensions; import iniconfig"), @r#" success: false exit_code: 1 ----- stdout ----- ----- stderr ----- Resolved 1 package in [TIME] + Installed 1 package in [TIME] + + typing-extensions==4.10.0 Traceback (most recent call last): File "", line 1, in import typing_extensions; import iniconfig ^^^^^^^^^^^^^^^^ ModuleNotFoundError: No module named 'iniconfig' - "###); + "#); // Re-running in the context of the project should reset the overlay. 
uv_snapshot!( diff --git a/crates/uv/tests/it/show_settings.rs b/crates/uv/tests/it/show_settings.rs index 87453090c..7635bd523 100644 --- a/crates/uv/tests/it/show_settings.rs +++ b/crates/uv/tests/it/show_settings.rs @@ -3987,7 +3987,7 @@ fn resolve_config_file() -> anyhow::Result<()> { | 1 | [project] | ^^^^^^^ - unknown field `project`, expected one of `required-version`, `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `fork-strategy`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `python-install-mirror`, `pypy-install-mirror`, `python-downloads-json-url`, `publish-url`, `trusted-publishing`, `check-url`, `add-bounds`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `build-constraint-dependencies`, `environments`, `required-environments`, `conflicts`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dev-dependencies`, `build-backend` + unknown field `project`, expected one of `required-version`, `native-tls`, `offline`, `no-cache`, `cache-dir`, `preview`, `python-preference`, `python-downloads`, `concurrent-downloads`, `concurrent-builds`, `concurrent-installs`, `index`, `index-url`, `extra-index-url`, `no-index`, `find-links`, `index-strategy`, `keyring-provider`, `allow-insecure-host`, `resolution`, `prerelease`, `fork-strategy`, `dependency-metadata`, `config-settings`, `no-build-isolation`, `no-build-isolation-package`, `exclude-newer`, `link-mode`, `compile-bytecode`, `no-sources`, `upgrade`, `upgrade-package`, `reinstall`, `reinstall-package`, `no-build`, `no-build-package`, `no-binary`, `no-binary-package`, `python-install-mirror`, `pypy-install-mirror`, `python-downloads-json-url`, `publish-url`, `trusted-publishing`, `check-url`, `add-bounds`, `pip`, `cache-keys`, `override-dependencies`, `constraint-dependencies`, `build-constraint-dependencies`, `environments`, `required-environments`, `conflicts`, `workspace`, `sources`, `managed`, `package`, `default-groups`, `dependency-groups`, `dev-dependencies`, `build-backend` " ); diff --git a/crates/uv/tests/it/sync.rs b/crates/uv/tests/it/sync.rs index 3f3cd072c..f9a71fe82 100644 --- a/crates/uv/tests/it/sync.rs +++ b/crates/uv/tests/it/sync.rs @@ -3,13 +3,14 @@ use assert_cmd::prelude::*; use assert_fs::{fixture::ChildPath, prelude::*}; use indoc::{formatdoc, indoc}; use insta::assert_snapshot; - -use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path}; use predicates::prelude::predicate; use tempfile::tempdir_in; + use uv_fs::Simplified; use uv_static::EnvVars; +use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path}; + #[test] fn sync() -> Result<()> { let context = TestContext::new("3.12"); @@ -346,7 +347,298 @@ fn mixed_requires_python() -> Result<()> { ----- stderr ----- Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] - error: The requested interpreter resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12`. 
However, a workspace member (`bird-feeder`) supports Python >=3.9. To install the workspace member on its own, navigate to `packages/bird-feeder`, then run `uv venv --python 3.9.[X]` followed by `uv pip install -e .`. + error: The requested interpreter resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12` (from workspace member `albatross`'s `project.requires-python`). + "); + + Ok(()) +} + +/// Ensure that group requires-python solves an actual problem +#[test] +#[cfg(not(windows))] +#[cfg(feature = "python-eol")] +fn group_requires_python_useful_defaults() -> Result<()> { + let context = TestContext::new_with_versions(&["3.8", "3.9"]); + + // Require 3.8 for our project, but have a dev-dependency on a version of sphinx that needs 3.9 + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "pharaohs-tomp" + version = "0.1.0" + requires-python = ">=3.8" + dependencies = ["anyio"] + + [dependency-groups] + dev = ["sphinx>=7.2.6"] + "#, + )?; + + let src = context.temp_dir.child("src").child("albatross"); + src.create_dir_all()?; + + let init = src.child("__init__.py"); + init.touch()?; + + // Running `uv sync --no-dev` should ideally succeed, locking for Python 3.8. + // ...but once we pick the 3.8 interpreter the lock freaks out because it sees + // that the dependency-group containing sphinx will never successfully install, + // even though it's not enabled! + uv_snapshot!(context.filters(), context.sync() + .arg("--no-dev"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.8.[X] interpreter at: [PYTHON-3.8] + Creating virtual environment at: .venv + × No solution found when resolving dependencies for split (python_full_version == '3.8.*'): + ╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used. + And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used. + And because pharaohs-tomp:dev depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:dev, we can conclude that your project's requirements are unsatisfiable. + + hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9). + "); + + // Running `uv sync` should always fail, as now sphinx is involved + uv_snapshot!(context.filters(), context.sync(), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies for split (python_full_version == '3.8.*'): + ╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used. + And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used. + And because pharaohs-tomp:dev depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:dev, we can conclude that your project's requirements are unsatisfiable. + + hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9). 
+ "); + + // Adding group requires python should fix it + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "pharaohs-tomp" + version = "0.1.0" + requires-python = ">=3.8" + dependencies = ["anyio"] + + [dependency-groups] + dev = ["sphinx>=7.2.6"] + + [tool.uv.dependency-groups] + dev = {requires-python = ">=3.9"} + "#, + )?; + + // Running `uv sync --no-dev` should succeed, still using the Python 3.8. + uv_snapshot!(context.filters(), context.sync() + .arg("--no-dev"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 29 packages in [TIME] + Prepared 5 packages in [TIME] + Installed 5 packages in [TIME] + + anyio==4.3.0 + + exceptiongroup==1.2.0 + + idna==3.6 + + sniffio==1.3.1 + + typing-extensions==4.10.0 + "); + + // Running `uv sync` should succeed, bumping to Python 3.9 as sphinx is now involved. + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 29 packages in [TIME] + Prepared 22 packages in [TIME] + Installed 27 packages in [TIME] + + alabaster==0.7.16 + + anyio==4.3.0 + + babel==2.14.0 + + certifi==2024.2.2 + + charset-normalizer==3.3.2 + + docutils==0.20.1 + + exceptiongroup==1.2.0 + + idna==3.6 + + imagesize==1.4.1 + + importlib-metadata==7.1.0 + + jinja2==3.1.3 + + markupsafe==2.1.5 + + packaging==24.0 + + pygments==2.17.2 + + requests==2.31.0 + + sniffio==1.3.1 + + snowballstemmer==2.2.0 + + sphinx==7.2.6 + + sphinxcontrib-applehelp==1.0.8 + + sphinxcontrib-devhelp==1.0.6 + + sphinxcontrib-htmlhelp==2.0.5 + + sphinxcontrib-jsmath==1.0.1 + + sphinxcontrib-qthelp==1.0.7 + + sphinxcontrib-serializinghtml==1.1.10 + + typing-extensions==4.10.0 + + urllib3==2.2.1 + + zipp==3.18.1 + "); + + Ok(()) +} + +/// Ensure that group requires-python solves an actual problem +#[test] +#[cfg(not(windows))] +#[cfg(feature = "python-eol")] +fn group_requires_python_useful_non_defaults() -> Result<()> { + let context = TestContext::new_with_versions(&["3.8", "3.9"]); + + // Require 3.8 for our project, but have a dev-dependency on a version of sphinx that needs 3.9 + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "pharaohs-tomp" + version = "0.1.0" + requires-python = ">=3.8" + dependencies = ["anyio"] + + [dependency-groups] + mygroup = ["sphinx>=7.2.6"] + "#, + )?; + + let src = context.temp_dir.child("src").child("albatross"); + src.create_dir_all()?; + + let init = src.child("__init__.py"); + init.touch()?; + + // Running `uv sync` should ideally succeed, locking for Python 3.8. + // ...but once we pick the 3.8 interpreter the lock freaks out because it sees + // that the dependency-group containing sphinx will never successfully install, + // even though it's not enabled, or even a default! + uv_snapshot!(context.filters(), context.sync(), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.8.[X] interpreter at: [PYTHON-3.8] + Creating virtual environment at: .venv + × No solution found when resolving dependencies for split (python_full_version == '3.8.*'): + ╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used. 
+ And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used. + And because pharaohs-tomp:mygroup depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:mygroup, we can conclude that your project's requirements are unsatisfiable. + + hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9). + "); + + // Running `uv sync --group mygroup` should definitely fail, as now sphinx is involved + uv_snapshot!(context.filters(), context.sync() + .arg("--group").arg("mygroup"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies for split (python_full_version == '3.8.*'): + ╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used. + And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used. + And because pharaohs-tomp:mygroup depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:mygroup, we can conclude that your project's requirements are unsatisfiable. + + hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9). + "); + + // Adding group requires python should fix it + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "pharaohs-tomp" + version = "0.1.0" + requires-python = ">=3.8" + dependencies = ["anyio"] + + [dependency-groups] + mygroup = ["sphinx>=7.2.6"] + + [tool.uv.dependency-groups] + mygroup = {requires-python = ">=3.9"} + "#, + )?; + + // Running `uv sync` should succeed, locking for the previously picked Python 3.8.
+ uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 29 packages in [TIME] + Prepared 5 packages in [TIME] + Installed 5 packages in [TIME] + + anyio==4.3.0 + + exceptiongroup==1.2.0 + + idna==3.6 + + sniffio==1.3.1 + + typing-extensions==4.10.0 + "); + + // Running `uv sync --group mygroup` should pass, bumping the interpreter to 3.9, + // as the group requires-python saves us + uv_snapshot!(context.filters(), context.sync() + .arg("--group").arg("mygroup"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] + Removed virtual environment at: .venv + Creating virtual environment at: .venv + Resolved 29 packages in [TIME] + Prepared 22 packages in [TIME] + Installed 27 packages in [TIME] + + alabaster==0.7.16 + + anyio==4.3.0 + + babel==2.14.0 + + certifi==2024.2.2 + + charset-normalizer==3.3.2 + + docutils==0.20.1 + + exceptiongroup==1.2.0 + + idna==3.6 + + imagesize==1.4.1 + + importlib-metadata==7.1.0 + + jinja2==3.1.3 + + markupsafe==2.1.5 + + packaging==24.0 + + pygments==2.17.2 + + requests==2.31.0 + + sniffio==1.3.1 + + snowballstemmer==2.2.0 + + sphinx==7.2.6 + + sphinxcontrib-applehelp==1.0.8 + + sphinxcontrib-devhelp==1.0.6 + + sphinxcontrib-htmlhelp==2.0.5 + + sphinxcontrib-jsmath==1.0.1 + + sphinxcontrib-qthelp==1.0.7 + + sphinxcontrib-serializinghtml==1.1.10 + + typing-extensions==4.10.0 + + urllib3==2.2.1 + + zipp==3.18.1 "); Ok(()) @@ -830,10 +1122,7 @@ fn sync_build_isolation_package() -> Result<()> { )?; // Running `uv sync` should fail for iniconfig. - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) .chain(context.filters()) .collect::<Vec<_>>(); - uv_snapshot!(filters, context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -923,10 +1212,7 @@ fn sync_build_isolation_extra() -> Result<()> { )?; // Running `uv sync` should fail for the `compile` extra. - let filters = std::iter::once((r"exit code: 1", "exit status: 1")) .chain(context.filters()) .collect::<Vec<_>>(); - uv_snapshot!(&filters, context.sync().arg("--extra").arg("compile"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -947,7 +1233,7 @@ fn sync_build_isolation_extra() -> Result<()> { "###); // Running `uv sync` with `--all-extras` should also fail. - uv_snapshot!(&filters, context.sync().arg("--all-extras"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--all-extras"), @r###" success: false exit_code: 1 ----- stdout ----- @@ -4080,17 +4366,17 @@ fn sync_custom_environment_path() -> Result<()> { // But if it's just an incompatible virtual environment...
fs_err::remove_dir_all(context.temp_dir.join("foo"))?; - uv_snapshot!(context.filters(), context.venv().arg("foo").arg("--python").arg("3.11"), @r###" + uv_snapshot!(context.filters(), context.venv().arg("foo").arg("--python").arg("3.11"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] - warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` + warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `project.requires-python`) Creating virtual environment at: foo Activate with: source foo/[BIN]/activate - "###); + "); // Even with some extraneous content... fs_err::write(context.temp_dir.join("foo").join("file"), b"")?; @@ -5817,17 +6103,17 @@ fn sync_invalid_environment() -> Result<()> { // But if it's just an incompatible virtual environment... fs_err::remove_dir_all(context.temp_dir.join(".venv"))?; - uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] - warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` + warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `project.requires-python`) Creating virtual environment at: .venv Activate with: source .venv/[BIN]/activate - "###); + "); // Even with some extraneous content... fs_err::write(context.temp_dir.join(".venv").join("file"), b"")?; @@ -5884,17 +6170,17 @@ fn sync_invalid_environment() -> Result<()> { // But if it's not a virtual environment... fs_err::remove_dir_all(context.temp_dir.join(".venv"))?; - uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] - warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` + warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `project.requires-python`) Creating virtual environment at: .venv Activate with: source .venv/[BIN]/activate - "###); + "); // Which we detect by the presence of a `pyvenv.cfg` file fs_err::remove_file(context.temp_dir.join(".venv").join("pyvenv.cfg"))?; @@ -6004,15 +6290,15 @@ fn sync_python_version() -> Result<()> { "###); // Unless explicitly requested... 
- uv_snapshot!(context.filters(), context.sync().arg("--python").arg("3.10"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--python").arg("3.10"), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.10.[X] interpreter at: [PYTHON-3.10] - error: The requested interpreter resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` - "###); + error: The requested interpreter resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` (from `project.requires-python`) + "); // But a pin should take precedence uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r###" @@ -6051,15 +6337,16 @@ "###); // We should warn on subsequent uses, but respect the pinned version? - uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- Using CPython 3.10.[X] interpreter at: [PYTHON-3.10] - error: The Python request from `.python-version` resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11`. Use `uv python pin` to update the `.python-version` file to a compatible version. - "###); + error: The Python request from `.python-version` resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` (from `project.requires-python`) + Use `uv python pin` to update the `.python-version` file to a compatible version + "); // Unless the pin file is outside the project, in which case we should just ignore it entirely let child_dir = context.temp_dir.child("child"); @@ -6692,10 +6979,7 @@ fn sync_derivation_chain() -> Result<()> { let filters = context .filters() .into_iter() - .chain([ - (r"exit code: 1", "exit status: 1"), - (r"/.*/src", "/[TMP]/src"), - ]) + .chain([(r"/.*/src", "/[TMP]/src")]) .collect::<Vec<_>>(); uv_snapshot!(filters, context.sync(), @r###" @@ -6758,10 +7042,7 @@ fn sync_derivation_chain_extra() -> Result<()> { let filters = context .filters() .into_iter() - .chain([ - (r"exit code: 1", "exit status: 1"), - (r"/.*/src", "/[TMP]/src"), - ]) + .chain([(r"/.*/src", "/[TMP]/src")]) .collect::<Vec<_>>(); uv_snapshot!(filters, context.sync().arg("--extra").arg("wsgi"), @r###" @@ -6826,10 +7107,7 @@ fn sync_derivation_chain_group() -> Result<()> { let filters = context .filters() .into_iter() - .chain([ - (r"exit code: 1", "exit status: 1"), - (r"/.*/src", "/[TMP]/src"), - ]) + .chain([(r"/.*/src", "/[TMP]/src")]) .collect::<Vec<_>>(); uv_snapshot!(filters, context.sync().arg("--group").arg("wsgi"), @r###" @@ -7850,7 +8128,7 @@ fn sync_dry_run() -> Result<()> { "#, )?; - uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r" success: true exit_code: 0 ----- stdout ----- @@ -7858,14 +8136,15 @@ ----- stderr ----- Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] Would replace existing virtual environment at: .venv + warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.12'` vs `python_full_version == '3.9.*'` Resolved 2 packages in [TIME] Would update lockfile at: uv.lock Would install 1 package + iniconfig==2.0.0 - "###); + "); // Perform a full sync.
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync(), @r" success: true exit_code: 0 ----- stdout ----- @@ -7874,11 +8153,14 @@ fn sync_dry_run() -> Result<()> { Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] Removed virtual environment at: .venv Creating virtual environment at: .venv + warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.12'` vs `python_full_version == '3.9.*'` Resolved 2 packages in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); + // TMP: Attempt to catch this flake with verbose output + // See https://github.com/astral-sh/uv/issues/13744 let output = context.sync().arg("--dry-run").arg("-vv").output()?; let stderr = String::from_utf8_lossy(&output.stderr); assert!( @@ -7887,6 +8169,19 @@ fn sync_dry_run() -> Result<()> { stderr ); + uv_snapshot!(context.filters(), context.sync().arg("--dry-run"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Discovered existing environment at: .venv + Resolved 2 packages in [TIME] + Found up-to-date lockfile at: uv.lock + Audited 1 package in [TIME] + Would make no changes + "); + Ok(()) } @@ -8368,6 +8663,7 @@ fn sync_locked_script() -> Result<()> { ----- stderr ----- Recreating script environment at: [CACHE_DIR]/environments-v2/script-[HASH] + warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.11'` vs `python_full_version >= '3.8' and python_full_version < '3.11'` Resolved 6 packages in [TIME] error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`. "); @@ -8379,6 +8675,7 @@ fn sync_locked_script() -> Result<()> { ----- stderr ----- Using script environment at: [CACHE_DIR]/environments-v2/script-[HASH] + warning: Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `python_full_version >= '3.11'` vs `python_full_version >= '3.8' and python_full_version < '3.11'` Resolved 6 packages in [TIME] Prepared 2 packages in [TIME] Installed 6 packages in [TIME] @@ -8935,52 +9232,52 @@ fn transitive_group_conflicts_cycle() -> Result<()> { uv_snapshot!(context.filters(), context.sync(), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `example @ file://[TEMP_DIR]/` - ╰─▶ Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` + error: Project `example` has malformed dependency groups + Caused by: Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` "); uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `example @ file://[TEMP_DIR]/` - ╰─▶ Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` + error: Project `example` has malformed dependency groups + Caused by: Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` "); uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev").arg("--group").arg("test"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `example @ file://[TEMP_DIR]/` - ╰─▶ Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` + error: Project `example` has malformed dependency groups + Caused by: Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` "); 
uv_snapshot!(context.filters(), context.sync().arg("--group").arg("test").arg("--group").arg("magic"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `example @ file://[TEMP_DIR]/` - ╰─▶ Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` + error: Project `example` has malformed dependency groups + Caused by: Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` "); uv_snapshot!(context.filters(), context.sync().arg("--group").arg("dev").arg("--group").arg("magic"), @r" success: false - exit_code: 1 + exit_code: 2 ----- stdout ----- ----- stderr ----- - × Failed to build `example @ file://[TEMP_DIR]/` - ╰─▶ Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` + error: Project `example` has malformed dependency groups + Caused by: Detected a cycle in `dependency-groups`: `dev` -> `test` -> `dev` "); Ok(()) @@ -9325,9 +9622,7 @@ fn sync_when_virtual_environment_incompatible_with_interpreter() -> Result<()> { }, { let contents = fs_err::read_to_string(&pyvenv_cfg).unwrap(); let lines: Vec<&str> = contents.split('\n').collect(); - assert_snapshot!(lines[3], @r###" - version_info = 3.12.[X] - "###); + assert_snapshot!(lines[3], @"version_info = 3.12.[X]"); }); // Simulate an incompatible `pyvenv.cfg:version_info` value created @@ -9366,9 +9661,7 @@ fn sync_when_virtual_environment_incompatible_with_interpreter() -> Result<()> { }, { let contents = fs_err::read_to_string(&pyvenv_cfg).unwrap(); let lines: Vec<&str> = contents.split('\n').collect(); - assert_snapshot!(lines[3], @r###" - version_info = 3.12.[X] - "###); + assert_snapshot!(lines[3], @"version_info = 3.12.[X]"); }); Ok(()) @@ -9646,10 +9939,90 @@ fn sync_required_environment_hint() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] - error: Distribution `no-sdist-no-wheels-with-matching-platform-a==1.0.0 @ registry+https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/` can't be installed because it doesn't have a source distribution or wheel for the current platform + error: Distribution `no-sdist-no-wheels-with-matching-platform-a==1.0.0 @ registry+https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html` can't be installed because it doesn't have a source distribution or wheel for the current platform hint: You're on [PLATFORM] (`[TAG]`), but `no-sdist-no-wheels-with-matching-platform-a` (v1.0.0) only has wheels for the following platform: `macosx_10_0_ppc64`; consider adding your platform to `tool.uv.required-environments` to ensure uv resolves to a version with compatible wheels "); Ok(()) } + +#[test] +fn sync_url_with_query_parameters() -> Result<()> { + let context = TestContext::new("3.13").with_exclude_newer("2025-03-24T19:00:00Z"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(r#" + [project] + name = "example" + version = "0.1.0" + requires-python = ">=3.13.2" + dependencies = ["source-distribution @ https://files.pythonhosted.org/packages/1f/e5/5b016c945d745f8b108e759d428341488a6aee8f51f07c6c4e33498bb91f/source_distribution-0.0.3.tar.gz?foo=bar"] + "# + )?; + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + source-distribution==0.0.3 (from 
https://files.pythonhosted.org/packages/1f/e5/5b016c945d745f8b108e759d428341488a6aee8f51f07c6c4e33498bb91f/source_distribution-0.0.3.tar.gz?foo=bar) + "); + + Ok(()) +} + +#[test] +#[cfg(unix)] +fn read_only() -> Result<()> { + use std::os::unix::fs::PermissionsExt; + + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig"] + "#, + )?; + + uv_snapshot!(context.filters(), context.sync(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "###); + + assert!(context.temp_dir.child("uv.lock").exists()); + + // Remove the flock. + fs_err::remove_file(context.venv.child(".lock"))?; + + // Make the virtual environment read and execute (but not write). + fs_err::set_permissions(&context.venv, std::fs::Permissions::from_mode(0o555))?; + + uv_snapshot!(context.filters(), context.sync(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + Audited 1 package in [TIME] + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/tool_install.rs b/crates/uv/tests/it/tool_install.rs index 88e73406f..6a2d38db8 100644 --- a/crates/uv/tests/it/tool_install.rs +++ b/crates/uv/tests/it/tool_install.rs @@ -420,7 +420,6 @@ fn tool_install_with_incompatible_build_constraints() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved [N] packages in [TIME] × Failed to download and build `requests==1.2.0` ├─▶ Failed to resolve requirements from `setup.py` build ├─▶ No solution found when resolving: `setuptools>=40.8.0` @@ -449,13 +448,13 @@ fn tool_install_suggest_other_packages_with_executable() { uv_snapshot!(filters, context.tool_install() .arg("fastapi==0.111.0") .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) - .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###" + .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r" success: false exit_code: 1 ----- stdout ----- - No executables are provided by `fastapi` - However, an executable with the name `fastapi` is available via dependency `fastapi-cli`. - Did you mean `uv tool install fastapi-cli`? + No executables are provided by package `fastapi`; removing tool + hint: An executable with the name `fastapi` is available via dependency `fastapi-cli`. + Did you mean `uv tool install fastapi-cli`? ----- stderr ----- Resolved 35 packages in [TIME] @@ -495,7 +494,7 @@ fn tool_install_suggest_other_packages_with_executable() { + uvicorn==0.29.0 + watchfiles==0.21.0 + websockets==12.0 - "###); + "); } /// Test installing a tool at a version @@ -822,11 +821,11 @@ fn tool_install_remove_on_empty() -> Result<()> { .arg(black.path()) .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()) - .env(EnvVars::PATH, bin_dir.as_os_str()), @r###" + .env(EnvVars::PATH, bin_dir.as_os_str()), @r" success: false exit_code: 1 ----- stdout ----- - No executables are provided by `black` + No executables are provided by package `black`; removing tool ----- stderr ----- Resolved 1 package in [TIME] @@ -840,7 +839,7 @@ fn tool_install_remove_on_empty() -> Result<()> { - packaging==24.0 - pathspec==0.12.1 - platformdirs==4.2.0 - "###); + "); // Re-request `black`. It should reinstall, without requiring `--force`. 
uv_snapshot!(context.filters(), context.tool_install() @@ -1650,18 +1649,18 @@ fn tool_install_no_entrypoints() { .arg("iniconfig") .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str()) .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()) - .env(EnvVars::PATH, bin_dir.as_os_str()), @r###" + .env(EnvVars::PATH, bin_dir.as_os_str()), @r" success: false exit_code: 1 ----- stdout ----- - No executables are provided by `iniconfig` + No executables are provided by package `iniconfig`; removing tool ----- stderr ----- Resolved 1 package in [TIME] Prepared 1 package in [TIME] Installed 1 package in [TIME] + iniconfig==2.0.0 - "###); + "); // Ensure the tool environment is not created. tool_dir @@ -1683,7 +1682,6 @@ fn tool_install_uninstallable() { .filters() .into_iter() .chain([ - (r"exit code: 1", "exit status: 1"), (r"bdist\.[^/\\\s]+(-[^/\\\s]+)?", "bdist.linux-x86_64"), (r"\\\.", ""), (r"#+", "#"), diff --git a/crates/uv/tests/it/tool_run.rs b/crates/uv/tests/it/tool_run.rs index a8bcd5a05..d4dcb216c 100644 --- a/crates/uv/tests/it/tool_run.rs +++ b/crates/uv/tests/it/tool_run.rs @@ -2537,7 +2537,6 @@ fn tool_run_with_incompatible_build_constraints() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved [N] packages in [TIME] × Failed to download and build `requests==1.2.0` ├─▶ Failed to resolve requirements from `setup.py` build ├─▶ No solution found when resolving: `setuptools>=40.8.0` diff --git a/crates/uv/tests/it/tool_upgrade.rs b/crates/uv/tests/it/tool_upgrade.rs index a36db9b92..70309f04d 100644 --- a/crates/uv/tests/it/tool_upgrade.rs +++ b/crates/uv/tests/it/tool_upgrade.rs @@ -741,9 +741,7 @@ fn tool_upgrade_python() { }, { let content = fs_err::read_to_string(tool_dir.join("babel").join("pyvenv.cfg")).unwrap(); let lines: Vec<&str> = content.split('\n').collect(); - assert_snapshot!(lines[lines.len() - 3], @r###" - version_info = 3.12.[X] - "###); + assert_snapshot!(lines[lines.len() - 3], @"version_info = 3.12.[X]"); }); } @@ -826,9 +824,7 @@ fn tool_upgrade_python_with_all() { }, { let content = fs_err::read_to_string(tool_dir.join("babel").join("pyvenv.cfg")).unwrap(); let lines: Vec<&str> = content.split('\n').collect(); - assert_snapshot!(lines[lines.len() - 3], @r###" - version_info = 3.12.[X] - "###); + assert_snapshot!(lines[lines.len() - 3], @"version_info = 3.12.[X]"); }); insta::with_settings!({ @@ -836,8 +832,6 @@ fn tool_upgrade_python_with_all() { }, { let content = fs_err::read_to_string(tool_dir.join("python-dotenv").join("pyvenv.cfg")).unwrap(); let lines: Vec<&str> = content.split('\n').collect(); - assert_snapshot!(lines[lines.len() - 3], @r###" - version_info = 3.12.[X] - "###); + assert_snapshot!(lines[lines.len() - 3], @"version_info = 3.12.[X]"); }); } diff --git a/crates/uv/tests/it/venv.rs b/crates/uv/tests/it/venv.rs index 1d9eb5721..52291c05d 100644 --- a/crates/uv/tests/it/venv.rs +++ b/crates/uv/tests/it/venv.rs @@ -475,17 +475,195 @@ fn create_venv_respects_pyproject_requires_python() -> Result<()> { context.venv.assert(predicates::path::is_dir()); // We warn if we receive an incompatible version - uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r###" + uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] - warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` + warning: The requested interpreter 
resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `project.requires-python`) Creating virtual environment at: .venv Activate with: source .venv/[BIN]/activate - "### + " + ); + + Ok(()) +} + +#[test] +fn create_venv_respects_group_requires_python() -> Result<()> { + let context = TestContext::new_with_versions(&["3.9", "3.10", "3.11", "3.12"]); + + // Without a Python requirement, we use the first on the PATH + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.9.[X] interpreter at: [PYTHON-3.9] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // With `requires-python = ">=3.10"` on the default group, we pick 3.10 + // However non-default groups should not be consulted! + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + dependencies = [] + + [dependency-groups] + dev = ["sortedcontainers"] + other = ["sniffio"] + + [tool.uv.dependency-groups] + dev = {requires-python = ">=3.10"} + other = {requires-python = ">=3.12"} + "# + })?; + + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.10.[X] interpreter at: [PYTHON-3.10] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // When the top-level requires-python and default group requires-python + // both apply, their intersection is used. However non-default groups + // should not be consulted! (here the top-level wins) + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.11" + dependencies = [] + + [dependency-groups] + dev = ["sortedcontainers"] + other = ["sniffio"] + + [tool.uv.dependency-groups] + dev = {requires-python = ">=3.10"} + other = {requires-python = ">=3.12"} + "# + })?; + + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // When the top-level requires-python and default group requires-python + // both apply, their intersection is used. However non-default groups + // should not be consulted! (here the group wins) + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = ">=3.10" + dependencies = [] + + [dependency-groups] + dev = ["sortedcontainers"] + other = ["sniffio"] + + [tool.uv.dependency-groups] + dev = {requires-python = ">=3.11"} + other = {requires-python = ">=3.12"} + "# + })?; + + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // We warn if we receive an incompatible version + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! 
{ r#" + [project] + name = "foo" + version = "1.0.0" + dependencies = [] + + [dependency-groups] + dev = ["sortedcontainers"] + + [tool.uv.dependency-groups] + dev = {requires-python = ">=3.12"} + "# + })?; + + uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.11.[X] interpreter at: [PYTHON-3.11] + warning: The requested interpreter resolved to Python 3.11.[X], which is incompatible with the project's Python requirement: `>=3.12` (from `tool.uv.dependency-groups.dev.requires-python`). + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // We error if there's no compatible version + // non-default groups are not consulted here! + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc! { r#" + [project] + name = "foo" + version = "1.0.0" + requires-python = "<3.12" + dependencies = [] + + [dependency-groups] + dev = ["sortedcontainers"] + other = ["sniffio"] + + [tool.uv.dependency-groups] + dev = {requires-python = ">=3.12"} + other = {requires-python = ">=3.11"} + "# + })?; + + uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11"), @r" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × Found conflicting Python requirements: + │ - foo: <3.12 + │ - foo:dev: >=3.12 + " ); Ok(()) @@ -690,14 +868,14 @@ fn create_venv_unknown_python_patch() { "### ); } else { - uv_snapshot!(&mut command, @r###" + uv_snapshot!(&mut command, @r" success: false exit_code: 1 ----- stdout ----- ----- stderr ----- × No interpreter found for Python 3.12.100 in managed installations or search path - "### + " ); } @@ -874,6 +1052,39 @@ fn non_empty_dir_exists_allow_existing() -> Result<()> { Ok(()) } +/// Run `uv venv` followed by `uv venv --allow-existing`. 
+#[test] +fn create_venv_then_allow_existing() { + let context = TestContext::new_with_versions(&["3.12"]); + + // Create a venv + uv_snapshot!(context.filters(), context.venv(), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + " + ); + + // Create a venv again with `--allow-existing` + uv_snapshot!(context.filters(), context.venv() + .arg("--allow-existing"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] + Creating virtual environment at: .venv + Activate with: source .venv/[BIN]/activate + "### + ); +} + #[test] #[cfg(windows)] fn windows_shims() -> Result<()> { diff --git a/crates/uv/tests/it/version.rs b/crates/uv/tests/it/version.rs index 1fda42705..97d30f4f4 100644 --- a/crates/uv/tests/it/version.rs +++ b/crates/uv/tests/it/version.rs @@ -905,7 +905,7 @@ fn version_get_fallback_unmanaged_short() -> Result<()> { .filters() .into_iter() .chain([( - r"\d+\.\d+\.\d+(\+\d+)?( \(.*\))?", + r"\d+\.\d+\.\d+(-alpha\.\d+)?(\+\d+)?( \(.*\))?", r"[VERSION] ([COMMIT] DATE)", )]) .collect::<Vec<_>>(); @@ -972,7 +972,10 @@ fn version_get_fallback_unmanaged_json() -> Result<()> { .filters() .into_iter() .chain([ - (r#"version": "\d+.\d+.\d+""#, r#"version": "[VERSION]""#), + ( + r#"version": "\d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?""#, + r#"version": "[VERSION]""#, + ), ( r#"short_commit_hash": ".*""#, r#"short_commit_hash": "[HASH]""#, @@ -1175,7 +1178,7 @@ fn self_version_short() -> Result<()> { .filters() .into_iter() .chain([( - r"\d+\.\d+\.\d+(\+\d+)?( \(.*\))?", + r"\d+\.\d+\.\d+(-alpha\.\d+)?(\+\d+)?( \(.*\))?", r"[VERSION] ([COMMIT] DATE)", )]) .collect::<Vec<_>>(); @@ -1220,7 +1223,10 @@ fn self_version_json() -> Result<()> { .filters() .into_iter() .chain([ - (r#"version": "\d+.\d+.\d+""#, r#"version": "[VERSION]""#), + ( + r#"version": "\d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?""#, + r#"version": "[VERSION]""#, + ), ( r#"short_commit_hash": ".*""#, r#"short_commit_hash": "[HASH]""#, @@ -1952,3 +1958,57 @@ fn version_set_evil_constraints() -> Result<()> { Ok(()) } + +/// Bump the version with conflicting extras, to ensure we're activating the correct subset of +/// extras during the resolve. +#[test] +fn version_extras() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" +[project] +name = "myproject" +version = "1.10.31" +requires-python = ">=3.12" + +[project.optional-dependencies] +foo = ["requests"] +bar = ["httpx"] +baz = ["flask"] + +[tool.uv] +conflicts = [[{"extra" = "foo"}, {"extra" = "bar"}]] +"#, + )?; + + uv_snapshot!(context.filters(), context.version() + .arg("--bump").arg("patch"), @r" + success: true + exit_code: 0 + ----- stdout ----- + myproject 1.10.31 => 1.10.32 + + ----- stderr ----- + Resolved 19 packages in [TIME] + Audited in [TIME] + "); + + // Sync an extra, we should not remove it.
+ context.sync().arg("--extra").arg("foo").assert().success(); + + uv_snapshot!(context.filters(), context.version() + .arg("--bump").arg("patch"), @r" + success: true + exit_code: 0 + ----- stdout ----- + myproject 1.10.32 => 1.10.33 + + ----- stderr ----- + Resolved 19 packages in [TIME] + Audited in [TIME] + "); + + Ok(()) +} diff --git a/crates/uv/tests/it/workspace.rs b/crates/uv/tests/it/workspace.rs index c52c4a2f1..631e4f6c3 100644 --- a/crates/uv/tests/it/workspace.rs +++ b/crates/uv/tests/it/workspace.rs @@ -1351,7 +1351,7 @@ fn workspace_unsatisfiable_member_dependencies() -> Result<()> { leaf.child("src/__init__.py").touch()?; // Resolving should fail. - uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###" + uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r" success: false exit_code: 1 ----- stdout ----- @@ -1359,9 +1359,9 @@ fn workspace_unsatisfiable_member_dependencies() -> Result<()> { ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] × No solution found when resolving dependencies: - ╰─▶ Because only httpx<=1.0.0b0 is available and leaf depends on httpx>9999, we can conclude that leaf's requirements are unsatisfiable. + ╰─▶ Because only httpx<=0.27.0 is available and leaf depends on httpx>9999, we can conclude that leaf's requirements are unsatisfiable. And because your workspace requires leaf, we can conclude that your workspace's requirements are unsatisfiable. - "### + " ); Ok(()) diff --git a/dist-workspace.toml b/dist-workspace.toml new file mode 100644 index 000000000..3e16bd4cf --- /dev/null +++ b/dist-workspace.toml @@ -0,0 +1,89 @@ +[workspace] +members = ["cargo:."] + +# Config for 'dist' +[dist] +# The preferred dist version to use in CI (Cargo.toml SemVer syntax) +cargo-dist-version = "0.28.7-prerelease.1" +# make a package being included in our releases opt-in instead of opt-out +dist = false +# CI backends to support +ci = "github" +# The installers to generate for each app +installers = ["shell", "powershell"] +# The archive format to use for windows builds (defaults .zip) +windows-archive = ".zip" +# The archive format to use for non-windows builds (defaults .tar.xz) +unix-archive = ".tar.gz" +# Target platforms to build apps for (Rust target-triple syntax) +targets = [ + "aarch64-apple-darwin", + "aarch64-unknown-linux-gnu", + "aarch64-unknown-linux-musl", + "aarch64-pc-windows-msvc", + "arm-unknown-linux-musleabihf", + "armv7-unknown-linux-gnueabihf", + "armv7-unknown-linux-musleabihf", + "x86_64-apple-darwin", + "powerpc64-unknown-linux-gnu", + "powerpc64le-unknown-linux-gnu", + "riscv64gc-unknown-linux-gnu", + "s390x-unknown-linux-gnu", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "x86_64-pc-windows-msvc", + "i686-unknown-linux-gnu", + "i686-unknown-linux-musl", + "i686-pc-windows-msvc" +] +# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true) +auto-includes = false +# Whether dist should create a Github Release or use an existing draft +create-release = true +# Which actions to run on pull requests +pr-run-mode = "plan" +# Whether CI should trigger releases with dispatches instead of tag pushes +dispatch-releases = true +# Which phase dist should use to create the GitHub release +github-release = "announce" +# Whether CI should include auto-generated code to build local artifacts +build-local-artifacts = false +# Local artifacts jobs to run in CI +local-artifacts-jobs = ["./build-binaries", "./build-docker"] +# Publish jobs to run 
in CI +publish-jobs = ["./publish-pypi"] +# Post-announce jobs to run in CI +post-announce-jobs = ["./publish-docs"] +# Custom permissions for GitHub Jobs +github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read", id-token = "write", attestations = "write" } } +# Whether to install an updater program +install-updater = false +# Path that installers should place binaries in +install-path = [ + "$XDG_BIN_HOME/", + "$XDG_DATA_HOME/../bin", + "~/.local/bin" +] + +[dist.github-custom-runners] +global = "depot-ubuntu-latest-4" + +[dist.min-glibc-version] +# Override glibc version for specific target triplets. +aarch64-unknown-linux-gnu = "2.28" +riscv64gc-unknown-linux-gnu = "2.31" +# Override all remaining glibc versions. +"*" = "2.17" + +[dist.github-action-commits] +"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4 +"actions/upload-artifact" = "6027e3dd177782cd8ab9af838c04fd81a07f1d47" # v4.6.2 +"actions/download-artifact" = "d3f86a106a0bac45b974a628896c90dbdf5c8093" # v4.3.0 +"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" #v2.2.3 + +[dist.binaries] +"*" = ["uv", "uvx"] +# Add "uvw" binary for Windows targets +aarch64-pc-windows-msvc = ["uv", "uvx", "uvw"] +i686-pc-windows-msvc = ["uv", "uvx", "uvw"] +x86_64-pc-windows-msvc = ["uv", "uvx", "uvw"] diff --git a/docs/concepts/build-backend.md b/docs/concepts/build-backend.md index 8d0dfa533..e68069ddb 100644 --- a/docs/concepts/build-backend.md +++ b/docs/concepts/build-backend.md @@ -2,103 +2,205 @@ !!! note - The uv build backend is currently in preview and may change without warning. - - When preview mode is not enabled, uv uses [hatchling](https://pypi.org/project/hatchling/) as the default build backend. + Currently, the default build backend for `uv init` is + [hatchling](https://pypi.org/project/hatchling/). This will change to `uv` in a future version. A build backend transforms a source tree (i.e., a directory) into a source distribution or a wheel. -While uv supports all build backends (as specified by PEP 517), it includes a `uv_build` backend -that integrates tightly with uv to improve performance and user experience. -The uv build backend currently only supports Python code. An alternative backend is required if you -want to create a +uv supports all build backends (as specified by [PEP 517](https://peps.python.org/pep-0517/)), but +also provides a native build backend (`uv_build`) that integrates tightly with uv to improve +performance and user experience. + +## Choosing a build backend + +The uv build backend is a great choice for most Python projects. It has reasonable defaults, with +the goal of requiring zero configuration for most users, but provides flexible configuration to +accommodate most Python project structures. It integrates tightly with uv, to improve messaging and +user experience. It validates project metadata and structures, preventing common mistakes. And, +finally, it's very fast. + +The uv build backend currently **only supports pure Python code**. An alternative backend is +required to build a [library with extension modules](../concepts/projects/init.md#projects-with-extension-modules). -To use the uv build backend as [build system](../concepts/projects/config.md#build-systems) in an -existing project, add it to the `[build-system]` section in your `pyproject.toml`: +!!! 
tip -```toml + While the backend supports a number of options for configuring your project structure, when build scripts or + a more flexible project layout are required, consider using the + [hatchling](https://hatch.pypa.io/latest/config/build/#build-system) build backend instead. + +## Using the uv build backend + +To use uv as a build backend in an existing project, add `uv_build` to the +[`[build-system]`](../concepts/projects/config.md#build-systems) section in your `pyproject.toml`: + +```toml title="pyproject.toml" [build-system] -requires = ["uv_build>=0.7.13,<0.8.0"] +requires = ["uv_build>=0.7.19,<0.8.0"] build-backend = "uv_build" ``` +!!! note + + The uv build backend follows the same [versioning policy](../reference/policies/versioning.md) + as uv. Including an upper bound on the `uv_build` version ensures that your package continues to + build correctly as new versions are released. + +To create a new project that uses the uv build backend, use `uv init`: + +```console +$ uv init --build-backend uv +``` + +When the project is built, e.g., with [`uv build`](../guides/package.md), the uv build backend will +be used to create the source distribution and wheel. + +## Bundled build backend + +The build backend is published as a separate package (`uv_build`) that is optimized for portability +and small binary size. However, the `uv` executable also includes a copy of the build backend, which +will be used during builds performed by uv, e.g., during `uv build`, if its version is compatible +with the `uv_build` requirement. If it's not compatible, a compatible version of the `uv_build` +package will be used. Other build frontends, such as `python -m build`, will always use the +`uv_build` package, typically choosing the latest compatible version. + +## Modules + +Python packages are expected to contain one or more Python modules, which are directories containing +an `__init__.py`. By default, a single root module is expected at `src//__init__.py`. + +For example, the structure for a project named `foo` would be: + +```text +pyproject.toml +src +└── foo + └── __init__.py +``` + +uv normalizes the package name to determine the default module name: the package name is lowercased +and dots and dashes are replaced with underscores, e.g., `Foo-Bar` would be converted to `foo_bar`. + +The `src/` directory is the default directory for module discovery. + +These defaults can be changed with the `module-name` and `module-root` settings. For example, to use +a `FOO` module in the root directory, as in the project structure: + +```text +pyproject.toml +FOO +└── __init__.py +``` + +The correct build configuration would be: + +```toml title="pyproject.toml" +[tool.uv.build-backend] +module-name = "FOO" +module-root = "" +``` + +## Namespace packages + +Namespace packages are intended for use-cases where multiple packages write modules into a shared +namespace. + +Namespace package modules are identified by a `.` in the `module-name`. For example, to package the +module `bar` in the shared namespace `foo`, the project structure would be: + +```text +pyproject.toml +src +└── foo + └── bar + └── __init__.py +``` + +And the `module-name` configuration would be: + +```toml title="pyproject.toml" +[tool.uv.build-backend] +module-name = "foo.bar" +``` + !!! important - The uv build backend follows the same [versioning policy](../reference/policies/versioning.md), - setting an upper bound on the `uv_build` version ensures that the package continues to build in - the future. 
+ The `__init__.py` file is not included in `foo`, since it's the shared namespace module. -You can also create a new project that uses the uv build backend with `uv init`: +It's also possible to have a complex namespace package with more than one root module, e.g., with +the project structure: -```shell -uv init --build-backend uv +```text +pyproject.toml +src +├── foo +│   └── __init__.py +└── bar + └── __init__.py ``` -`uv_build` is a separate package from uv, optimized for portability and small binary size. The `uv` -command includes a copy of the build backend, so when running `uv build`, the same version will be -used for the build backend as for the uv process. Other build frontends, such as `python -m build`, -will choose the latest compatible `uv_build` version. +While we do not recommend this structure (i.e., you should use a workspace with multiple packages +instead), it is supported via the `namespace` option: -## Modules - -The default module name is the package name in lower case with dots and dashes replaced by -underscores, and the default module location is under the `src` directory, i.e., the build backend -expects to find `src//__init__.py`. These defaults can be changed with the -`module-name` and `module-root` setting. The example below expects a module in the project root with -`PIL/__init__.py` instead: - -```toml -[tool.uv.build-backend] -module-name = "PIL" -module-root = "" -``` - -For a namespace packages, the path can be dotted. The example below expects to find a -`src/cloud/db/schema/__init__.py`: - -```toml -[tool.uv.build-backend] -module-name = "cloud.db.schema" -``` - -Complex namespaces with more than one root module can be built by setting the `namespace` option, -which allows more than one root `__init__.py`: - -```toml +```toml title="pyproject.toml" [tool.uv.build-backend] namespace = true ``` -The build backend supports building stubs packages with a `-stubs` suffix on the package or module -name, including for namespace packages. +## Stub packages -## Include and exclude configuration +The build backend also supports building type stub packages, which are identified by the `-stubs` +suffix on the package or module name, e.g., `foo-stubs`. The module name for type stub packages must +end in `-stubs`, so uv will not normalize the `-` to an underscore. Additionally, uv will search for +a `__init__.pyi` file. For example, the project structure would be: -To select which files to include in the source distribution, uv first adds the included files and +```text +pyproject.toml +src +└── foo-stubs + └── __init__.pyi +``` + +Type stub modules are also supported for [namespace packages](#namespace-packages). + +## File inclusion and exclusion + +The build backend is responsible for determining which files in a source tree should be packaged +into the distributions. + +To determine which files to include in a source distribution, uv first adds the included files and directories, then removes the excluded files and directories. This means that exclusions always take precedence over inclusions. -When building the source distribution, the following files and directories are included: +By default, uv excludes `__pycache__`, `*.pyc`, and `*.pyo`. -- `pyproject.toml` -- The module under `tool.uv.build-backend.module-root`, by default - `src//**`. -- `project.license-files` and `project.readme`. -- All directories under `tool.uv.build-backend.data`. -- All patterns from `tool.uv.build-backend.source-include`. 
+When building a source distribution, the following files and directories are included:

-From these, `tool.uv.build-backend.source-exclude` and the default excludes are removed.
+- The `pyproject.toml`
+- The [module](#modules) under
+  [`tool.uv.build-backend.module-root`](../reference/settings.md#build-backend_module-root).
+- The files referenced by `project.license-files` and `project.readme`.
+- All directories under [`tool.uv.build-backend.data`](../reference/settings.md#build-backend_data).
+- All files matching patterns from
+  [`tool.uv.build-backend.source-include`](../reference/settings.md#build-backend_source-include).

-When building the wheel, the following files and directories are included:
+From these, items matching
+[`tool.uv.build-backend.source-exclude`](../reference/settings.md#build-backend_source-exclude) and
+the [default excludes](../reference/settings.md#build-backend_default-excludes) are removed.

-- The module under `tool.uv.build-backend.module-root`, by default
-  `src/<package_name>/**`.
-- `project.license-files` and `project.readme`, as part of the project metadata.
-- Each directory under `tool.uv.build-backend.data`, as data directories.
+When building a wheel, the following files and directories are included:

-From these, `tool.uv.build-backend.source-exclude`, `tool.uv.build-backend.wheel-exclude` and the
-default excludes are removed. The source dist excludes are applied to avoid source tree to wheel
-source builds including more files than source tree to source distribution to wheel build.
+- The [module](#modules) under
+  [`tool.uv.build-backend.module-root`](../reference/settings.md#build-backend_module-root)
+- The files referenced by `project.license-files`, which are copied into the `.dist-info` directory.
+- The `project.readme`, which is copied into the project metadata.
+- All directories under [`tool.uv.build-backend.data`](../reference/settings.md#build-backend_data),
+  which are copied into the `.data` directory.
+
+From these,
+[`tool.uv.build-backend.source-exclude`](../reference/settings.md#build-backend_source-exclude),
+[`tool.uv.build-backend.wheel-exclude`](../reference/settings.md#build-backend_wheel-exclude) and
+the default excludes are removed. The source distribution excludes are applied here as well, so
+that building a wheel directly from the source tree cannot include more files than building a
+wheel through a source distribution would.

 There are no specific wheel includes. There must only be one top level module, and all data files
@@ -106,20 +208,20 @@ must either be under the module root or in the appropriate
 [data directory](../reference/settings.md#build-backend_data). Most packages store small data in the
 module root alongside the source code.

-## Include and exclude syntax
+### Include and exclude syntax

-Includes are anchored, which means that `pyproject.toml` includes only
-`<project root>/pyproject.toml`. For example, `assets/**/sample.csv` includes all `sample.csv` files
-in `<project root>/assets` or any child directory. To recursively include all files under a
-directory, use a `/**` suffix, e.g. `src/**`.
+Includes are anchored, which means that `pyproject.toml` includes only `<project root>/pyproject.toml`
+and not `<project root>/bar/pyproject.toml`. To recursively include all files under a directory, use a `/**`
+suffix, e.g. `src/**`. Recursive inclusions are also anchored, e.g., `assets/**/sample.csv` includes
+all `sample.csv` files in `<project root>/assets` or any of its children.

 !!! note

     For performance and reproducibility, avoid patterns without an anchor such as
     `**/sample.csv`.

 Excludes are not anchored, which means that `__pycache__` excludes all directories named
-`__pycache__` and its children anywhere. To anchor a directory, use a `/` prefix, e.g., `/dist` will
-exclude only `<project root>/dist`.
+`__pycache__` regardless of its parent directory. All children of an exclusion are excluded as well.
+To anchor a directory, use a `/` prefix, e.g., `/dist` will exclude only `<project root>/dist`.

 All fields accepting patterns use the reduced portable glob syntax from
 [PEP 639](https://peps.python.org/pep-0639/#add-license-files-key), with the addition that
diff --git a/docs/concepts/projects/dependencies.md b/docs/concepts/projects/dependencies.md
index 42a579695..22d030637 100644
--- a/docs/concepts/projects/dependencies.md
+++ b/docs/concepts/projects/dependencies.md
@@ -37,7 +37,7 @@ dependencies = ["httpx>=0.27.2"]
 ```

 The [`--dev`](#development-dependencies), [`--group`](#dependency-groups), or
-[`--optional`](#optional-dependencies) flags can be used to add a dependencies to an alternative
+[`--optional`](#optional-dependencies) flags can be used to add dependencies to an alternative
 field.

 The dependency will include a constraint, e.g., `>=0.27.2`, for the most recent, compatible version
diff --git a/docs/concepts/projects/workspaces.md b/docs/concepts/projects/workspaces.md
index 942cea8c2..4b2d670b4 100644
--- a/docs/concepts/projects/workspaces.md
+++ b/docs/concepts/projects/workspaces.md
@@ -113,6 +113,13 @@ build-backend = "hatchling.build"
 Every workspace member would, by default, install `tqdm` from GitHub, unless a specific member
 overrides the `tqdm` entry in its own `tool.uv.sources` table.

+!!! note
+
+    If a workspace member provides `tool.uv.sources` for some dependency, it will ignore any
+    `tool.uv.sources` for the same dependency in the workspace root, even if the member's source is
+    limited by a [marker](dependencies.md#platform-specific-sources) that doesn't match the current
+    platform.
+
 ## Workspace layouts

 The most common workspace layout can be thought of as a root project with a series of accompanying
diff --git a/docs/concepts/python-versions.md b/docs/concepts/python-versions.md
index 0d409ff50..a7472bea8 100644
--- a/docs/concepts/python-versions.md
+++ b/docs/concepts/python-versions.md
@@ -123,7 +123,7 @@ present, uv will install all the Python versions listed in the file.

 !!! important

-    Support for installing Python executables is in _preview_, this means the behavior is experimental
+    Support for installing Python executables is in _preview_. This means the behavior is experimental
     and subject to change.

 To install Python executables into your `PATH`, provide the `--preview` option:
@@ -158,6 +158,70 @@ $ uv python install 3.12.6 --preview  # Does not update `python3.12`
 $ uv python install 3.12.8 --preview  # Updates `python3.12` to point to 3.12.8
 ```

+## Upgrading Python versions
+
+!!! important
+
+    Support for upgrading Python versions is in _preview_. This means the behavior is experimental
+    and subject to change.
+
+    Upgrades are only supported for uv-managed Python versions.
+
+    Upgrades are not currently supported for PyPy and GraalPy.
+
+uv allows transparently upgrading Python versions to the latest patch release, e.g., 3.13.4 to
+3.13.5. uv does not allow transparently upgrading across minor Python versions, e.g., 3.12 to 3.13,
+because changing minor versions can affect dependency resolution.
+
+uv-managed Python versions can be upgraded to the latest supported patch release with the
+`python upgrade` command:
+
+```console
+$ uv python upgrade 3.12
+```
+
+To upgrade all installed Python versions:
+
+```console
+$ uv python upgrade
+```
+
+After an upgrade, uv will prefer the new version, but will retain the existing version, as it may
+still be used by virtual environments.
+
+If the Python version was installed with preview enabled, e.g., `uv python install 3.12 --preview`,
+virtual environments using the Python version will be automatically upgraded to the new patch
+version.
+
+!!! note
+
+    If the virtual environment was created _before_ opting in to the preview mode, it will not be
+    included in the automatic upgrades.
+
+If a virtual environment was created with an explicitly requested patch version, e.g.,
+`uv venv -p 3.10.8`, it will not be transparently upgraded to a new version.
+
+### Minor version directories
+
+Automatic upgrades for virtual environments are implemented using a directory with the Python minor
+version, e.g.:
+
+```
+~/.local/share/uv/python/cpython-3.12-macos-aarch64-none
+```
+
+which is a symbolic link (on Unix) or junction (on Windows) pointing to a specific patch version:
+
+```console
+$ readlink ~/.local/share/uv/python/cpython-3.12-macos-aarch64-none
+~/.local/share/uv/python/cpython-3.12.11-macos-aarch64-none
+```
+
+If this link is resolved by another tool, e.g., by canonicalizing the Python interpreter path, and
+used to create a virtual environment, it will not be automatically upgraded.
+
 ## Project Python versions

 uv will respect Python requirements defined in `requires-python` in the `pyproject.toml` file during
diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md
index 6b6b3e872..a507a3ade 100644
--- a/docs/getting-started/installation.md
+++ b/docs/getting-started/installation.md
@@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv:
     Request a specific version by including it in the URL:

     ```console
-    $ curl -LsSf https://astral.sh/uv/0.7.13/install.sh | sh
+    $ curl -LsSf https://astral.sh/uv/0.7.19/install.sh | sh
     ```

 === "Windows"
@@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv:
     Request a specific version by including it in the URL:

     ```pwsh-session
-    PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.13/install.ps1 | iex"
+    PS> powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.7.19/install.ps1 | iex"
     ```

 !!! tip
diff --git a/docs/guides/install-python.md b/docs/guides/install-python.md
index 0b80589a5..da841eac6 100644
--- a/docs/guides/install-python.md
+++ b/docs/guides/install-python.md
@@ -120,6 +120,28 @@ To force uv to use the system Python, provide the `--no-managed-python` flag. Se
 [Python version preference](../concepts/python-versions.md#requiring-or-disabling-managed-python-versions)
 documentation for more details.

+## Upgrading Python versions
+
+!!! important
+
+    Support for upgrading Python patch versions is in _preview_. This means the behavior is
+    experimental and subject to change.
+
+To upgrade a Python version to the latest supported patch release:
+
+```console
+$ uv python upgrade 3.12
+```
+
+To upgrade all uv-managed Python versions:
+
+```console
+$ uv python upgrade
+```
+
+See the [`python upgrade`](../concepts/python-versions.md#upgrading-python-versions) documentation
+for more details.
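+
+To verify which versions are installed after an upgrade, you can list the managed installations (a
+sketch; the output varies by platform and installed versions):
+
+```console
+$ uv python list --only-installed
+```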
+
 ## Next steps

 To learn more about `uv python`, see the [Python version concept](../concepts/python-versions.md)
diff --git a/docs/guides/integration/alternative-indexes.md b/docs/guides/integration/alternative-indexes.md
index 52ec6e365..3e73efff0 100644
--- a/docs/guides/integration/alternative-indexes.md
+++ b/docs/guides/integration/alternative-indexes.md
@@ -142,7 +142,7 @@ To use Google Artifact Registry, add the index to your project:
 ```toml title="pyproject.toml"
 [[tool.uv.index]]
 name = "private-registry"
-url = "https://<region>-python.pkg.dev/<project>/<repository>"
+url = "https://<region>-python.pkg.dev/<project>/<repository>/simple/"
 ```

 ### Authenticate with a Google access token
@@ -219,8 +219,8 @@ First, add a `publish-url` to the index you want to publish packages to. For exa
 ```toml title="pyproject.toml" hl_lines="4"
 [[tool.uv.index]]
 name = "private-registry"
-url = "https://<region>-python.pkg.dev/<project>/<repository>"
-publish-url = "https://<region>-python.pkg.dev/<project>/<repository>"
+url = "https://<region>-python.pkg.dev/<project>/<repository>/simple/"
+publish-url = "https://<region>-python.pkg.dev/<project>/<repository>/"
 ```

 Then, configure credentials (if not using keyring):
@@ -239,7 +239,7 @@ $ uv publish --index private-registry
 To use `uv publish` without adding the `publish-url` to the project, you can set `UV_PUBLISH_URL`:

 ```console
-$ export UV_PUBLISH_URL=https://<region>-python.pkg.dev/<project>/<repository>
+$ export UV_PUBLISH_URL=https://<region>-python.pkg.dev/<project>/<repository>/
 $ uv publish
 ```

@@ -368,6 +368,68 @@ $ uv publish
 Note this method is not preferable because uv cannot check if the package is already published
 before uploading artifacts.

-## Other package indexes
+## JFrog Artifactory

-uv is also known to work with JFrog's Artifactory.
+uv can install packages from JFrog Artifactory, either by using a username and password or a JWT
+token.
+
+To use it, add the index to your project:
+
+```toml title="pyproject.toml"
+[[tool.uv.index]]
+name = "private-registry"
+url = "https://<organization>.jfrog.io/artifactory/api/pypi/<repository>/simple"
+```
+
+### Authenticate with username and password
+
+```console
+$ export UV_INDEX_PRIVATE_REGISTRY_USERNAME="<username>"
+$ export UV_INDEX_PRIVATE_REGISTRY_PASSWORD="<password>"
+```
+
+### Authenticate with JWT token
+
+```console
+$ export UV_INDEX_PRIVATE_REGISTRY_USERNAME="<username>"
+$ export UV_INDEX_PRIVATE_REGISTRY_PASSWORD="$JFROG_JWT_TOKEN"
+```
+
+!!! note
+
+    Replace `PRIVATE_REGISTRY` in the environment variable names with the actual index name defined in your `pyproject.toml`.
+
+### Publishing packages to JFrog Artifactory
+
+Add a `publish-url` to your index definition:
+
+```toml title="pyproject.toml"
+[[tool.uv.index]]
+name = "private-registry"
+url = "https://<organization>.jfrog.io/artifactory/api/pypi/<repository>/simple"
+publish-url = "https://<organization>.jfrog.io/artifactory/api/pypi/<repository>"
+```
+
+!!! important
+
+    If you use `--token "$JFROG_TOKEN"` or `UV_PUBLISH_TOKEN` with JFrog, you will receive a
+    401 Unauthorized error, as JFrog requires an empty username but uv passes `__token__` as
+    the username when `--token` is used.
+
+To authenticate, pass your token as the password and set the username to an empty string:
+
+```console
+$ uv publish --index <index-name> -u "" -p "$JFROG_TOKEN"
+```
+
+Alternatively, you can set environment variables:
+
+```console
+$ export UV_PUBLISH_USERNAME=""
+$ export UV_PUBLISH_PASSWORD="$JFROG_TOKEN"
+$ uv publish --index private-registry
+```
+
+!!! note
+
+    The publish environment variables (`UV_PUBLISH_USERNAME` and `UV_PUBLISH_PASSWORD`) do not include the index name.
diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md index 700011297..233969420 100644 --- a/docs/guides/integration/aws-lambda.md +++ b/docs/guides/integration/aws-lambda.md @@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th other unnecessary files. ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.13 AS uv +FROM ghcr.io/astral-sh/uv:0.7.19 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder @@ -334,7 +334,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell Finally, we'll update the Dockerfile to include the local library in the deployment package: ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.7.13 AS uv +FROM ghcr.io/astral-sh/uv:0.7.19 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index 55f86fad6..bfbae2a7b 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -31,7 +31,7 @@ $ docker run --rm -it ghcr.io/astral-sh/uv:debian uv --help The following distroless images are available: - `ghcr.io/astral-sh/uv:latest` -- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.13` +- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.7.19` - `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.7` (the latest patch version) @@ -75,7 +75,7 @@ And the following derived images are available: As with the distroless image, each derived image is published with uv version tags as `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and -`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.13-alpine`. +`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.7.19-alpine`. For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv) page. @@ -113,7 +113,7 @@ Note this requires `curl` to be available. In either case, it is best practice to pin to a specific uv version, e.g., with: ```dockerfile -COPY --from=ghcr.io/astral-sh/uv:0.7.13 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:0.7.19 /uv /uvx /bin/ ``` !!! tip @@ -131,7 +131,7 @@ COPY --from=ghcr.io/astral-sh/uv:0.7.13 /uv /uvx /bin/ Or, with the installer: ```dockerfile -ADD https://astral.sh/uv/0.7.13/install.sh /uv-installer.sh +ADD https://astral.sh/uv/0.7.19/install.sh /uv-installer.sh ``` ### Installing a project @@ -557,5 +557,5 @@ Verified OK !!! tip These examples use `latest`, but best practice is to verify the attestation for a specific - version tag, e.g., `ghcr.io/astral-sh/uv:0.7.13`, or (even better) the specific image digest, + version tag, e.g., `ghcr.io/astral-sh/uv:0.7.19`, or (even better) the specific image digest, such as `ghcr.io/astral-sh/uv:0.5.27@sha256:5adf09a5a526f380237408032a9308000d14d5947eafa687ad6c6a2476787b4f`. diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index 49eb57a80..de8853d05 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -47,7 +47,7 @@ jobs: uses: astral-sh/setup-uv@v5 with: # Install a specific version of uv. 
- version: "0.7.13" + version: "0.7.19" ``` ## Setting up Python diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md index 87c384da0..b2f637a42 100644 --- a/docs/guides/integration/pre-commit.md +++ b/docs/guides/integration/pre-commit.md @@ -19,7 +19,7 @@ To make sure your `uv.lock` file is up to date even if your `pyproject.toml` fil repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.13 + rev: 0.7.19 hooks: - id: uv-lock ``` @@ -30,7 +30,7 @@ To keep a `requirements.txt` file in sync with your `uv.lock` file: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.13 + rev: 0.7.19 hooks: - id: uv-export ``` @@ -41,7 +41,7 @@ To compile requirements files: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.13 + rev: 0.7.19 hooks: # Compile requirements - id: pip-compile @@ -54,7 +54,7 @@ To compile alternative requirements files, modify `args` and `files`: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.13 + rev: 0.7.19 hooks: # Compile requirements - id: pip-compile @@ -68,7 +68,7 @@ To run the hook over multiple files at the same time, add additional entries: repos: - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.7.13 + rev: 0.7.19 hooks: # Compile requirements - id: pip-compile diff --git a/docs/guides/integration/pytorch.md b/docs/guides/integration/pytorch.md index 79b6f31e1..a90ebeb6b 100644 --- a/docs/guides/integration/pytorch.md +++ b/docs/guides/integration/pytorch.md @@ -85,21 +85,21 @@ In such cases, the first step is to add the relevant PyTorch index to your `pypr explicit = true ``` -=== "CUDA 12.1" +=== "CUDA 12.6" ```toml [[tool.uv.index]] - name = "pytorch-cu121" - url = "https://download.pytorch.org/whl/cu121" + name = "pytorch-cu126" + url = "https://download.pytorch.org/whl/cu126" explicit = true ``` -=== "CUDA 12.4" +=== "CUDA 12.8" ```toml [[tool.uv.index]] - name = "pytorch-cu124" - url = "https://download.pytorch.org/whl/cu124" + name = "pytorch-cu128" + url = "https://download.pytorch.org/whl/cu128" explicit = true ``` @@ -108,7 +108,7 @@ In such cases, the first step is to add the relevant PyTorch index to your `pypr ```toml [[tool.uv.index]] name = "pytorch-rocm" - url = "https://download.pytorch.org/whl/rocm6.2" + url = "https://download.pytorch.org/whl/rocm6.3" explicit = true ``` @@ -154,7 +154,7 @@ Next, update the `pyproject.toml` to point `torch` and `torchvision` to the desi ] ``` -=== "CUDA 12.1" +=== "CUDA 12.6" PyTorch doesn't publish CUDA builds for macOS. As such, we gate on `sys_platform` to instruct uv to limit the PyTorch index to Linux and Windows, falling back to PyPI on macOS: @@ -162,14 +162,14 @@ Next, update the `pyproject.toml` to point `torch` and `torchvision` to the desi ```toml [tool.uv.sources] torch = [ - { index = "pytorch-cu121", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { index = "pytorch-cu126", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] torchvision = [ - { index = "pytorch-cu121", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, + { index = "pytorch-cu126", marker = "sys_platform == 'linux' or sys_platform == 'win32'" }, ] ``` -=== "CUDA 12.4" +=== "CUDA 12.8" PyTorch doesn't publish CUDA builds for macOS. 
    As such, we gate on `sys_platform` to instruct uv to limit the PyTorch index to Linux and
    Windows, falling back to PyPI on macOS:

@@ -177,10 +177,10 @@ Next, update the `pyproject.toml` to point `torch` and `torchvision` to the desi

    ```toml
    [tool.uv.sources]
    torch = [
-      { index = "pytorch-cu124", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
+      { index = "pytorch-cu128", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
    ]
    torchvision = [
-      { index = "pytorch-cu124", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
+      { index = "pytorch-cu128", marker = "sys_platform == 'linux' or sys_platform == 'win32'" },
    ]
    ```

@@ -355,11 +355,11 @@ explicit = true

 You may want to use CPU-only builds in some cases, but CUDA-enabled builds in others,
 with the choice toggled by a user-provided extra (e.g., `uv sync --extra cpu` vs.
-`uv sync --extra cu124`).
+`uv sync --extra cu128`).

 With `tool.uv.sources`, you can use extra markers to specify the desired index for each enabled
 extra. For example, the following configuration would use PyTorch's CPU-only builds for
-`uv sync --extra cpu` and CUDA-enabled builds for `uv sync --extra cu124`:
+`uv sync --extra cpu` and CUDA-enabled builds for `uv sync --extra cu128`:

 ```toml
 [project]
@@ -410,7 +410,7 @@ explicit = true
 !!! note

     Since GPU-accelerated builds aren't available on macOS, the above configuration will fail to install
-    on macOS when the `cu124` extra is enabled.
+    on macOS when the `cu128` extra is enabled.

 ## The `uv pip` interface

@@ -433,25 +433,31 @@ $ uv pip install torch torchvision torchaudio --index-url https://download.pytor

 ## Automatic backend selection

-In [preview](../../reference/settings.md#preview), uv can automatically select the appropriate
-PyTorch index at runtime by inspecting the system configuration via `--torch-backend=auto` (or
-`UV_TORCH_BACKEND=auto`):
+uv supports automatic selection of the appropriate PyTorch index via the `--torch-backend=auto`
+command-line argument (or the `UV_TORCH_BACKEND=auto` environment variable), as in:

 ```shell
+$ # With a command-line argument.
+$ uv pip install torch --torch-backend=auto
+
+$ # With an environment variable.
 $ UV_TORCH_BACKEND=auto uv pip install torch
 ```

-When enabled, uv will query for the installed CUDA driver version and use the most-compatible
-PyTorch index for all relevant packages (e.g., `torch`, `torchvision`, etc.). If no such CUDA driver
-is found, uv will fall back to the CPU-only index. uv will continue to respect existing index
-configuration for any packages outside the PyTorch ecosystem.
+When enabled, uv will query for the installed CUDA driver and AMD GPU versions, then use the
+most-compatible PyTorch index for all relevant packages (e.g., `torch`, `torchvision`, etc.). If no
+such GPU is found, uv will fall back to the CPU-only index. uv will continue to respect existing
+index configuration for any packages outside the PyTorch ecosystem.

-To select a specific backend (e.g., `cu126`), set `--torch-backend=cu126` (or
-`UV_TORCH_BACKEND=cu126`).
+You can also select a specific backend (e.g., CUDA 12.6) with `--torch-backend=cu126` (or
+`UV_TORCH_BACKEND=cu126`):

-At present, `--torch-backend` is only available in the `uv pip` interface, and only supports
-detection of CUDA drivers (as opposed to other accelerators like ROCm or Intel GPUs).
+```shell
+$ # With a command-line argument.
+$ uv pip install torch torchvision --torch-backend=cu126

-As `--torch-backend` is a preview feature, it should be considered experimental and is not governed
-by uv's standard [versioning policy](../../reference/policies/versioning.md). `--torch-backend` may
-change or be removed entirely in future versions of uv.
+$ # With an environment variable.
+$ UV_TORCH_BACKEND=cu126 uv pip install torch torchvision
+```
+
+At present, `--torch-backend` is only available in the `uv pip` interface.
diff --git a/docs/guides/migration/index.md b/docs/guides/migration/index.md
new file mode 100644
index 000000000..aa5f0f44f
--- /dev/null
+++ b/docs/guides/migration/index.md
@@ -0,0 +1,14 @@
+# Migration guides
+
+Learn how to migrate from other tools to uv:
+
+- [Migrate from pip to uv projects](./pip-to-project.md)
+
+!!! note
+
+    Other guides, such as migrating from another project management tool, or from pip to `uv pip`,
+    are not yet available. See [#5200](https://github.com/astral-sh/uv/issues/5200) to track
+    progress.
+
+Or, explore the [integration guides](../integration/index.md) to learn how to use uv with other
+software.
diff --git a/docs/guides/migration/pip-to-project.md b/docs/guides/migration/pip-to-project.md
new file mode 100644
index 000000000..1356cb5d7
--- /dev/null
+++ b/docs/guides/migration/pip-to-project.md
@@ -0,0 +1,472 @@
+# Migrating from pip to a uv project
+
+This guide will discuss converting from a `pip` and `pip-tools` workflow centered on `requirements`
+files to uv's project workflow using a `pyproject.toml` and `uv.lock` file.
+
+!!! note
+
+    If you're looking to migrate from `pip` and `pip-tools` to uv's drop-in interface or from an
+    existing workflow where you're already using a `pyproject.toml`, those guides are not yet
+    written. See [#5200](https://github.com/astral-sh/uv/issues/5200) to track progress.
+
+We'll start with an overview of developing with `pip`, then discuss migrating to uv.
+
+!!! tip
+
+    If you're familiar with the ecosystem, you can jump ahead to the
+    [requirements file import](#importing-requirements-files) instructions.
+
+## Understanding pip workflows
+
+### Project dependencies
+
+When you want to use a package in your project, you need to install it first. `pip` supports
+imperative installation of packages, e.g.:
+
+```console
+$ pip install fastapi
+```
+
+This installs the package into the environment that `pip` is installed in. This may be a virtual
+environment, or the global environment of your system's Python installation.
+
+Then, you can run a Python script that requires the package:
+
+```python title="example.py"
+import fastapi
+```
+
+It's best practice to create a virtual environment for each project, to avoid mixing packages
+between them. For example:
+
+```console
+$ python -m venv .venv
+$ source .venv/bin/activate
+$ pip ...
+```
+
+We will revisit this topic in the [project environments section](#project-environments) below.
+
+### Requirements files
+
+When sharing projects with others, it's useful to declare all the packages you require upfront.
+`pip` supports installing requirements from a file, e.g.:
+
+```python title="requirements.txt"
+fastapi
+```
+
+```console
+$ pip install -r requirements.txt
+```
+
+Notice above that `fastapi` is not "locked" to a specific version — each person working on the
+project may have a different version of `fastapi` installed. `pip-tools` was created to improve this
+experience.
+
+When using `pip-tools`, requirements files both specify the dependencies for your project and lock
+them to a specific version — the file extension is used to differentiate between the two. For
+example, if you require `fastapi` and `pydantic`, you'd specify these in a `requirements.in` file:
+
+```python title="requirements.in"
+fastapi
+pydantic>2
+```
+
+Notice there's a version constraint on `pydantic` — this means only `pydantic` versions later than
+`2.0.0` can be used. In contrast, `fastapi` does not have a version constraint — any version can be
+used.
+
+These dependencies can be compiled into a `requirements.txt` file:
+
+```console
+$ pip-compile requirements.in -o requirements.txt
+```
+
+```python title="requirements.txt"
+annotated-types==0.7.0
+    # via pydantic
+anyio==4.8.0
+    # via starlette
+fastapi==0.115.11
+    # via -r requirements.in
+idna==3.10
+    # via anyio
+pydantic==2.10.6
+    # via
+    #   -r requirements.in
+    #   fastapi
+pydantic-core==2.27.2
+    # via pydantic
+sniffio==1.3.1
+    # via anyio
+starlette==0.46.1
+    # via fastapi
+typing-extensions==4.12.2
+    # via
+    #   fastapi
+    #   pydantic
+    #   pydantic-core
+```
+
+Here, all the version constraints are _exact_. Only a single version of each package can be used.
+The above example was generated with `uv pip compile`, but could also be generated with
+`pip-compile` from `pip-tools`.
+
+Though less common, the `requirements.txt` can also be generated using `pip freeze`, by first
+installing the input dependencies into the environment, then exporting the installed versions:
+
+```console
+$ pip install -r requirements.in
+$ pip freeze > requirements.txt
+```
+
+```python title="requirements.txt"
+annotated-types==0.7.0
+anyio==4.8.0
+fastapi==0.115.11
+idna==3.10
+pydantic==2.10.6
+pydantic-core==2.27.2
+sniffio==1.3.1
+starlette==0.46.1
+typing-extensions==4.12.2
+```
+
+After compiling dependencies into a locked set of versions, these files are committed to version
+control and distributed with the project.
+
+Then, when someone wants to use the project, they install from the requirements file:
+
+```console
+$ pip install -r requirements.txt
+```
+
+### Development dependencies
+
+The requirements file format can only describe a single set of dependencies at once. This means if
+you have additional _groups_ of dependencies, such as development dependencies, they need separate
+files. For example, we'll create a `-dev` dependency file:
+
+```python title="requirements-dev.in"
+-r requirements.in
+-c requirements.txt
+
+pytest
+```
+
+Notice the base requirements are included with `-r requirements.in`. This ensures your development
+environment considers _all_ of the dependencies together. The `-c requirements.txt` _constrains_ the
+package versions to ensure that the `requirements-dev.txt` uses the same versions as
+`requirements.txt`.
+
+!!! note
+
+    It's common to use `-r requirements.txt` directly instead of using both `-r requirements.in`
+    and `-c requirements.txt`. There's no difference in the resulting package versions, but using
+    both files produces annotations which allow you to determine which dependencies are _direct_
+    (annotated with `-r requirements.in`) and which are _indirect_ (only annotated with
+    `-c requirements.txt`).
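+
+The development file is compiled the same way as the base file (a sketch mirroring the earlier
+`pip-compile` invocation):
+
+```console
+$ pip-compile requirements-dev.in -o requirements-dev.txt
+```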
+
+The compiled development dependencies look like:
+
+```python title="requirements-dev.txt"
+annotated-types==0.7.0
+    # via
+    #   -c requirements.txt
+    #   pydantic
+anyio==4.8.0
+    # via
+    #   -c requirements.txt
+    #   starlette
+fastapi==0.115.11
+    # via
+    #   -c requirements.txt
+    #   -r requirements.in
+idna==3.10
+    # via
+    #   -c requirements.txt
+    #   anyio
+iniconfig==2.0.0
+    # via pytest
+packaging==24.2
+    # via pytest
+pluggy==1.5.0
+    # via pytest
+pydantic==2.10.6
+    # via
+    #   -c requirements.txt
+    #   -r requirements.in
+    #   fastapi
+pydantic-core==2.27.2
+    # via
+    #   -c requirements.txt
+    #   pydantic
+pytest==8.3.5
+    # via -r requirements-dev.in
+sniffio==1.3.1
+    # via
+    #   -c requirements.txt
+    #   anyio
+starlette==0.46.1
+    # via
+    #   -c requirements.txt
+    #   fastapi
+typing-extensions==4.12.2
+    # via
+    #   -c requirements.txt
+    #   fastapi
+    #   pydantic
+    #   pydantic-core
+```
+
+As with the base dependency files, these are committed to version control and distributed with the
+project. When someone wants to work on the project, they'll install from the requirements file:
+
+```console
+$ pip install -r requirements-dev.txt
+```
+
+### Platform-specific dependencies
+
+When compiling dependencies with `pip` or `pip-tools`, the result is only usable on the same
+platform as it is generated on. This poses a problem for projects which need to be usable on
+multiple platforms, such as Windows and macOS.
+
+For example, take a simple dependency:
+
+```python title="requirements.in"
+tqdm
+```
+
+On Linux, this compiles to:
+
+```python title="requirements-linux.txt"
+tqdm==4.67.1
+    # via -r requirements.in
+```
+
+While on Windows, this compiles to:
+
+```python title="requirements-win.txt"
+colorama==0.4.6
+    # via tqdm
+tqdm==4.67.1
+    # via -r requirements.in
+```
+
+`colorama` is a Windows-only dependency of `tqdm`.
+
+When using `pip` and `pip-tools`, a project needs to declare a requirements lock file for each
+supported platform.
+
+!!! note
+
+    uv's resolver can compile dependencies for multiple platforms at once (see ["universal resolution"](../../concepts/resolution.md#universal-resolution)),
+    allowing you to use a single `requirements.txt` for all platforms:
+
+    ```console
+    $ uv pip compile --universal requirements.in
+    ```
+
+    ```python title="requirements.txt"
+    colorama==0.4.6 ; sys_platform == 'win32'
+        # via tqdm
+    tqdm==4.67.1
+        # via -r requirements.in
+    ```
+
+    This resolution mode is also used when using a `pyproject.toml` and `uv.lock`.
+
+## Migrating to a uv project
+
+### The `pyproject.toml`
+
+The `pyproject.toml` is a standardized file for Python project metadata. It replaces
+`requirements.in` files, allowing you to represent arbitrary groups of project dependencies. It also
+provides a centralized location for metadata about your project, such as the build system or tool
+settings.
+
+For example, the `requirements.in` and `requirements-dev.in` files above can be translated to a
+`pyproject.toml` as follows:
+
+```toml title="pyproject.toml"
+[project]
+name = "example"
+version = "0.0.1"
+dependencies = [
+    "fastapi",
+    "pydantic>2"
+]
+
+[dependency-groups]
+dev = ["pytest"]
+```
+
+We'll discuss the commands necessary to automate these imports below.
+
+### The uv lockfile
+
+uv uses a lockfile (`uv.lock`) to lock package versions. The format of this file is specific to
+uv, allowing uv to support advanced features. It replaces `requirements.txt` files.
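+
+For a sense of the format, each locked package gets a TOML entry recording its resolved version and
+source (a simplified sketch, not the complete schema):
+
+```toml title="uv.lock (excerpt)"
+[[package]]
+name = "fastapi"
+version = "0.115.11"
+source = { registry = "https://pypi.org/simple" }
+```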
+
+The lockfile will be automatically created and populated when adding dependencies, but you can
+explicitly create it with `uv lock`.
+
+Unlike `requirements.txt` files, the `uv.lock` file can represent arbitrary groups of dependencies,
+so multiple files are not needed to lock development dependencies.
+
+The uv lockfile is always [universal](../../concepts/resolution.md#universal-resolution), so
+multiple files are not needed to
+[lock dependencies for each platform](#platform-specific-dependencies). This ensures that all
+developers are using consistent, locked versions of dependencies regardless of their machine.
+
+The uv lockfile also supports concepts like
+[pinning packages to specific indexes](../../concepts/indexes.md#pinning-a-package-to-an-index),
+which is not representable in `requirements.txt` files.
+
+!!! tip
+
+    If you only need to lock for a subset of platforms, use the
+    [`tool.uv.environments`](../../concepts/resolution.md#limited-resolution-environments) setting
+    to limit the resolution and lockfile.
+
+To learn more, see the [lockfile](../../concepts/projects/layout.md#the-lockfile) documentation.
+
+### Importing requirements files
+
+First, create a `pyproject.toml` if you have not already:
+
+```console
+$ uv init
+```
+
+Then, the easiest way to import requirements is with `uv add`:
+
+```console
+$ uv add -r requirements.in
+```
+
+However, there is some nuance to this transition. Notice we used the `requirements.in` file, which
+does not pin to exact versions of packages, so uv will solve for new versions of these packages.
+You may want to continue using your previously locked versions from your `requirements.txt`, so
+that when switching over to uv, none of your dependency versions change.
+
+The solution is to add your locked versions as _constraints_. uv supports using these on `add` to
+preserve locked versions:
+
+```console
+$ uv add -r requirements.in -c requirements.txt
+```
+
+Your existing versions will be retained when producing a `uv.lock` file.
+
+#### Importing platform-specific constraints
+
+If your platform-specific dependencies have been compiled into separate files, you can still
+transition to a universal lockfile. However, you cannot just use `-c` to specify constraints from
+your existing platform-specific `requirements.txt` files because they do not include markers
+describing the environment and will consequently conflict.
+
+To add the necessary markers, use `uv pip compile` to convert your existing files. For example,
+given the following:
+
+```python title="requirements-win.txt"
+colorama==0.4.6
+    # via tqdm
+tqdm==4.67.1
+    # via -r requirements.in
+```
+
+The markers can be added with:
+
+```console
+$ uv pip compile requirements.in -o requirements-win.txt --python-platform windows --no-strip-markers
+```
+
+Notice the resulting output includes a Windows marker on `colorama`:
+
+```python title="requirements-win.txt"
+colorama==0.4.6 ; sys_platform == 'win32'
+    # via tqdm
+tqdm==4.67.1
+    # via -r requirements.in
+```
+
+When using `-o`, uv will constrain the versions to match the existing output file, if it can.
+
+Markers can be added for other platforms by changing the `--python-platform` and `-o` values for
+each requirements file you need to import, e.g., to `linux` and `macos`.
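+
+For example, the Linux counterpart of the Windows command above might look like the following
+sketch (only the output path and platform change):
+
+```console
+$ uv pip compile requirements.in -o requirements-linux.txt --python-platform linux --no-strip-markers
+```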
+ +Once each `requirements.txt` file has been transformed, the dependencies can be imported to the +`pyproject.toml` and `uv.lock` with `uv add`: + +```console +$ uv add -r requirements.in -c requirements-win.txt -c requirements-linux.txt +``` + +#### Importing development dependency files + +As discussed in the [development dependencies](#development-dependencies) section, it's common to +have groups of dependencies for development purposes. + +To import development dependencies, use the `--dev` flag during `uv add`: + +```console +$ uv add --dev -r requirements-dev.in -c requirements-dev.txt +``` + +If the `requirements-dev.in` includes the parent `requirements.in` via `-r`, it will need to be +stripped to avoid adding the base requirements to the `dev` dependency group. The following example +uses `sed` to strip lines that start with `-r`, then pipes the result to `uv add`: + +```console +$ sed '/^-r /d' requirements-dev.in | uv add --dev -r - -c requirements-dev.txt +``` + +In addition to the `dev` dependency group, uv supports arbitrary group names. For example, if you +also have a dedicated set of dependencies for building your documentation, those can be imported to +a `docs` group: + +```console +$ uv add -r requirements-docs.in -c requirements-docs.txt --group docs +``` + +### Project environments + +Unlike `pip`, uv is not centered around the concept of an "active" virtual environment. Instead, uv +uses a dedicated virtual environment for each project in a `.venv` directory. This environment is +automatically managed, so when you run a command, like `uv add`, the environment is synced with the +project dependencies. + +The preferred way to execute commands in the environment is with `uv run`, e.g.: + +```console +$ uv run pytest +``` + +Prior to every `uv run` invocation, uv will verify that the lockfile is up-to-date with the +`pyproject.toml`, and that the environment is up-to-date with the lockfile, keeping your project +in-sync without the need for manual intervention. `uv run` guarantees that your command is run in a +consistent, locked environment. + +The project environment can also be explicitly created with `uv sync`, e.g., for use with editors. + +!!! note + + When in projects, uv will prefer a `.venv` in the project directory and ignore the active + environment as declared by the `VIRTUAL_ENV` variable by default. You can opt-in to using the + active environment with the `--active` flag. + +To learn more, see the +[project environment](../../concepts/projects/layout.md#the-project-environment) documentation. + +## Next steps + +Now that you've migrated to uv, take a look at the +[project concept](../../concepts/projects/index.md) page for more details about uv projects. diff --git a/docs/guides/package.md b/docs/guides/package.md index 0914d5750..ce5bae7f9 100644 --- a/docs/guides/package.md +++ b/docs/guides/package.md @@ -31,8 +31,8 @@ the effect of declaring a build system in the This setting makes PyPI reject your uploaded package from publishing. It does not affect security or privacy settings on alternative registries. - We also recommend only generating per-project tokens: Without a PyPI token matching the project, - it can't be accidentally published. + We also recommend only generating [per-project PyPI API tokens](https://pypi.org/help/#apitoken): + Without a PyPI token matching the project, it can't be accidentally published. 
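+
+For example, a per-project token can be passed directly when publishing; a sketch, where
+`$PYPI_TOKEN` stands in for a token generated for this specific project:
+
+```console
+$ uv publish --token "$PYPI_TOKEN"
+```
+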
## Building your package diff --git a/docs/guides/scripts.md b/docs/guides/scripts.md index 7142db155..26d85e76d 100644 --- a/docs/guides/scripts.md +++ b/docs/guides/scripts.md @@ -241,10 +241,12 @@ Declaration of dependencies is also supported in this context, for example: ```python title="example" #!/usr/bin/env -S uv run --script +# # /// script # requires-python = ">=3.12" # dependencies = ["httpx"] # /// + import httpx print(httpx.get("https://example.com")) diff --git a/docs/reference/cli.md b/docs/reference/cli.md index d434b954b..82fe0fa3d 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -123,6 +123,7 @@ uv run [OPTIONS] [COMMAND]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -479,6 +480,7 @@ uv add [OPTIONS] >
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -663,6 +665,7 @@ uv remove [OPTIONS] ...
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -832,6 +835,7 @@ uv version [OPTIONS] [VALUE]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -1022,6 +1026,7 @@ uv sync [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -1210,6 +1215,7 @@ uv lock [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -1383,6 +1389,7 @@ uv export [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -1568,6 +1575,7 @@ uv tree [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -1827,6 +1835,7 @@ uv tool run [OPTIONS] [COMMAND]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -1997,6 +2006,7 @@ uv tool install [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -2157,6 +2167,7 @@ uv tool upgrade [OPTIONS] ...
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

+Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -2559,6 +2570,7 @@ uv python [OPTIONS]
uv python list

List the available Python installations

uv python install

Download and install Python versions

+uv python upgrade

+Upgrade installed Python versions to the latest supported patch release (requires the --preview flag)

uv python find

Search for a Python installation

uv python pin

Pin to a specific Python version

uv python dir

Show the uv Python installation directory

@@ -2753,6 +2765,91 @@ uv python install [OPTIONS] [TARGETS]...

You can configure fine-grained logging using the RUST_LOG environment variable. (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives)

+### uv python upgrade + +Upgrade installed Python versions to the latest supported patch release (requires the `--preview` flag). + +A target Python minor version to upgrade may be provided, e.g., `3.13`. Multiple versions may be provided to perform more than one upgrade. + +If no target version is provided, then uv will upgrade all managed CPython versions. + +During an upgrade, uv will not uninstall outdated patch versions. + +When an upgrade is performed, virtual environments created by uv will automatically use the new version. However, if the virtual environment was created before the upgrade functionality was added, it will continue to use the old Python version; to enable upgrades, the environment must be recreated. + +Upgrades are not yet supported for alternative implementations, like PyPy. + +

Usage

+ +``` +uv python upgrade [OPTIONS] [TARGETS]... +``` + +

Arguments

+ +
TARGETS

The Python minor version(s) to upgrade.

+

If no target version is provided, then uv will upgrade all managed CPython versions.

+
+ +

Options

+ +
--allow-insecure-host, --trusted-host allow-insecure-host

Allow insecure connections to a host.

+

Can be provided multiple times.

+

Expects to receive either a hostname (e.g., localhost), a host-port pair (e.g., localhost:8080), or a URL (e.g., https://localhost).

+

WARNING: Hosts included in this list will not be verified against the system's certificate store. Only use --allow-insecure-host in a secure network with verified sources, as it bypasses SSL verification and could expose you to MITM attacks.

+

May also be set with the UV_INSECURE_HOST environment variable.

--cache-dir cache-dir

Path to the cache directory.

+

Defaults to $XDG_CACHE_HOME/uv or $HOME/.cache/uv on macOS and Linux, and %LOCALAPPDATA%\uv\cache on Windows.

+

To view the location of the cache directory, run uv cache dir.

+

May also be set with the UV_CACHE_DIR environment variable.

--color color-choice

Control the use of color in output.

+

By default, uv will automatically detect support for colors when writing to a terminal.

+

Possible values:

+
  • auto: Enables colored output only when the output is going to a terminal or TTY with support
  • always: Enables colored output regardless of the detected environment
  • never: Disables colored output
--config-file config-file

The path to a uv.toml file to use for configuration.

+

While uv configuration can be included in a pyproject.toml file, it is not allowed in this context.

+

May also be set with the UV_CONFIG_FILE environment variable.

--directory directory

Change to the given directory prior to running the command.

+

Relative paths are resolved with the given directory as the base.

+

See --project to only change the project root directory.

+
--help, -h

Display the concise help for this command

+
--install-dir, -i install-dir

The directory Python installations are stored in.

+

If provided, UV_PYTHON_INSTALL_DIR will need to be set for subsequent operations for uv to discover the Python installation.

+

See uv python dir to view the current Python installation directory. Defaults to ~/.local/share/uv/python.

+

May also be set with the UV_PYTHON_INSTALL_DIR environment variable.

--managed-python

Require use of uv-managed Python versions.

+

By default, uv prefers using Python versions it manages. However, it will use system Python versions if a uv-managed Python is not installed. This option disables use of system Python versions.

+

May also be set with the UV_MANAGED_PYTHON environment variable.

--mirror mirror

Set the URL to use as the source for downloading Python installations.

+

The provided URL will replace https://github.com/astral-sh/python-build-standalone/releases/download in, e.g., https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz.

+

Distributions can be read from a local directory by using the file:// URL scheme.

+

May also be set with the UV_PYTHON_INSTALL_MIRROR environment variable.

--native-tls

Whether to load TLS certificates from the platform's native certificate store.

+

By default, uv loads certificates from the bundled webpki-roots crate. The webpki-roots are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).

+

However, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.

+

May also be set with the UV_NATIVE_TLS environment variable.

--no-cache, --no-cache-dir, -n

Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation

+

May also be set with the UV_NO_CACHE environment variable.

--no-config

Avoid discovering configuration files (pyproject.toml, uv.toml).

+

Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.

+

May also be set with the UV_NO_CONFIG environment variable.

--no-managed-python

Disable use of uv-managed Python versions.

+

Instead, uv will search for a suitable Python version on the system.

+

May also be set with the UV_NO_MANAGED_PYTHON environment variable.

--no-progress

Hide all progress outputs.

+

For example, spinners or progress bars.

+

May also be set with the UV_NO_PROGRESS environment variable.

--no-python-downloads

Disable automatic downloads of Python.

--offline

Disable network access.

When disabled, uv will only use locally cached data and locally available files.

May also be set with the UV_OFFLINE environment variable.
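
A sketch of the persistent equivalent, assuming an offline key that mirrors this flag:

```toml
# uv.toml — sketch: never touch the network; resolution and installs
# succeed only if the cache and local files can satisfy them.
offline = true
```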

--project project

Run the command within the given project directory.

All pyproject.toml, uv.toml, and .python-version files will be discovered by walking up the directory tree from the project root, as will the project's virtual environment (.venv).

Other command-line arguments (such as relative paths) will be resolved relative to the current working directory.

See --directory to change the working directory entirely.

This setting has no effect when used in the uv pip interface.

May also be set with the UV_PROJECT environment variable.

--pypy-mirror pypy-mirror

Set the URL to use as the source for downloading PyPy installations.

The provided URL will replace https://downloads.python.org/pypy in, e.g., https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2.

Distributions can be read from a local directory by using the file:// URL scheme.

May also be set with the UV_PYPY_INSTALL_MIRROR environment variable.
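
As with --mirror above, a settings-file counterpart is sketched below; the pypy-install-mirror key is assumed, and the local path is a placeholder using the file:// scheme mentioned above.

```toml
# uv.toml — sketch: read PyPy distributions from a local directory.
pypy-install-mirror = "file:///opt/mirrors/pypy"
```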

--python-downloads-json-url python-downloads-json-url

URL pointing to JSON of custom Python installations.

Note that currently, only local paths are supported.

May also be set with the UV_PYTHON_DOWNLOADS_JSON_URL environment variable.

--quiet, -q

Use quiet output.

Repeating this option, e.g., -qq, will enable a silent mode in which uv will write no output to stdout.

--verbose, -v

Use verbose output.

You can configure fine-grained logging using the RUST_LOG environment variable (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

### uv python find

Search for a Python installation.

@@ -3166,6 +3263,7 @@ uv pip compile [OPTIONS] >

--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.
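
The persistent form of an extra index uses the [[tool.uv.index]] table, as in the sketch below (the name and URL are hypothetical); the table shape matches the example embedded in the uv.schema.json portion of this diff.

```toml
# pyproject.toml — sketch: an additional index, consulted before the
# default index (PyPI) during resolution.
[[tool.uv.index]]
name = "internal"
url = "https://pypi.example.com/simple"
```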

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.
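
For example, the strategy can be set persistently; the unsafe-best-match value below is assumed from uv's settings reference and, as the prose above notes, trades away the first-index protection against dependency confusion.

```toml
# uv.toml — sketch: consider every configured index and pick the best
# candidate version, instead of stopping at the first matching index.
index-strategy = "unsafe-best-match"
```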

Possible values:

@@ -3349,6 +3447,23 @@ by --python-version.

  • cu91: Use the PyTorch index for CUDA 9.1
  • cu90: Use the PyTorch index for CUDA 9.0
  • cu80: Use the PyTorch index for CUDA 8.0
  • rocm6.3: Use the PyTorch index for ROCm 6.3
  • rocm6.2.4: Use the PyTorch index for ROCm 6.2.4
  • rocm6.2: Use the PyTorch index for ROCm 6.2
  • rocm6.1: Use the PyTorch index for ROCm 6.1
  • rocm6.0: Use the PyTorch index for ROCm 6.0
  • rocm5.7: Use the PyTorch index for ROCm 5.7
  • rocm5.6: Use the PyTorch index for ROCm 5.6
  • rocm5.5: Use the PyTorch index for ROCm 5.5
  • rocm5.4.2: Use the PyTorch index for ROCm 5.4.2
  • rocm5.4: Use the PyTorch index for ROCm 5.4
  • rocm5.3: Use the PyTorch index for ROCm 5.3
  • rocm5.2: Use the PyTorch index for ROCm 5.2
  • rocm5.1.1: Use the PyTorch index for ROCm 5.1.1
  • rocm4.2: Use the PyTorch index for ROCm 4.2
  • rocm4.1: Use the PyTorch index for ROCm 4.1
  • rocm4.0.1: Use the PyTorch index for ROCm 4.0.1
  • xpu: Use the PyTorch index for Intel XPU

--universal

Perform a universal resolution, attempting to generate a single requirements.txt output file that is compatible with all operating systems, architectures, and Python implementations.

In universal mode, the current Python version (or user-provided --python-version) will be treated as a lower bound. For example, --universal --python-version 3.7 would produce a universal resolution for Python 3.7 and later.

Implies --no-strip-markers.
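
A sketch of the equivalent persistent setting, assuming the pip subtable's universal key mirrors this flag:

```toml
# uv.toml — sketch: make pip-interface resolutions universal by default.
[pip]
universal = true
```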

@@ -3428,6 +3543,7 @@ uv pip sync [OPTIONS] ...

--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -3590,6 +3706,23 @@ be used with caution, as it can modify the system Python installation.

  • cu91: Use the PyTorch index for CUDA 9.1
  • cu90: Use the PyTorch index for CUDA 9.0
  • cu80: Use the PyTorch index for CUDA 8.0
  • rocm6.3: Use the PyTorch index for ROCm 6.3
  • rocm6.2.4: Use the PyTorch index for ROCm 6.2.4
  • rocm6.2: Use the PyTorch index for ROCm 6.2
  • rocm6.1: Use the PyTorch index for ROCm 6.1
  • rocm6.0: Use the PyTorch index for ROCm 6.0
  • rocm5.7: Use the PyTorch index for ROCm 5.7
  • rocm5.6: Use the PyTorch index for ROCm 5.6
  • rocm5.5: Use the PyTorch index for ROCm 5.5
  • rocm5.4.2: Use the PyTorch index for ROCm 5.4.2
  • rocm5.4: Use the PyTorch index for ROCm 5.4
  • rocm5.3: Use the PyTorch index for ROCm 5.3
  • rocm5.2: Use the PyTorch index for ROCm 5.2
  • rocm5.1.1: Use the PyTorch index for ROCm 5.1.1
  • rocm4.2: Use the PyTorch index for ROCm 4.2
  • rocm4.1: Use the PyTorch index for ROCm 4.1
  • rocm4.0.1: Use the PyTorch index for ROCm 4.0.1
  • xpu: Use the PyTorch index for Intel XPU

--verbose, -v

Use verbose output.

You can configure fine-grained logging using the RUST_LOG environment variable (https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives).

@@ -3675,6 +3808,7 @@ uv pip install [OPTIONS] |--editable
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -3864,6 +3998,23 @@ should be used with caution, as it can modify the system Python installation.

  • cu91: Use the PyTorch index for CUDA 9.1
  • cu90: Use the PyTorch index for CUDA 9.0
  • cu80: Use the PyTorch index for CUDA 8.0
  • rocm6.3: Use the PyTorch index for ROCm 6.3
  • rocm6.2.4: Use the PyTorch index for ROCm 6.2.4
  • rocm6.2: Use the PyTorch index for ROCm 6.2
  • rocm6.1: Use the PyTorch index for ROCm 6.1
  • rocm6.0: Use the PyTorch index for ROCm 6.0
  • rocm5.7: Use the PyTorch index for ROCm 5.7
  • rocm5.6: Use the PyTorch index for ROCm 5.6
  • rocm5.5: Use the PyTorch index for ROCm 5.5
  • rocm5.4.2: Use the PyTorch index for ROCm 5.4.2
  • rocm5.4: Use the PyTorch index for ROCm 5.4
  • rocm5.3: Use the PyTorch index for ROCm 5.3
  • rocm5.2: Use the PyTorch index for ROCm 5.2
  • rocm5.1.1: Use the PyTorch index for ROCm 5.1.1
  • rocm4.2: Use the PyTorch index for ROCm 4.2
  • rocm4.1: Use the PyTorch index for ROCm 4.1
  • rocm4.0.1: Use the PyTorch index for ROCm 4.0.1
  • xpu: Use the PyTorch index for Intel XPU

--upgrade, -U

Allow package upgrades, ignoring pinned versions in any existing output file. Implies --refresh.

--upgrade-package, -P upgrade-package

Allow upgrades for a specific package, ignoring pinned versions in any existing output file. Implies --refresh-package.

--user

--verbose, -v

Use verbose output.

@@ -4077,6 +4228,7 @@ uv pip list [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -4250,6 +4402,7 @@ uv pip tree [OPTIONS]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -4437,6 +4590,7 @@ uv venv [OPTIONS] [PATH]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

@@ -4587,6 +4741,7 @@ uv build [OPTIONS] [SRC]
--index index

The URLs to use when resolving dependencies, in addition to the default index.

Accepts either a repository compliant with PEP 503 (the simple repository API), or a local directory laid out in the same format.

All indexes provided via this flag take priority over the index specified by --default-index (which defaults to PyPI). When multiple --index flags are provided, earlier values take priority.

Index names are not supported as values. Relative paths must be disambiguated from index names with ./ or ../ on Unix or .\\, ..\\, ./ or ../ on Windows.

May also be set with the UV_INDEX environment variable.

--index-strategy index-strategy

The strategy to use when resolving against multiple index URLs.

By default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (first-index). This prevents "dependency confusion" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.

May also be set with the UV_INDEX_STRATEGY environment variable.

Possible values:

diff --git a/docs/reference/settings.md b/docs/reference/settings.md
index c203bcc71..58948c80e 100644
--- a/docs/reference/settings.md
+++ b/docs/reference/settings.md
@@ -127,6 +127,31 @@ default-groups = ["docs"]
 
 ---
 
+### [`dependency-groups`](#dependency-groups) {: #dependency-groups }
+
+Additional settings for `dependency-groups`.
+
+Currently this can only be used to add `requires-python` constraints
+to dependency groups (typically to inform uv that your dev tooling
+has a higher python requirement than your actual project).
+
+This cannot be used to define dependency groups, use the top-level
+`[dependency-groups]` table for that.
+
+**Default value**: `[]`
+
+**Type**: `dict`
+
+**Example usage**:
+
+```toml title="pyproject.toml"
+
+[tool.uv.dependency-groups]
+my-group = {requires-python = ">=3.12"}
+```
+
+---
+
 ### [`dev-dependencies`](#dev-dependencies) {: #dev-dependencies }
 
 The project's development dependencies.
@@ -371,10 +396,6 @@ pydantic = { path = "/path/to/pydantic", editable = true }
 
 Settings for the uv build backend (`uv_build`).
 
-!!! note
-
-    The uv build backend is currently in preview and may change in any future release.
-
 Note that those settings only apply when using the `uv_build` backend, other build backends (such as hatchling) have their own configuration.
diff --git a/mkdocs.template.yml b/mkdocs.template.yml
index 0b2ee6623..69a299b5b 100644
--- a/mkdocs.template.yml
+++ b/mkdocs.template.yml
@@ -174,6 +174,9 @@ nav:
       - Using tools: guides/tools.md
       - Working on projects: guides/projects.md
       - Publishing packages: guides/package.md
+      - Migration:
+          - guides/migration/index.md
+          - From pip to a uv project: guides/migration/pip-to-project.md
       - Integrations:
           - guides/integration/index.md
           - Docker: guides/integration/docker.md
diff --git a/pyproject.toml b/pyproject.toml
index d7bc4d805..5d4261360 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "maturin"
 
 [project]
 name = "uv"
-version = "0.7.13"
+version = "0.7.19"
 description = "An extremely fast Python package and project manager, written in Rust."
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8" diff --git a/ruff.toml b/ruff.toml index 8aac77263..7c6488a1e 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,10 +1,10 @@ -target-version = "py37" +target-version = "py312" exclude = [ "crates/uv-virtualenv/src/activator/activate_this.py", "crates/uv-virtualenv/src/_virtualenv.py", - "crates/uv-python/python", "ecosystem", "scripts/workspaces", + "scripts/packages", ] [lint] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index e7f22fb8b..c95c90571 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.86" +channel = "1.88" diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 000000000..f3e454b61 --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,2 @@ +edition = "2024" +style_edition = "2024" diff --git a/scripts/packages/flit_editable/.gitignore b/scripts/packages/flit_editable/.gitignore new file mode 100644 index 000000000..3a8816c9e --- /dev/null +++ b/scripts/packages/flit_editable/.gitignore @@ -0,0 +1,162 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm-project.org/#use-with-ide +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/scripts/packages/flit_editable/flit_editable/__init__.py b/scripts/packages/flit_editable/flit_editable/__init__.py new file mode 100644 index 000000000..4076f6c86 --- /dev/null +++ b/scripts/packages/flit_editable/flit_editable/__init__.py @@ -0,0 +1,6 @@ +def main(): + print("Hello world!") + + +if __name__ == "__main__": + main() diff --git a/scripts/packages/flit_editable/pyproject.toml b/scripts/packages/flit_editable/pyproject.toml new file mode 100644 index 000000000..02f431543 --- /dev/null +++ b/scripts/packages/flit_editable/pyproject.toml @@ -0,0 +1,17 @@ +[project] +name = "flit-editable" +version = "0.1.0" +description = "Example Flit project" +authors = [ + {name = "konstin", email = "konstin@mailbox.org"}, +] +dependencies = [] +requires-python = ">=3.11" +license = {text = "MIT"} + +[project.scripts] +flit-editable = "flit_editable:main" + +[build-system] +requires = ["flit_core>=3.4,<4"] +build-backend = "flit_core.buildapi" diff --git a/scripts/publish/test_publish.py b/scripts/publish/test_publish.py index 620a08e61..c2c35fe90 100644 --- a/scripts/publish/test_publish.py +++ b/scripts/publish/test_publish.py @@ -163,39 +163,37 @@ all_targets: dict[str, TargetConfiguration] = local_targets | { } -def get_latest_version(project_name: str, client: httpx.Client) -> Version: +def get_latest_version(target: str, client: httpx.Client) -> Version: """Return the latest version on all indexes of the package.""" # To keep the number of packages small we reuse them across targets, so we have to # pick a version that doesn't exist on any target yet versions = set() - for target_config in all_targets.values(): - if target_config.project_name != project_name: - continue - url = target_config.index_url + project_name + "/" + target_config = all_targets[target] + url = target_config.index_url + target_config.project_name + "/" - # Get with retries - error = None - for _ in range(5): - try: - versions.update(collect_versions(url, client)) - break - except httpx.HTTPError as err: - error = err - print( - f"Error getting version for {project_name}, sleeping for 1s: {err}", - file=sys.stderr, - ) - time.sleep(1) - except InvalidSdistFilename as err: - # Sometimes there's a link that says "status page" - error = err - print( - f"Invalid index page for {project_name}, sleeping for 1s: {err}", - file=sys.stderr, - ) - time.sleep(1) - else: - raise RuntimeError(f"Failed to fetch {url}") from error + # Get with retries + error = None + for _ in range(5): + try: + versions.update(collect_versions(url, client)) + break + except httpx.HTTPError as err: + error = err + print( + 
f"Error getting version for {target_config.project_name}, sleeping for 1s: {err}", + file=sys.stderr, + ) + time.sleep(1) + except InvalidSdistFilename as err: + # Sometimes there's a link that says "status page" + error = err + print( + f"Invalid index page for {target_config.project_name}, sleeping for 1s: {err}", + file=sys.stderr, + ) + time.sleep(1) + else: + raise RuntimeError(f"Failed to fetch {url}") from error return max(versions) @@ -223,7 +221,7 @@ def get_filenames(url: str, client: httpx.Client) -> list[str]: response = client.get(url) data = response.text # Works for the indexes in the list - href_text = r"([^<>]+)" + href_text = r"([^<>]+)" return [m.group(1) for m in re.finditer(href_text, data)] @@ -363,7 +361,7 @@ def publish_project(target: str, uv: Path, client: httpx.Client): print(f"\nPublish {project_name} for {target}", file=sys.stderr) # The distributions are build to the dist directory of the project. - previous_version = get_latest_version(project_name, client) + previous_version = get_latest_version(target, client) version = get_new_version(previous_version) project_dir = build_project_at_version(target, version, uv) diff --git a/scripts/registries-test.py b/scripts/registries-test.py new file mode 100644 index 000000000..2d4c1d2aa --- /dev/null +++ b/scripts/registries-test.py @@ -0,0 +1,422 @@ +#!/usr/bin/env python3 +""" +Test `uv add` against multiple Python package registries. + +This script looks for environment variables that configure registries for testing. +To configure a registry, set the following environment variables: + + `UV_TEST__URL` URL for the registry + `UV_TEST__TOKEN` authentication token + +The username defaults to "__token__" but can be optionally set with: + `UV_TEST__USERNAME` + +The package to install defaults to "astral-registries-test-pkg" but can be optionally +set with: + `UV_TEST__PKG` + +Keep in mind that some registries can fall back to PyPI internally, so make sure +you choose a package that only exists in the registry you are testing. + +You can also use the 1Password CLI to fetch registry credentials from a vault by passing +the `--use-op` flag. For each item in the vault named `UV_TEST_XXX`, the script will set +env vars for any of the following fields, if present: + `UV_TEST__USERNAME` from the `username` field + `UV_TEST__TOKEN` from the `password` field + `UV_TEST__URL` from a field with the label `url` + `UV_TEST__PKG` from a field with the label `pkg` + +# /// script +# requires-python = ">=3.12" +# dependencies = ["colorama>=0.4.6"] +# /// +""" + +import argparse +import json +import os +import re +import subprocess +import sys +import tempfile +from pathlib import Path +from typing import Dict + +import colorama +from colorama import Fore + + +def initialize_colorama(force_color=False): + colorama.init(strip=not force_color, autoreset=True) + + +cwd = Path(__file__).parent + +DEFAULT_TIMEOUT = 30 +DEFAULT_PKG_NAME = "astral-registries-test-pkg" + +KNOWN_REGISTRIES = [ + "artifactory", + "azure", + "aws", + "cloudsmith", + "gcp", + "gemfury", + "gitlab", +] + + +def fetch_op_items(vault_name: str, env: Dict[str, str]) -> Dict[str, str]: + """Fetch items from the specified 1Password vault and add them to the environment. + + For each item named UV_TEST_XXX in the vault: + - Set `UV_TEST_XXX_USERNAME` to the `username` field + - Set `UV_TEST_XXX_TOKEN` to the `password` field + - Set `UV_TEST_XXX_URL` to the `url` field + + Raises exceptions for any 1Password CLI errors so they can be handled by the caller. 
+ """ + # Run 'op item list' to get all items in the vault + result = subprocess.run( + ["op", "item", "list", "--vault", vault_name, "--format", "json"], + capture_output=True, + text=True, + check=True, + ) + + items = json.loads(result.stdout) + updated_env = env.copy() + + for item in items: + item_id = item["id"] + item_title = item["title"] + + # Only process items that match the registry naming pattern + if item_title.startswith("UV_TEST_"): + # Extract the registry name (e.g., "AWS" from "UV_TEST_AWS") + registry_name = item_title.removeprefix("UV_TEST_") + + # Get the item details + item_details = subprocess.run( + ["op", "item", "get", item_id, "--format", "json"], + capture_output=True, + text=True, + check=True, + ) + + item_data = json.loads(item_details.stdout) + + username = None + password = None + url = None + pkg = None + + if "fields" in item_data: + for field in item_data["fields"]: + if field.get("id") == "username": + username = field.get("value") + elif field.get("id") == "password": + password = field.get("value") + elif field.get("label") == "url": + url = field.get("value") + elif field.get("label") == "pkg": + pkg = field.get("value") + if username: + updated_env[f"UV_TEST_{registry_name}_USERNAME"] = username + if password: + updated_env[f"UV_TEST_{registry_name}_TOKEN"] = password + if url: + updated_env[f"UV_TEST_{registry_name}_URL"] = url + if pkg: + updated_env[f"UV_TEST_{registry_name}_PKG"] = pkg + + print(f"Added 1Password credentials for {registry_name}") + + return updated_env + + +def get_registries(env: Dict[str, str]) -> Dict[str, str]: + pattern = re.compile(r"^UV_TEST_(.+)_URL$") + registries: Dict[str, str] = {} + + for env_var, value in env.items(): + match = pattern.match(env_var) + if match: + registry_name = match.group(1).lower() + registries[registry_name] = value + + return registries + + +def setup_test_project( + registry_name: str, registry_url: str, project_dir: str, requires_python: str +): + """Create a temporary project directory with a pyproject.toml""" + pyproject_content = f"""[project] +name = "{registry_name}-test" +version = "0.1.0" +description = "Test registry" +requires-python = ">={requires_python}" + +[[tool.uv.index]] +name = "{registry_name}" +url = "{registry_url}" +default = true +""" + pyproject_file = Path(project_dir) / "pyproject.toml" + pyproject_file.write_text(pyproject_content) + + +def run_test( + env: dict[str, str], + uv: Path, + registry_name: str, + registry_url: str, + package: str, + username: str, + token: str, + verbosity: int, + timeout: int, + requires_python: str, +) -> bool: + print(uv) + """Attempt to install a package from this registry.""" + print( + f"{registry_name} -- Running test for {registry_url} with username {username}" + ) + if package == DEFAULT_PKG_NAME: + print( + f"** Using default test package name: {package}. 
To choose a different package, set UV_TEST_{registry_name.upper()}_PKG" + ) + print(f"\nAttempting to install {package}") + env[f"UV_INDEX_{registry_name.upper()}_USERNAME"] = username + env[f"UV_INDEX_{registry_name.upper()}_PASSWORD"] = token + + with tempfile.TemporaryDirectory() as project_dir: + setup_test_project(registry_name, registry_url, project_dir, requires_python) + + cmd = [ + uv, + "add", + package, + "--directory", + project_dir, + ] + if verbosity: + cmd.extend(["-" + "v" * verbosity]) + + result = None + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=timeout, + check=False, + env=env, + ) + + if result.returncode != 0: + error_msg = result.stderr.strip() if result.stderr else "Unknown error" + print(f"{Fore.RED}{registry_name}: FAIL{Fore.RESET} \n\n{error_msg}") + return False + + success = False + for line in result.stderr.strip().split("\n"): + if line.startswith(f" + {package}=="): + success = True + if success: + print(f"{Fore.GREEN}{registry_name}: PASS") + if verbosity > 0: + print(f" stdout: {result.stdout.strip()}") + print(f" stderr: {result.stderr.strip()}") + return True + else: + print( + f"{Fore.RED}{registry_name}: FAIL{Fore.RESET} - Failed to install {package}." + ) + + except subprocess.TimeoutExpired: + print(f"{Fore.RED}{registry_name}: TIMEOUT{Fore.RESET} (>{timeout}s)") + except FileNotFoundError: + print(f"{Fore.RED}{registry_name}: ERROR{Fore.RESET} - uv not found") + except Exception as e: + print(f"{Fore.RED}{registry_name}: ERROR{Fore.RESET} - {e}") + + if result: + if result.stdout: + print(f"{Fore.RED} stdout:{Fore.RESET} {result.stdout.strip()}") + if result.stderr: + print(f"\n{Fore.RED} stderr:{Fore.RESET} {result.stderr.strip()}") + return False + + +def parse_args() -> argparse.Namespace: + """Parse command line arguments""" + parser = argparse.ArgumentParser( + description="Test uv add command against multiple registries", + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument( + "--all", + action="store_true", + help="fail if any known registry was not tested", + ) + parser.add_argument( + "--uv", + type=str, + help="specify a path to the uv binary (default: uv command)", + ) + parser.add_argument( + "--timeout", + type=int, + default=os.environ.get("UV_TEST_TIMEOUT", DEFAULT_TIMEOUT), + help=f"timeout in seconds for each test (default: {DEFAULT_TIMEOUT} or UV_TEST_TIMEOUT)", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="increase verbosity (-v for debug, -vv for trace)", + ) + parser.add_argument( + "--use-op", + action="store_true", + help="use 1Password CLI to fetch registry credentials from the specified vault", + ) + parser.add_argument( + "--op-vault", + type=str, + default="RegistryTests", + help="name of the 1Password vault to use (default: RegistryTests)", + ) + parser.add_argument( + "--required-python", + type=str, + default="3.12", + help="minimum Python version for tests (default: 3.12)", + ) + parser.add_argument("--color", choices=["always", "auto", "never"], default="auto") + return parser.parse_args() + + +def main() -> None: + args = parse_args() + env = os.environ.copy() + + if args.color == "always": + initialize_colorama(force_color=True) + elif args.color == "never": + initialize_colorama(force_color=False) + else: + initialize_colorama(force_color=sys.stdout.isatty()) + + # If using 1Password, fetch credentials from the vault + if args.use_op: + print(f"Fetching credentials from 1Password vault 
'{args.op_vault}'...") + try: + env = fetch_op_items(args.op_vault, env) + except Exception as e: + print(f"{Fore.RED}Error accessing 1Password: {e}{Fore.RESET}") + print( + f"{Fore.YELLOW}Hint: If you're not authenticated, run 'op signin' first.{Fore.RESET}" + ) + sys.exit(1) + + if args.uv: + # We change the working directory for the subprocess calls, so we have to + # absolutize the path. + uv = Path.cwd().joinpath(args.uv) + else: + subprocess.run(["cargo", "build"]) + executable_suffix = ".exe" if os.name == "nt" else "" + uv = cwd.parent.joinpath(f"target/debug/uv{executable_suffix}") + + passed = [] + failed = [] + skipped = [] + untested_registries = set(KNOWN_REGISTRIES) + + print("Running tests...") + for registry_name, registry_url in get_registries(env).items(): + print("----------------") + + token = env.get(f"UV_TEST_{registry_name.upper()}_TOKEN") + if not token: + if args.all: + print( + f"{Fore.RED}{registry_name}: UV_TEST_{registry_name.upper()}_TOKEN contained no token. Required by --all" + ) + failed.append(registry_name) + else: + print( + f"{Fore.YELLOW}{registry_name}: UV_TEST_{registry_name.upper()}_TOKEN contained no token. Skipping test" + ) + skipped.append(registry_name) + continue + + # The private package we will test installing + package = env.get(f"UV_TEST_{registry_name.upper()}_PKG", DEFAULT_PKG_NAME) + username = env.get(f"UV_TEST_{registry_name.upper()}_USERNAME", "__token__") + + if run_test( + env, + uv, + registry_name, + registry_url, + package, + username, + token, + args.verbose, + args.timeout, + args.required_python, + ): + passed.append(registry_name) + else: + failed.append(registry_name) + + untested_registries.remove(registry_name) + + total = len(passed) + len(failed) + + print("----------------") + if passed: + print(f"\n{Fore.GREEN}Passed:") + for registry_name in passed: + print(f" * {registry_name}") + if failed: + print(f"\n{Fore.RED}Failed:") + for registry_name in failed: + print(f" * {registry_name}") + if skipped: + print(f"\n{Fore.YELLOW}Skipped:") + for registry_name in skipped: + print(f" * {registry_name}") + + print(f"\nResults: {len(passed)}/{total} tests passed, {len(skipped)} skipped") + + if args.all and len(untested_registries) > 0: + print( + f"\n{Fore.RED}Failed to test all known registries (requested via --all).{Fore.RESET}\nMissing:" + ) + for registry_name in untested_registries: + print(f" * {registry_name}") + print("You must use the exact registry name as listed here") + sys.exit(1) + + if total == 0: + print("\nNo tests were run - have you defined at least one registry?") + print(" * UV_TEST__URL") + print(" * UV_TEST__TOKEN") + print( + " * UV_TEST__PKG (the private package to test installing)" + ) + print(' * UV_TEST__USERNAME (defaults to "__token__")') + sys.exit(1) + + sys.exit(0 if len(failed) == 0 else 1) + + +if __name__ == "__main__": + main() diff --git a/scripts/scenarios/templates/compile.mustache b/scripts/scenarios/templates/compile.mustache index aa6db8529..2a3202662 100644 --- a/scripts/scenarios/templates/compile.mustache +++ b/scripts/scenarios/templates/compile.mustache @@ -16,8 +16,8 @@ use predicates::prelude::predicate; use uv_static::EnvVars; use crate::common::{ - build_vendor_links_url, get_bin, packse_index_url, python_path_with_versions, uv_snapshot, - TestContext, + TestContext, build_vendor_links_url, get_bin, packse_index_url, python_path_with_versions, + uv_snapshot, }; /// Provision python binaries and return a `pip compile` command with options shared across all scenarios. 
diff --git a/scripts/scenarios/templates/install.mustache b/scripts/scenarios/templates/install.mustache index 15f48077e..8f1c477b2 100644 --- a/scripts/scenarios/templates/install.mustache +++ b/scripts/scenarios/templates/install.mustache @@ -5,52 +5,20 @@ //! #![cfg(all(feature = "python", feature = "pypi", unix))] -use std::path::Path; use std::process::Command; -use assert_cmd::assert::Assert; -use assert_cmd::prelude::*; - use uv_static::EnvVars; -use crate::common::{ - build_vendor_links_url, get_bin, packse_index_url, uv_snapshot, venv_to_interpreter, - TestContext, -}; - -fn assert_command(venv: &Path, command: &str, temp_dir: &Path) -> Assert { - Command::new(venv_to_interpreter(venv)) - .arg("-c") - .arg(command) - .current_dir(temp_dir) - .assert() -} - -fn assert_installed(venv: &Path, package: &'static str, version: &'static str, temp_dir: &Path) { - assert_command( - venv, - format!("import {package} as package; print(package.__version__, end='')").as_str(), - temp_dir, - ) - .success() - .stdout(version); -} - -fn assert_not_installed(venv: &Path, package: &'static str, temp_dir: &Path) { - assert_command(venv, format!("import {package}").as_str(), temp_dir).failure(); -} +use crate::common::{TestContext, build_vendor_links_url, packse_index_url, uv_snapshot}; /// Create a `pip install` command with options shared across all scenarios. fn command(context: &TestContext) -> Command { - let mut command = Command::new(get_bin()); + let mut command = context.pip_install(); command - .arg("pip") - .arg("install") .arg("--index-url") .arg(packse_index_url()) .arg("--find-links") .arg(build_vendor_links_url()); - context.add_shared_options(&mut command, true); command.env_remove(EnvVars::UV_EXCLUDE_NEWER); command } @@ -93,25 +61,20 @@ fn {{module_name}}() { {{/resolver_options.python_platform}} {{#root.requires}} .arg("{{requirement}}") - {{/root.requires}}, @r###" - "###); + {{/root.requires}}, @r#" + "#); {{#expected.explanation}} // {{expected.explanation}} {{/expected.explanation}} {{#expected.satisfiable}} {{#expected.packages}} - assert_installed( - &context.venv, - "{{module_name}}", - "{{version}}", - &context.temp_dir - ); + context.assert_installed("{{module_name}}", "{{version}}"); {{/expected.packages}} {{/expected.satisfiable}} {{^expected.satisfiable}} {{#root.requires}} - assert_not_installed(&context.venv, "{{module_name}}", &context.temp_dir); + context.assert_not_installed("{{module_name}}"); {{/root.requires}} {{/expected.satisfiable}} } diff --git a/scripts/scenarios/templates/lock.mustache b/scripts/scenarios/templates/lock.mustache index 7d80b8f9b..74deb3764 100644 --- a/scripts/scenarios/templates/lock.mustache +++ b/scripts/scenarios/templates/lock.mustache @@ -15,7 +15,7 @@ use insta::assert_snapshot; use uv_static::EnvVars; -use crate::common::{packse_index_url, TestContext, uv_snapshot}; +use crate::common::{TestContext, packse_index_url, uv_snapshot}; {{#scenarios}} diff --git a/scripts/transform_readme.py b/scripts/transform_readme.py index b8f11fbe0..9d52d6517 100644 --- a/scripts/transform_readme.py +++ b/scripts/transform_readme.py @@ -9,11 +9,10 @@ from __future__ import annotations import argparse import re +import tomllib import urllib.parse from pathlib import Path -import tomllib - # To be kept in sync with: `docs/index.md` URL = "https://github.com/astral-sh/uv/assets/1309177/{}" URL_LIGHT = URL.format("629e59c0-9c6e-4013-9ad4-adb2bcf5080d") diff --git a/scripts/workspaces/albatross-in-example/src/albatross/__init__.py 
b/scripts/workspaces/albatross-in-example/src/albatross/__init__.py index d79aed9cb..764c5ce3f 100644 --- a/scripts/workspaces/albatross-in-example/src/albatross/__init__.py +++ b/scripts/workspaces/albatross-in-example/src/albatross/__init__.py @@ -5,5 +5,5 @@ def fly(): pass -if __name__ == '__main__': +if __name__ == "__main__": print("Caw") diff --git a/uv.schema.json b/uv.schema.json index 97fe9e28e..dbc4f1168 100644 --- a/uv.schema.json +++ b/uv.schema.json @@ -5,7 +5,7 @@ "type": "object", "properties": { "add-bounds": { - "description": "The default version specifier when adding a dependency.\n\nWhen adding a dependency to the project, if no constraint or URL is provided, a constraint is added based on the latest compatible version of the package. By default, a lower bound constraint is used, e.g., `>=1.2.3`.\n\nWhen `--frozen` is provided, no resolution is performed, and dependencies are always added without constraints.\n\nThis option is in preview and may change in any future release.", + "description": "The default version specifier when adding a dependency.\n\nWhen adding a dependency to the project, if no constraint or URL is provided, a constraint\nis added based on the latest compatible version of the package. By default, a lower bound\nconstraint is used, e.g., `>=1.2.3`.\n\nWhen `--frozen` is provided, no resolution is performed, and dependencies are always added\nwithout constraints.\n\nThis option is in preview and may change in any future release.", "anyOf": [ { "$ref": "#/definitions/AddBoundsKind" @@ -16,7 +16,7 @@ ] }, "allow-insecure-host": { - "description": "Allow insecure connections to host.\n\nExpects to receive either a hostname (e.g., `localhost`), a host-port pair (e.g., `localhost:8080`), or a URL (e.g., `https://localhost`).\n\nWARNING: Hosts included in this list will not be verified against the system's certificate store. Only use `--allow-insecure-host` in a secure network with verified sources, as it bypasses SSL verification and could expose you to MITM attacks.", + "description": "Allow insecure connections to host.\n\nExpects to receive either a hostname (e.g., `localhost`), a host-port pair (e.g.,\n`localhost:8080`), or a URL (e.g., `https://localhost`).\n\nWARNING: Hosts included in this list will not be verified against the system's certificate\nstore. 
Only use `--allow-insecure-host` in a secure network with verified sources, as it\nbypasses SSL verification and could expose you to MITM attacks.", "type": [ "array", "null" @@ -26,7 +26,7 @@ } }, "build-backend": { - "description": "Configuration for the uv build backend.\n\nNote that those settings only apply when using the `uv_build` backend, other build backends (such as hatchling) have their own configuration.", + "description": "Configuration for the uv build backend.\n\nNote that those settings only apply when using the `uv_build` backend, other build backends\n(such as hatchling) have their own configuration.", "anyOf": [ { "$ref": "#/definitions/BuildBackendSettings" @@ -47,14 +47,14 @@ } }, "cache-dir": { - "description": "Path to the cache directory.\n\nDefaults to `$XDG_CACHE_HOME/uv` or `$HOME/.cache/uv` on Linux and macOS, and `%LOCALAPPDATA%\\uv\\cache` on Windows.", + "description": "Path to the cache directory.\n\nDefaults to `$XDG_CACHE_HOME/uv` or `$HOME/.cache/uv` on Linux and macOS, and\n`%LOCALAPPDATA%\\uv\\cache` on Windows.", "type": [ "string", "null" ] }, "cache-keys": { - "description": "The keys to consider when caching builds for the project.\n\nCache keys enable you to specify the files or directories that should trigger a rebuild when modified. By default, uv will rebuild a project whenever the `pyproject.toml`, `setup.py`, or `setup.cfg` files in the project directory are modified, or if a `src` directory is added or removed, i.e.:\n\n```toml cache-keys = [{ file = \"pyproject.toml\" }, { file = \"setup.py\" }, { file = \"setup.cfg\" }, { dir = \"src\" }] ```\n\nAs an example: if a project uses dynamic metadata to read its dependencies from a `requirements.txt` file, you can specify `cache-keys = [{ file = \"requirements.txt\" }, { file = \"pyproject.toml\" }]` to ensure that the project is rebuilt whenever the `requirements.txt` file is modified (in addition to watching the `pyproject.toml`).\n\nGlobs are supported, following the syntax of the [`glob`](https://docs.rs/glob/0.3.1/glob/struct.Pattern.html) crate. For example, to invalidate the cache whenever a `.toml` file in the project directory or any of its subdirectories is modified, you can specify `cache-keys = [{ file = \"**/*.toml\" }]`. Note that the use of globs can be expensive, as uv may need to walk the filesystem to determine whether any files have changed.\n\nCache keys can also include version control information. For example, if a project uses `setuptools_scm` to read its version from a Git commit, you can specify `cache-keys = [{ git = { commit = true }, { file = \"pyproject.toml\" }]` to include the current Git commit hash in the cache key (in addition to the `pyproject.toml`). Git tags are also supported via `cache-keys = [{ git = { commit = true, tags = true } }]`.\n\nCache keys can also include environment variables. For example, if a project relies on `MACOSX_DEPLOYMENT_TARGET` or other environment variables to determine its behavior, you can specify `cache-keys = [{ env = \"MACOSX_DEPLOYMENT_TARGET\" }]` to invalidate the cache whenever the environment variable changes.\n\nCache keys only affect the project defined by the `pyproject.toml` in which they're specified (as opposed to, e.g., affecting all members in a workspace), and all paths and globs are interpreted as relative to the project directory.", + "description": "The keys to consider when caching builds for the project.\n\nCache keys enable you to specify the files or directories that should trigger a rebuild when\nmodified. 
By default, uv will rebuild a project whenever the `pyproject.toml`, `setup.py`,\nor `setup.cfg` files in the project directory are modified, or if a `src` directory is\nadded or removed, i.e.:\n\n```toml\ncache-keys = [{ file = \"pyproject.toml\" }, { file = \"setup.py\" }, { file = \"setup.cfg\" }, { dir = \"src\" }]\n```\n\nAs an example: if a project uses dynamic metadata to read its dependencies from a\n`requirements.txt` file, you can specify `cache-keys = [{ file = \"requirements.txt\" }, { file = \"pyproject.toml\" }]`\nto ensure that the project is rebuilt whenever the `requirements.txt` file is modified (in\naddition to watching the `pyproject.toml`).\n\nGlobs are supported, following the syntax of the [`glob`](https://docs.rs/glob/0.3.1/glob/struct.Pattern.html)\ncrate. For example, to invalidate the cache whenever a `.toml` file in the project directory\nor any of its subdirectories is modified, you can specify `cache-keys = [{ file = \"**/*.toml\" }]`.\nNote that the use of globs can be expensive, as uv may need to walk the filesystem to\ndetermine whether any files have changed.\n\nCache keys can also include version control information. For example, if a project uses\n`setuptools_scm` to read its version from a Git commit, you can specify `cache-keys = [{ git = { commit = true }, { file = \"pyproject.toml\" }]`\nto include the current Git commit hash in the cache key (in addition to the\n`pyproject.toml`). Git tags are also supported via `cache-keys = [{ git = { commit = true, tags = true } }]`.\n\nCache keys can also include environment variables. For example, if a project relies on\n`MACOSX_DEPLOYMENT_TARGET` or other environment variables to determine its behavior, you can\nspecify `cache-keys = [{ env = \"MACOSX_DEPLOYMENT_TARGET\" }]` to invalidate the cache\nwhenever the environment variable changes.\n\nCache keys only affect the project defined by the `pyproject.toml` in which they're\nspecified (as opposed to, e.g., affecting all members in a workspace), and all paths and\nglobs are interpreted as relative to the project directory.", "type": [ "array", "null" @@ -64,7 +64,7 @@ } }, "check-url": { - "description": "Check an index URL for existing files to skip duplicate uploads.\n\nThis option allows retrying publishing that failed after only some, but not all files have been uploaded, and handles error due to parallel uploads of the same file.\n\nBefore uploading, the index is checked. If the exact same file already exists in the index, the file will not be uploaded. If an error occurred during the upload, the index is checked again, to handle cases where the identical file was uploaded twice in parallel.\n\nThe exact behavior will vary based on the index. When uploading to PyPI, uploading the same file succeeds even without `--check-url`, while most other indexes error.\n\nThe index must provide one of the supported hashes (SHA-256, SHA-384, or SHA-512).", + "description": "Check an index URL for existing files to skip duplicate uploads.\n\nThis option allows retrying publishing that failed after only some, but not all files have\nbeen uploaded, and handles error due to parallel uploads of the same file.\n\nBefore uploading, the index is checked. If the exact same file already exists in the index,\nthe file will not be uploaded. If an error occurred during the upload, the index is checked\nagain, to handle cases where the identical file was uploaded twice in parallel.\n\nThe exact behavior will vary based on the index. 
When uploading to PyPI, uploading the same\nfile succeeds even without `--check-url`, while most other indexes error.\n\nThe index must provide one of the supported hashes (SHA-256, SHA-384, or SHA-512).", "anyOf": [ { "$ref": "#/definitions/IndexUrl" @@ -75,29 +75,29 @@ ] }, "compile-bytecode": { - "description": "Compile Python files to bytecode after installation.\n\nBy default, uv does not compile Python (`.py`) files to bytecode (`__pycache__/*.pyc`); instead, compilation is performed lazily the first time a module is imported. For use-cases in which start time is critical, such as CLI applications and Docker containers, this option can be enabled to trade longer installation times for faster start times.\n\nWhen enabled, uv will process the entire site-packages directory (including packages that are not being modified by the current operation) for consistency. Like pip, it will also ignore errors.", + "description": "Compile Python files to bytecode after installation.\n\nBy default, uv does not compile Python (`.py`) files to bytecode (`__pycache__/*.pyc`);\ninstead, compilation is performed lazily the first time a module is imported. For use-cases\nin which start time is critical, such as CLI applications and Docker containers, this option\ncan be enabled to trade longer installation times for faster start times.\n\nWhen enabled, uv will process the entire site-packages directory (including packages that\nare not being modified by the current operation) for consistency. Like pip, it will also\nignore errors.", "type": [ "boolean", "null" ] }, "concurrent-builds": { - "description": "The maximum number of source distributions that uv will build concurrently at any given time.\n\nDefaults to the number of available CPU cores.", + "description": "The maximum number of source distributions that uv will build concurrently at any given\ntime.\n\nDefaults to the number of available CPU cores.", "type": [ "integer", "null" ], "format": "uint", - "minimum": 1.0 + "minimum": 1 }, "concurrent-downloads": { - "description": "The maximum number of in-flight concurrent downloads that uv will perform at any given time.", + "description": "The maximum number of in-flight concurrent downloads that uv will perform at any given\ntime.", "type": [ "integer", "null" ], "format": "uint", - "minimum": 1.0 + "minimum": 1 }, "concurrent-installs": { "description": "The number of threads used when installing and unzipping packages.\n\nDefaults to the number of available CPU cores.", @@ -106,10 +106,10 @@ "null" ], "format": "uint", - "minimum": 1.0 + "minimum": 1 }, "config-settings": { - "description": "Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend, specified as `KEY=VALUE` pairs.", + "description": "Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend,\nspecified as `KEY=VALUE` pairs.", "anyOf": [ { "$ref": "#/definitions/ConfigSettings" @@ -151,8 +151,19 @@ } ] }, + "dependency-groups": { + "description": "Additional settings for `dependency-groups`.\n\nCurrently this can only be used to add `requires-python` constraints\nto dependency groups (typically to inform uv that your dev tooling\nhas a higher python requirement than your actual project).\n\nThis cannot be used to define dependency groups, use the top-level\n`[dependency-groups]` table for that.", + "anyOf": [ + { + "$ref": "#/definitions/ToolUvDependencyGroups" + }, + { + "type": "null" + } + ] + }, "dependency-metadata": { - "description": "Pre-defined static metadata for 
dependencies of the project (direct or transitive). When provided, enables the resolver to use the specified metadata instead of querying the registry or building the relevant package from source.\n\nMetadata should be provided in adherence with the [Metadata 2.3](https://packaging.python.org/en/latest/specifications/core-metadata/) standard, though only the following fields are respected:\n\n- `name`: The name of the package. - (Optional) `version`: The version of the package. If omitted, the metadata will be applied to all versions of the package. - (Optional) `requires-dist`: The dependencies of the package (e.g., `werkzeug>=0.14`). - (Optional) `requires-python`: The Python version required by the package (e.g., `>=3.10`). - (Optional) `provides-extras`: The extras provided by the package.", + "description": "Pre-defined static metadata for dependencies of the project (direct or transitive). When\nprovided, enables the resolver to use the specified metadata instead of querying the\nregistry or building the relevant package from source.\n\nMetadata should be provided in adherence with the [Metadata 2.3](https://packaging.python.org/en/latest/specifications/core-metadata/)\nstandard, though only the following fields are respected:\n\n- `name`: The name of the package.\n- (Optional) `version`: The version of the package. If omitted, the metadata will be applied\n to all versions of the package.\n- (Optional) `requires-dist`: The dependencies of the package (e.g., `werkzeug>=0.14`).\n- (Optional) `requires-python`: The Python version required by the package (e.g., `>=3.10`).\n- (Optional) `provides-extras`: The extras provided by the package.", "type": [ "array", "null" @@ -182,7 +193,7 @@ } }, "exclude-newer": { - "description": "Limit candidate packages to those that were uploaded prior to a given point in time.\n\nAccepts a superset of [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) (e.g., `2006-12-02T02:07:43Z`). A full timestamp is required to ensure that the resolver will behave consistently across timezones.", + "description": "Limit candidate packages to those that were uploaded prior to a given point in time.\n\nAccepts a superset of [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) (e.g.,\n`2006-12-02T02:07:43Z`). A full timestamp is required to ensure that the resolver will\nbehave consistently across timezones.", "anyOf": [ { "$ref": "#/definitions/ExcludeNewer" @@ -193,7 +204,7 @@ ] }, "extra-index-url": { - "description": "Extra URLs of package indexes to use, in addition to `--index-url`.\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/) (the simple repository API), or a local directory laid out in the same format.\n\nAll indexes provided via this flag take priority over the index specified by [`index_url`](#index-url) or [`index`](#index) with `default = true`. When multiple indexes are provided, earlier values take priority.\n\nTo control uv's resolution strategy when multiple indexes are present, see [`index_strategy`](#index-strategy).\n\n(Deprecated: use `index` instead.)", + "description": "Extra URLs of package indexes to use, in addition to `--index-url`.\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/)\n(the simple repository API), or a local directory laid out in the same format.\n\nAll indexes provided via this flag take priority over the index specified by\n[`index_url`](#index-url) or [`index`](#index) with `default = true`. 
When multiple indexes\nare provided, earlier values take priority.\n\nTo control uv's resolution strategy when multiple indexes are present, see\n[`index_strategy`](#index-strategy).\n\n(Deprecated: use `index` instead.)", "type": [ "array", "null" @@ -203,7 +214,7 @@ } }, "find-links": { - "description": "Locations to search for candidate distributions, in addition to those found in the registry indexes.\n\nIf a path, the target must be a directory that contains packages as wheel files (`.whl`) or source distributions (e.g., `.tar.gz` or `.zip`) at the top level.\n\nIf a URL, the page must contain a flat list of links to package files adhering to the formats described above.", + "description": "Locations to search for candidate distributions, in addition to those found in the registry\nindexes.\n\nIf a path, the target must be a directory that contains packages as wheel files (`.whl`) or\nsource distributions (e.g., `.tar.gz` or `.zip`) at the top level.\n\nIf a URL, the page must contain a flat list of links to package files adhering to the\nformats described above.", "type": [ "array", "null" @@ -213,7 +224,7 @@ } }, "fork-strategy": { - "description": "The strategy to use when selecting multiple versions of a given package across Python versions and platforms.\n\nBy default, uv will optimize for selecting the latest version of each package for each supported Python version (`requires-python`), while minimizing the number of selected versions across platforms.\n\nUnder `fewest`, uv will minimize the number of selected versions for each package, preferring older versions that are compatible with a wider range of supported Python versions or platforms.", + "description": "The strategy to use when selecting multiple versions of a given package across Python\nversions and platforms.\n\nBy default, uv will optimize for selecting the latest version of each package for each\nsupported Python version (`requires-python`), while minimizing the number of selected\nversions across platforms.\n\nUnder `fewest`, uv will minimize the number of selected versions for each package,\npreferring older versions that are compatible with a wider range of supported Python\nversions or platforms.", "anyOf": [ { "$ref": "#/definitions/ForkStrategy" @@ -224,18 +235,18 @@ ] }, "index": { - "description": "The indexes to use when resolving dependencies.\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/) (the simple repository API), or a local directory laid out in the same format.\n\nIndexes are considered in the order in which they're defined, such that the first-defined index has the highest priority. Further, the indexes provided by this setting are given higher priority than any indexes specified via [`index_url`](#index-url) or [`extra_index_url`](#extra-index-url). uv will only consider the first index that contains a given package, unless an alternative [index strategy](#index-strategy) is specified.\n\nIf an index is marked as `explicit = true`, it will be used exclusively for the dependencies that select it explicitly via `[tool.uv.sources]`, as in:\n\n```toml [[tool.uv.index]] name = \"pytorch\" url = \"https://download.pytorch.org/whl/cu121\" explicit = true\n\n[tool.uv.sources] torch = { index = \"pytorch\" } ```\n\nIf an index is marked as `default = true`, it will be moved to the end of the prioritized list, such that it is given the lowest priority when resolving packages. 
Additionally, marking an index as default will disable the PyPI default index.", - "default": null, + "description": "The indexes to use when resolving dependencies.\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/)\n(the simple repository API), or a local directory laid out in the same format.\n\nIndexes are considered in the order in which they're defined, such that the first-defined\nindex has the highest priority. Further, the indexes provided by this setting are given\nhigher priority than any indexes specified via [`index_url`](#index-url) or\n[`extra_index_url`](#extra-index-url). uv will only consider the first index that contains\na given package, unless an alternative [index strategy](#index-strategy) is specified.\n\nIf an index is marked as `explicit = true`, it will be used exclusively for the\ndependencies that select it explicitly via `[tool.uv.sources]`, as in:\n\n```toml\n[[tool.uv.index]]\nname = \"pytorch\"\nurl = \"https://download.pytorch.org/whl/cu121\"\nexplicit = true\n\n[tool.uv.sources]\ntorch = { index = \"pytorch\" }\n```\n\nIf an index is marked as `default = true`, it will be moved to the end of the prioritized list, such that it is\ngiven the lowest priority when resolving packages. Additionally, marking an index as default will disable the\nPyPI default index.", "type": [ "array", "null" ], + "default": null, "items": { "$ref": "#/definitions/Index" } }, "index-strategy": { - "description": "The strategy to use when resolving against multiple index URLs.\n\nBy default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (`first-index`). This prevents \"dependency confusion\" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.", + "description": "The strategy to use when resolving against multiple index URLs.\n\nBy default, uv will stop at the first index on which a given package is available, and\nlimit resolutions to those present on that first index (`first-index`). 
This prevents\n\"dependency confusion\" attacks, whereby an attacker can upload a malicious package under the\nsame name to an alternate index.", "anyOf": [ { "$ref": "#/definitions/IndexStrategy" @@ -246,7 +257,7 @@ ] }, "index-url": { - "description": "The URL of the Python package index (by default: <https://pypi.org/simple>).\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/) (the simple repository API), or a local directory laid out in the same format.\n\nThe index provided by this setting is given lower priority than any indexes specified via [`extra_index_url`](#extra-index-url) or [`index`](#index).\n\n(Deprecated: use `index` instead.)", + "description": "The URL of the Python package index (by default: <https://pypi.org/simple>).\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/)\n(the simple repository API), or a local directory laid out in the same format.\n\nThe index provided by this setting is given lower priority than any indexes specified via\n[`extra_index_url`](#extra-index-url) or [`index`](#index).\n\n(Deprecated: use `index` instead.)", "anyOf": [ { "$ref": "#/definitions/IndexUrl" @@ -257,7 +268,7 @@ ] }, "keyring-provider": { - "description": "Attempt to use `keyring` for authentication for index URLs.\n\nAt present, only `--keyring-provider subprocess` is supported, which configures uv to use the `keyring` CLI to handle authentication.", + "description": "Attempt to use `keyring` for authentication for index URLs.\n\nAt present, only `--keyring-provider subprocess` is supported, which configures uv to\nuse the `keyring` CLI to handle authentication.", "anyOf": [ { "$ref": "#/definitions/KeyringProviderType" @@ -268,7 +279,7 @@ ] }, "link-mode": { - "description": "The method to use when installing packages from the global cache.\n\nDefaults to `clone` (also known as Copy-on-Write) on macOS, and `hardlink` on Linux and Windows.", + "description": "The method to use when installing packages from the global cache.\n\nDefaults to `clone` (also known as Copy-on-Write) on macOS, and `hardlink` on Linux and\nWindows.", "anyOf": [ { "$ref": "#/definitions/LinkMode" @@ -279,21 +290,21 @@ ] }, "managed": { - "description": "Whether the project is managed by uv. If `false`, uv will ignore the project when `uv run` is invoked.", + "description": "Whether the project is managed by uv. If `false`, uv will ignore the project when\n`uv run` is invoked.", "type": [ "boolean", "null" ] }, "native-tls": { - "description": "Whether to load TLS certificates from the platform's native certificate store.\n\nBy default, uv loads certificates from the bundled `webpki-roots` crate. The `webpki-roots` are a reliable set of trust roots from Mozilla, and including them in uv improves portability and performance (especially on macOS).\n\nHowever, in some cases, you may want to use the platform's native certificate store, especially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's included in your system's certificate store.", + "description": "Whether to load TLS certificates from the platform's native certificate store.\n\nBy default, uv loads certificates from the bundled `webpki-roots` crate. 
The\n`webpki-roots` are a reliable set of trust roots from Mozilla, and including them in uv\nimproves portability and performance (especially on macOS).\n\nHowever, in some cases, you may want to use the platform's native certificate store,\nespecially if you're relying on a corporate trust root (e.g., for a mandatory proxy) that's\nincluded in your system's certificate store.", "type": [ "boolean", "null" ] }, "no-binary": { - "description": "Don't install pre-built wheels.\n\nThe given packages will be built and installed from source. The resolver will still use pre-built wheels to extract package metadata, if available.", + "description": "Don't install pre-built wheels.\n\nThe given packages will be built and installed from source. The resolver will still use\npre-built wheels to extract package metadata, if available.", "type": [ "boolean", "null" @@ -310,21 +321,21 @@ } }, "no-build": { - "description": "Don't build source distributions.\n\nWhen enabled, resolving will not run arbitrary Python code. The cached wheels of already-built source distributions will be reused, but operations that require building distributions will exit with an error.", + "description": "Don't build source distributions.\n\nWhen enabled, resolving will not run arbitrary Python code. The cached wheels of\nalready-built source distributions will be reused, but operations that require building\ndistributions will exit with an error.", "type": [ "boolean", "null" ] }, "no-build-isolation": { - "description": "Disable isolation when building source distributions.\n\nAssumes that build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/) are already installed.", + "description": "Disable isolation when building source distributions.\n\nAssumes that build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/)\nare already installed.", "type": [ "boolean", "null" ] }, "no-build-isolation-package": { - "description": "Disable isolation when building source distributions for a specific package.\n\nAssumes that the packages' build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/) are already installed.", + "description": "Disable isolation when building source distributions for a specific package.\n\nAssumes that the packages' build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/)\nare already installed.", "type": [ "array", "null" @@ -344,21 +355,21 @@ } }, "no-cache": { - "description": "Avoid reading from or writing to the cache, instead using a temporary directory for the duration of the operation.", + "description": "Avoid reading from or writing to the cache, instead using a temporary directory for the\nduration of the operation.", "type": [ "boolean", "null" ] }, "no-index": { - "description": "Ignore all registry indexes (e.g., PyPI), instead relying on direct URL dependencies and those provided via `--find-links`.", + "description": "Ignore all registry indexes (e.g., PyPI), instead relying on direct URL dependencies and\nthose provided via `--find-links`.", "type": [ "boolean", "null" ] }, "no-sources": { - "description": "Ignore the `tool.uv.sources` table when resolving dependencies. Used to lock against the standards-compliant, publishable package metadata, as opposed to using any local or Git sources.", + "description": "Ignore the `tool.uv.sources` table when resolving dependencies. 
Used to lock against the\nstandards-compliant, publishable package metadata, as opposed to using any local or Git\nsources.", "type": [ "boolean", "null" @@ -382,7 +393,7 @@ } }, "package": { - "description": "Whether the project should be considered a Python package, or a non-package (\"virtual\") project.\n\nPackages are built and installed into the virtual environment in editable mode and thus require a build backend, while virtual projects are _not_ built or installed; instead, only their dependencies are included in the virtual environment.\n\nCreating a package requires that a `build-system` is present in the `pyproject.toml`, and that the project adheres to a structure that adheres to the build backend's expectations (e.g., a `src` layout).", + "description": "Whether the project should be considered a Python package, or a non-package (\"virtual\")\nproject.\n\nPackages are built and installed into the virtual environment in editable mode and thus\nrequire a build backend, while virtual projects are _not_ built or installed; instead, only\ntheir dependencies are included in the virtual environment.\n\nCreating a package requires that a `build-system` is present in the `pyproject.toml`, and\nthat the project adheres to a structure that matches the build backend's expectations\n(e.g., a `src` layout).", "type": [ "boolean", "null" @@ -399,7 +410,7 @@ ] }, "prerelease": { - "description": "The strategy to use when considering pre-release versions.\n\nBy default, uv will accept pre-releases for packages that _only_ publish pre-releases, along with first-party requirements that contain an explicit pre-release marker in the declared specifiers (`if-necessary-or-explicit`).", + "description": "The strategy to use when considering pre-release versions.\n\nBy default, uv will accept pre-releases for packages that _only_ publish pre-releases,\nalong with first-party requirements that contain an explicit pre-release marker in the\ndeclared specifiers (`if-necessary-or-explicit`).", "anyOf": [ { "$ref": "#/definitions/PrereleaseMode" @@ -417,15 +428,18 @@ ] }, "publish-url": { - "description": "The URL for publishing packages to the Python package index (by default: <https://upload.pypi.org/legacy/>).", - "type": [ - "string", - "null" - ], - "format": "uri" + "description": "The URL for publishing packages to the Python package index (by default:\n<https://upload.pypi.org/legacy/>).", + "anyOf": [ + { + "$ref": "#/definitions/DisplaySafeUrl" + }, + { + "type": "null" + } + ] }, "pypy-install-mirror": { - "description": "Mirror URL to use for downloading managed PyPy installations.\n\nBy default, managed PyPy installations are downloaded from [downloads.python.org](https://downloads.python.org/). This variable can be set to a mirror URL to use a different source for PyPy installations. 
The provided URL will replace `https://downloads.python.org/pypy` in, e.g., `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`.\n\nDistributions can be read from a local directory by using the `file://` URL scheme.", + "description": "Mirror URL to use for downloading managed PyPy installations.\n\nBy default, managed PyPy installations are downloaded from [downloads.python.org](https://downloads.python.org/).\nThis variable can be set to a mirror URL to use a different source for PyPy installations.\nThe provided URL will replace `https://downloads.python.org/pypy` in, e.g., `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`.\n\nDistributions can be read from a\nlocal directory by using the `file://` URL scheme.", "type": [ "string", "null" @@ -450,14 +464,14 @@ ] }, "python-install-mirror": { - "description": "Mirror URL for downloading managed Python installations.\n\nBy default, managed Python installations are downloaded from [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone). This variable can be set to a mirror URL to use a different source for Python installations. The provided URL will replace `https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g., `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.\n\nDistributions can be read from a local directory by using the `file://` URL scheme.", + "description": "Mirror URL for downloading managed Python installations.\n\nBy default, managed Python installations are downloaded from [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone).\nThis variable can be set to a mirror URL to use a different source for Python installations.\nThe provided URL will replace `https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g., `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.\n\nDistributions can be read from a local directory by using the `file://` URL scheme.", "type": [ "string", "null" ] }, "python-preference": { - "description": "Whether to prefer using Python installations that are already present on the system, or those that are downloaded and installed by uv.", + "description": "Whether to prefer using Python installations that are already present on the system, or\nthose that are downloaded and installed by uv.", "anyOf": [ { "$ref": "#/definitions/PythonPreference" @@ -475,7 +489,7 @@ ] }, "reinstall-package": { - "description": "Reinstall a specific package, regardless of whether it's already installed. Implies `refresh-package`.", + "description": "Reinstall a specific package, regardless of whether it's already installed. 
Implies\n`refresh-package`.", "type": [ "array", "null" @@ -495,7 +509,7 @@ } }, "required-version": { - "description": "Enforce a requirement on the version of uv.\n\nIf the version of uv does not meet the requirement at runtime, uv will exit with an error.\n\nAccepts a [PEP 440](https://peps.python.org/pep-0440/) specifier, like `==0.5.0` or `>=0.5.0`.", + "description": "Enforce a requirement on the version of uv.\n\nIf the version of uv does not meet the requirement at runtime, uv will exit\nwith an error.\n\nAccepts a [PEP 440](https://peps.python.org/pep-0440/) specifier, like `==0.5.0` or `>=0.5.0`.", "anyOf": [ { "$ref": "#/definitions/RequiredVersion" @@ -506,7 +520,7 @@ ] }, "resolution": { - "description": "The strategy to use when selecting between the different compatible versions for a given package requirement.\n\nBy default, uv will use the latest compatible version of each package (`highest`).", + "description": "The strategy to use when selecting between the different compatible versions for a given\npackage requirement.\n\nBy default, uv will use the latest compatible version of each package (`highest`).", "anyOf": [ { "$ref": "#/definitions/ResolutionMode" @@ -517,7 +531,7 @@ ] }, "sources": { - "description": "The sources to use when resolving dependencies.\n\n`tool.uv.sources` enriches the dependency metadata with additional sources, incorporated during development. A dependency source can be a Git repository, a URL, a local path, or an alternative registry.\n\nSee [Dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/) for more.", + "description": "The sources to use when resolving dependencies.\n\n`tool.uv.sources` enriches the dependency metadata with additional sources, incorporated\nduring development. A dependency source can be a Git repository, a URL, a local path, or an\nalternative registry.\n\nSee [Dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/) for more.", "anyOf": [ { "$ref": "#/definitions/ToolUvSources" @@ -528,7 +542,7 @@ ] }, "trusted-publishing": { - "description": "Configure trusted publishing via GitHub Actions.\n\nBy default, uv checks for trusted publishing when running in GitHub Actions, but ignores it if it isn't configured or the workflow doesn't have enough permissions (e.g., a pull request from a fork).", + "description": "Configure trusted publishing via GitHub Actions.\n\nBy default, uv checks for trusted publishing when running in GitHub Actions, but ignores it\nif it isn't configured or the workflow doesn't have enough permissions (e.g., a pull request\nfrom a fork).", "anyOf": [ { "$ref": "#/definitions/TrustedPublishing" @@ -546,7 +560,7 @@ ] }, "upgrade-package": { - "description": "Allow upgrades for a specific package, ignoring pinned versions in any existing output file.\n\nAccepts both standalone package names (`ruff`) and version specifiers (`ruff<0.5.0`).", + "description": "Allow upgrades for a specific package, ignoring pinned versions in any existing output\nfile.\n\nAccepts both standalone package names (`ruff`) and version specifiers (`ruff<0.5.0`).", "type": [ "array", "null" @@ -567,6 +581,7 @@ ] } }, + "additionalProperties": false, "definitions": { "AddBoundsKind": { "description": "The default version specifier when adding a dependency.", @@ -574,49 +589,37 @@ { "description": "Only a lower bound, e.g., `>=1.2.3`.", "type": "string", - "enum": [ - "lower" - ] + "const": "lower" }, { "description": "Allow the same major version, similar to the semver caret, e.g., `>=1.2.3, 
<2.0.0`.\n\nLeading zeroes are skipped, e.g. `>=0.1.2, <0.2.0`.", "type": "string", - "enum": [ - "major" - ] + "const": "major" }, { "description": "Allow the same minor version, similar to the semver tilde, e.g., `>=1.2.3, <1.3.0`.\n\nLeading zeroes are skipped, e.g. `>=0.1.2, <0.1.3`.", "type": "string", - "enum": [ - "minor" - ] + "const": "minor" }, { "description": "Pin the exact version, e.g., `==1.2.3`.\n\nThis option is not recommended, as versions are already pinned in the uv lockfile.", "type": "string", - "enum": [ - "exact" - ] + "const": "exact" } ] }, "AnnotationStyle": { - "description": "Indicate the style of annotation comments, used to indicate the dependencies that requested each package.", + "description": "Indicate the style of annotation comments, used to indicate the dependencies that requested each\npackage.", "oneOf": [ { "description": "Render the annotations on a single, comma-separated line.", "type": "string", - "enum": [ - "line" - ] + "const": "line" }, { "description": "Render each annotation on its own line.", "type": "string", - "enum": [ - "split" - ] + "const": "split" } ] }, @@ -624,90 +627,84 @@ "description": "When to use authentication.", "oneOf": [ { - "description": "Authenticate when necessary.\n\nIf credentials are provided, they will be used. Otherwise, an unauthenticated request will be attempted first. If the request fails, uv will search for credentials. If credentials are found, an authenticated request will be attempted.", + "description": "Authenticate when necessary.\n\nIf credentials are provided, they will be used. Otherwise, an unauthenticated request will\nbe attempted first. If the request fails, uv will search for credentials. If credentials are\nfound, an authenticated request will be attempted.", "type": "string", - "enum": [ - "auto" - ] + "const": "auto" }, { - "description": "Always authenticate.\n\nIf credentials are not provided, uv will eagerly search for credentials. If credentials cannot be found, uv will error instead of attempting an unauthenticated request.", + "description": "Always authenticate.\n\nIf credentials are not provided, uv will eagerly search for credentials. If credentials\ncannot be found, uv will error instead of attempting an unauthenticated request.", "type": "string", - "enum": [ - "always" - ] + "const": "always" }, { "description": "Never authenticate.\n\nIf credentials are provided, uv will error. uv will not search for credentials.", "type": "string", - "enum": [ - "never" - ] + "const": "never" } ] }, "BuildBackendSettings": { - "description": "Settings for the uv build backend (`uv_build`).\n\n!!! 
note\n\nThe uv build backend is currently in preview and may change in any future release.\n\nNote that those settings only apply when using the `uv_build` backend, other build backends (such as hatchling) have their own configuration.\n\nAll options that accept globs use the portable glob patterns from [PEP 639](https://packaging.python.org/en/latest/specifications/glob-patterns/).", + "description": "Settings for the uv build backend (`uv_build`).\n\nNote that those settings only apply when using the `uv_build` backend, other build backends\n(such as hatchling) have their own configuration.\n\nAll options that accept globs use the portable glob patterns from\n[PEP 639](https://packaging.python.org/en/latest/specifications/glob-patterns/).", "type": "object", "properties": { "data": { - "description": "Data includes for wheels.\n\nEach entry is a directory, whose contents are copied to the matching directory in the wheel in `-.data/(purelib|platlib|headers|scripts|data)`. Upon installation, this data is moved to its target location, as defined by . Usually, small data files are included by placing them in the Python module instead of using data includes.\n\n- `scripts`: Installed to the directory for executables, `/bin` on Unix or `\\Scripts` on Windows. This directory is added to `PATH` when the virtual environment is activated or when using `uv run`, so this data type can be used to install additional binaries. Consider using `project.scripts` instead for Python entrypoints. - `data`: Installed over the virtualenv environment root.\n\nWarning: This may override existing files!\n\n- `headers`: Installed to the include directory. Compilers building Python packages with this package as build requirement use the include directory to find additional header files. - `purelib` and `platlib`: Installed to the `site-packages` directory. It is not recommended to uses these two options.", + "description": "Data includes for wheels.\n\nEach entry is a directory, whose contents are copied to the matching directory in the wheel\nin `-.data/(purelib|platlib|headers|scripts|data)`. Upon installation, this\ndata is moved to its target location, as defined by\n. Usually, small\ndata files are included by placing them in the Python module instead of using data includes.\n\n- `scripts`: Installed to the directory for executables, `/bin` on Unix or\n `\\Scripts` on Windows. This directory is added to `PATH` when the virtual\n environment is activated or when using `uv run`, so this data type can be used to install\n additional binaries. Consider using `project.scripts` instead for Python entrypoints.\n- `data`: Installed over the virtualenv environment root.\n\n Warning: This may override existing files!\n\n- `headers`: Installed to the include directory. Compilers building Python packages\n with this package as build requirement use the include directory to find additional header\n files.\n- `purelib` and `platlib`: Installed to the `site-packages` directory. 
It is not recommended\n to use these two options.", + "allOf": [ + { + "$ref": "#/definitions/WheelDataIncludes" + } + ], "default": { "data": null, "headers": null, "platlib": null, "purelib": null, "scripts": null - }, - "allOf": [ - { - "$ref": "#/definitions/WheelDataIncludes" - } - ] + } }, "default-excludes": { "description": "If set to `false`, the default excludes aren't applied.\n\nDefault excludes: `__pycache__`, `*.pyc`, and `*.pyo`.", - "default": true, - "type": "boolean" + "type": "boolean", + "default": true }, "module-name": { - "description": "The name of the module directory inside `module-root`.\n\nThe default module name is the package name with dots and dashes replaced by underscores.\n\nPackage names need to be valid Python identifiers, and the directory needs to contain a `__init__.py`. An exception are stubs packages, whose name ends with `-stubs`, with the stem being the module name, and which contain a `__init__.pyi` file.\n\nFor namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or `foo-stubs.bar`.\n\nNote that using this option runs the risk of creating two packages with different names but the same module names. Installing such packages together leads to unspecified behavior, often with corrupted files or directory trees.", - "default": null, + "description": "The name of the module directory inside `module-root`.\n\nThe default module name is the package name with dots and dashes replaced by underscores.\n\nPackage names need to be valid Python identifiers, and the directory needs to contain a\n`__init__.py`. An exception is stubs packages, whose name ends with `-stubs`, with the stem\nbeing the module name, and which contain a `__init__.pyi` file.\n\nFor namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or\n`foo-stubs.bar`.\n\nNote that using this option runs the risk of creating two packages with different names but\nthe same module names. Installing such packages together leads to unspecified behavior,\noften with corrupted files or directory trees.", "type": [ "string", "null" ], + "default": null }, "module-root": { "description": "The directory that contains the module directory.\n\nCommon values are `src` (src layout, the default) or an empty path (flat layout).", - "default": "src", - "type": "string" + "type": "string", + "default": "src" }, "namespace": { - "description": "Build a namespace package.\n\nBuild a PEP 420 implicit namespace package, allowing more than one root `__init__.py`.\n\nUse this option when the namespace package contains multiple root `__init__.py`, for namespace packages with a single root `__init__.py` use a dotted `module-name` instead.\n\nTo compare dotted `module-name` and `namespace = true`, the first example below can be expressed with `module-name = \"cloud.database\"`: There is one root `__init__.py` `database`. 
In the second example, we have three roots (`cloud.database`, `cloud.database_pro`, `billing.modules.database_pro`), so `namespace = true` is required.\n\n```text src └── cloud └── database ├── __init__.py ├── query_builder │ └── __init__.py └── sql ├── parser.py └── __init__.py ```\n\n```text src ├── cloud │ ├── database │ │ ├── __init__.py │ │ ├── query_builder │ │ │ └── __init__.py │ │ └── sql │ │ ├── __init__.py │ │ └── parser.py │ └── database_pro │ ├── __init__.py │ └── query_builder.py └── billing └── modules └── database_pro ├── __init__.py └── sql.py ```", - "default": false, - "type": "boolean" + "description": "Build a namespace package.\n\nBuild a PEP 420 implicit namespace package, allowing more than one root `__init__.py`.\n\nUse this option when the namespace package contains multiple root `__init__.py`; for\nnamespace packages with a single root `__init__.py`, use a dotted `module-name` instead.\n\nTo compare dotted `module-name` and `namespace = true`, the first example below can be\nexpressed with `module-name = \"cloud.database\"`: There is one root `__init__.py` `database`.\nIn the second example, we have three roots (`cloud.database`, `cloud.database_pro`,\n`billing.modules.database_pro`), so `namespace = true` is required.\n\n```text\nsrc\n└── cloud\n └── database\n ├── __init__.py\n ├── query_builder\n │ └── __init__.py\n └── sql\n ├── parser.py\n └── __init__.py\n```\n\n```text\nsrc\n├── cloud\n│ ├── database\n│ │ ├── __init__.py\n│ │ ├── query_builder\n│ │ │ └── __init__.py\n│ │ └── sql\n│ │ ├── __init__.py\n│ │ └── parser.py\n│ └── database_pro\n│ ├── __init__.py\n│ └── query_builder.py\n└── billing\n └── modules\n └── database_pro\n ├── __init__.py\n └── sql.py\n```", "type": "boolean", "default": false }, "source-exclude": { "description": "Glob expressions which files and directories to exclude from the source distribution.", - "default": [], "type": "array", + "default": [], "items": { "type": "string" } }, "source-include": { - "description": "Glob expressions which files and directories to additionally include in the source distribution.\n\n`pyproject.toml` and the contents of the module directory are always included.", - "default": [], + "description": "Glob expressions which files and directories to additionally include in the source\ndistribution.\n\n`pyproject.toml` and the contents of the module directory are always included.", "type": "array", + "default": [], "items": { "type": "string" } }, "wheel-exclude": { "description": "Glob expressions which files and directories to exclude from the wheel.", - "default": [], "type": "array", + "default": [], "items": { "type": "string" } @@ -723,54 +720,54 @@ { "description": "Ex) `{ file = \"Cargo.lock\" }` or `{ file = \"**/*.toml\" }`", "type": "object", - "required": [ "file" ], "properties": { "file": { "type": "string" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ "file" ] }, { "description": "Ex) `{ dir = \"src\" }`", "type": "object", - "required": [ "dir" ], "properties": { "dir": { "type": "string" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ "dir" ] }, { "description": "Ex) `{ git = true }` or `{ git = { commit = true, tags = false } }`", "type": "object", - "required": [ "git" ], "properties": { "git": { "$ref": "#/definitions/GitPattern" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ "git" ] }, { "description": "Ex) `{ env = \"UV_CACHE_INFO\" }`", 
"type": "object", - "required": [ "env" ], "properties": { "env": { "type": "string" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ "env" ] } ] }, @@ -790,7 +787,7 @@ ] }, "ConfigSettings": { - "description": "Settings to pass to a PEP 517 build backend, structured as a map from (string) key to string or list of strings.\n\nSee: ", + "description": "Settings to pass to a PEP 517 build backend, structured as a map from (string) key to string or\nlist of strings.\n\nSee: ", "type": "object", "additionalProperties": { "$ref": "#/definitions/ConfigSettingValue" @@ -802,16 +799,11 @@ { "description": "All groups are defaulted", "type": "string", - "enum": [ "All" ] + "const": "All" }, { "description": "A list of groups", "type": "object", - "required": [ "List" ], "properties": { "List": { "type": "array", @@ -820,34 +812,50 @@ } } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ "List" ] } ] }, + "DependencyGroupSettings": { + "type": "object", + "properties": { + "requires-python": { + "description": "Version of Python to require when installing this group", + "type": [ + "string", + "null" + ] + } + } + }, + "DisplaySafeUrl": { + "description": "A [`Url`] wrapper that redacts credentials when displaying the URL.\n\n`DisplaySafeUrl` wraps the standard [`url::Url`] type, providing functionality to mask\nsecrets by default when the URL is displayed or logged. This helps prevent accidental\nexposure of sensitive information in logs and debug output.\n\n# Examples\n\n```\nuse uv_redacted::DisplaySafeUrl;\nuse std::str::FromStr;\n\n// Create a `DisplaySafeUrl` from a `&str`\nlet mut url = DisplaySafeUrl::parse(\"https://user:password@example.com\").unwrap();\n\n// Display will mask secrets\nassert_eq!(url.to_string(), \"https://user:****@example.com/\");\n\n// You can still access the username and password\nassert_eq!(url.username(), \"user\");\nassert_eq!(url.password(), Some(\"password\"));\n\n// And you can still update the username and password\nlet _ = url.set_username(\"new_user\");\nlet _ = url.set_password(Some(\"new_password\"));\nassert_eq!(url.username(), \"new_user\");\nassert_eq!(url.password(), Some(\"new_password\"));\n\n// It is also possible to remove the credentials entirely\nurl.remove_credentials();\nassert_eq!(url.username(), \"\");\nassert_eq!(url.password(), None);\n```", + "type": "string", + "format": "uri" + }, "ExcludeNewer": { "description": "Exclude distributions uploaded after the given timestamp.\n\nAccepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same format (e.g., `2006-12-02`).", "type": "string", "pattern": "^\\d{4}-\\d{2}-\\d{2}(T\\d{2}:\\d{2}:\\d{2}(Z|[+-]\\d{2}:\\d{2}))?$" }, "ExtraName": { - "description": "The normalized name of an extra dependency.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`. For example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: - - ", + "description": "The normalized name of an extra dependency.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.\nFor example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee:\n- \n- ", "type": "string" }, "ForkStrategy": { "oneOf": [ { - "description": "Optimize for selecting the fewest number of versions for each package. 
Older versions may\nbe preferred if they are compatible with a wider range of supported Python versions or\nplatforms.", "type": "string", - "enum": [ "fewest" ] + "const": "fewest" }, { - "description": "Optimize for selecting latest supported version of each package, for each supported Python version.", + "description": "Optimize for selecting the latest supported version of each package, for each supported Python\nversion.", "type": "string", - "enum": [ "requires-python" ] + "const": "requires-python" } ] }, @@ -880,56 +888,53 @@ "additionalProperties": false }, "GroupName": { - "description": "The normalized name of a dependency group.\n\nSee: - - ", + "description": "The normalized name of a dependency group.\n\nSee:\n- \n- ", "type": "string" }, "Index": { "type": "object", - "required": [ "url" ], "properties": { "authenticate": { - "description": "When uv should use authentication for requests to the index.\n\n```toml [[tool.uv.index]] name = \"my-index\" url = \"https:///simple\" authenticate = \"always\" ```", - "default": "auto", + "description": "When uv should use authentication for requests to the index.\n\n```toml\n[[tool.uv.index]]\nname = \"my-index\"\nurl = \"https:///simple\"\nauthenticate = \"always\"\n```", "allOf": [ { "$ref": "#/definitions/AuthPolicy" } - ] + ], + "default": "auto" }, "default": { - "description": "Mark the index as the default index.\n\nBy default, uv uses PyPI as the default index, such that even if additional indexes are defined via `[[tool.uv.index]]`, PyPI will still be used as a fallback for packages that aren't found elsewhere. To disable the PyPI default, set `default = true` on at least one other index.\n\nMarking an index as default will move it to the front of the list of indexes, such that it is given the highest priority when resolving packages.", - "default": false, - "type": "boolean" + "description": "Mark the index as the default index.\n\nBy default, uv uses PyPI as the default index, such that even if additional indexes are\ndefined via `[[tool.uv.index]]`, PyPI will still be used as a fallback for packages that\naren't found elsewhere. 
To disable the PyPI default, set `default = true` on at least one\nother index.\n\nMarking an index as default will move it to the front of the list of indexes, such that it\nis given the highest priority when resolving packages.", + "type": "boolean", + "default": false }, "explicit": { - "description": "Mark the index as explicit.\n\nExplicit indexes will _only_ be used when explicitly requested via a `[tool.uv.sources]` definition, as in:\n\n```toml [[tool.uv.index]] name = \"pytorch\" url = \"https://download.pytorch.org/whl/cu121\" explicit = true\n\n[tool.uv.sources] torch = { index = \"pytorch\" } ```", - "default": false, - "type": "boolean" + "description": "Mark the index as explicit.\n\nExplicit indexes will _only_ be used when explicitly requested via a `[tool.uv.sources]`\ndefinition, as in:\n\n```toml\n[[tool.uv.index]]\nname = \"pytorch\"\nurl = \"https://download.pytorch.org/whl/cu121\"\nexplicit = true\n\n[tool.uv.sources]\ntorch = { index = \"pytorch\" }\n```", + "type": "boolean", + "default": false }, "format": { - "description": "The format used by the index.\n\nIndexes can either be PEP 503-compliant (i.e., a PyPI-style registry implementing the Simple API) or structured as a flat list of distributions (e.g., `--find-links`). In both cases, indexes can point to either local or remote resources.", - "default": "simple", + "description": "The format used by the index.\n\nIndexes can either be PEP 503-compliant (i.e., a PyPI-style registry implementing the Simple\nAPI) or structured as a flat list of distributions (e.g., `--find-links`). In both cases,\nindexes can point to either local or remote resources.", "allOf": [ { "$ref": "#/definitions/IndexFormat" } - ] + ], + "default": "simple" }, "ignore-error-codes": { - "description": "Status codes that uv should ignore when deciding whether to continue searching in the next index after a failure.\n\n```toml [[tool.uv.index]] name = \"my-index\" url = \"https:///simple\" ignore-error-codes = [401, 403] ```", - "default": null, + "description": "Status codes that uv should ignore when deciding whether\nto continue searching in the next index after a failure.\n\n```toml\n[[tool.uv.index]]\nname = \"my-index\"\nurl = \"https:///simple\"\nignore-error-codes = [401, 403]\n```", "type": [ "array", "null" ], + "default": null, "items": { "$ref": "#/definitions/StatusCode" } }, "name": { - "description": "The name of the index.\n\nIndex names can be used to reference indexes elsewhere in the configuration. For example, you can pin a package to a specific index by name:\n\n```toml [[tool.uv.index]] name = \"pytorch\" url = \"https://download.pytorch.org/whl/cu121\"\n\n[tool.uv.sources] torch = { index = \"pytorch\" } ```", + "description": "The name of the index.\n\nIndex names can be used to reference indexes elsewhere in the configuration. 
For example,\nyou can pin a package to a specific index by name:\n\n```toml\n[[tool.uv.index]]\nname = \"pytorch\"\nurl = \"https://download.pytorch.org/whl/cu121\"\n\n[tool.uv.sources]\ntorch = { index = \"pytorch\" }\n```", "anyOf": [ { "$ref": "#/definitions/IndexName" @@ -940,12 +945,15 @@ ] }, "publish-url": { - "description": "The URL of the upload endpoint.\n\nWhen using `uv publish --index `, this URL is used for publishing.\n\nA configuration for the default index PyPI would look as follows:\n\n```toml [[tool.uv.index]] name = \"pypi\" url = \"https://pypi.org/simple\" publish-url = \"https://upload.pypi.org/legacy/\" ```", - "type": [ - "string", - "null" - ], - "format": "uri" + "description": "The URL of the upload endpoint.\n\nWhen using `uv publish --index `, this URL is used for publishing.\n\nA configuration for the default index PyPI would look as follows:\n\n```toml\n[[tool.uv.index]]\nname = \"pypi\"\nurl = \"https://pypi.org/simple\"\npublish-url = \"https://upload.pypi.org/legacy/\"\n```", + "anyOf": [ + { + "$ref": "#/definitions/DisplaySafeUrl" + }, + { + "type": "null" + } + ] }, "url": { "description": "The URL of the index.\n\nExpects to receive a URL (e.g., `https://pypi.org/simple`) or a local path.", @@ -955,23 +963,22 @@ } ] } - } + }, + "required": [ + "url" + ] }, "IndexFormat": { "oneOf": [ { "description": "A PyPI-style index implementing the Simple Repository API.", "type": "string", - "enum": [ - "simple" - ] + "const": "simple" }, { "description": "A `--find-links`-style index containing a flat list of wheels and source distributions.", "type": "string", - "enum": [ - "flat" - ] + "const": "flat" } ] }, @@ -982,25 +989,19 @@ "IndexStrategy": { "oneOf": [ { - "description": "Only use results from the first index that returns a match for a given package name.\n\nWhile this differs from pip's behavior, it's the default index strategy as it's the most secure.", + "description": "Only use results from the first index that returns a match for a given package name.\n\nWhile this differs from pip's behavior, it's the default index strategy as it's the most\nsecure.", "type": "string", - "enum": [ - "first-index" - ] + "const": "first-index" }, { - "description": "Search for every package name across all indexes, exhausting the versions from the first index before moving on to the next.\n\nIn this strategy, we look for every package across all indexes. When resolving, we attempt to use versions from the indexes in order, such that we exhaust all available versions from the first index before moving on to the next. Further, if a version is found to be incompatible in the first index, we do not reconsider that version in subsequent indexes, even if the secondary index might contain compatible versions (e.g., variants of the same versions with different ABI tags or Python version constraints).\n\nSee: ", + "description": "Search for every package name across all indexes, exhausting the versions from the first\nindex before moving on to the next.\n\nIn this strategy, we look for every package across all indexes. When resolving, we attempt\nto use versions from the indexes in order, such that we exhaust all available versions from\nthe first index before moving on to the next. 
Further, if a version is found to be\nincompatible in the first index, we do not reconsider that version in subsequent indexes,\neven if the secondary index might contain compatible versions (e.g., variants of the same\nversions with different ABI tags or Python version constraints).\n\nSee: ", "type": "string", - "enum": [ - "unsafe-first-match" - ] + "const": "unsafe-first-match" }, { - "description": "Search for every package name across all indexes, preferring the \"best\" version found. If a package version is in multiple indexes, only look at the entry for the first index.\n\nIn this strategy, we look for every package across all indexes. When resolving, we consider all versions from all indexes, choosing the \"best\" version found (typically, the highest compatible version).\n\nThis most closely matches pip's behavior, but exposes the resolver to \"dependency confusion\" attacks whereby malicious actors can publish packages to public indexes with the same name as internal packages, causing the resolver to install the malicious package in lieu of the intended internal package.\n\nSee: ", + "description": "Search for every package name across all indexes, preferring the \"best\" version found. If a\npackage version is in multiple indexes, only look at the entry for the first index.\n\nIn this strategy, we look for every package across all indexes. When resolving, we consider\nall versions from all indexes, choosing the \"best\" version found (typically, the highest\ncompatible version).\n\nThis most closely matches pip's behavior, but exposes the resolver to \"dependency confusion\"\nattacks whereby malicious actors can publish packages to public indexes with the same name\nas internal packages, causing the resolver to install the malicious package in lieu of\nthe intended internal package.\n\nSee: ", "type": "string", - "enum": [ - "unsafe-best-match" - ] + "const": "unsafe-best-match" } ] }, @@ -1014,16 +1015,12 @@ { "description": "Do not use keyring for credential lookup.", "type": "string", - "enum": [ - "disabled" - ] + "const": "disabled" }, { "description": "Use the `keyring` command for credential lookup.", "type": "string", - "enum": [ - "subprocess" - ] + "const": "subprocess" } ] }, @@ -1032,30 +1029,22 @@ { "description": "Clone (i.e., copy-on-write) packages from the wheel into the `site-packages` directory.", "type": "string", - "enum": [ - "clone" - ] + "const": "clone" }, { "description": "Copy packages from the wheel into the `site-packages` directory.", "type": "string", - "enum": [ - "copy" - ] + "const": "copy" }, { "description": "Hard link packages from the wheel into the `site-packages` directory.", "type": "string", - "enum": [ - "hardlink" - ] + "const": "hardlink" }, { - "description": "Symbolically link packages from the wheel into the `site-packages` directory.\n\nWARNING: The use of symlinks is discouraged, as they create tight coupling between the cache and the target environment. For example, clearing the cache (`uv cache clear`) will break all installed packages by way of removing the underlying source files. Use symlinks with caution.", + "description": "Symbolically link packages from the wheel into the `site-packages` directory.\n\nWARNING: The use of symlinks is discouraged, as they create tight coupling between the\ncache and the target environment. For example, clearing the cache (`uv cache clear`) will\nbreak all installed packages by way of removing the underlying source files. 
Use symlinks\nwith caution.", "type": "string", - "enum": [ - "symlink" - ] + "const": "symlink" } ] }, @@ -1064,7 +1053,7 @@ "type": "string" }, "PackageName": { - "description": "The normalized name of a package.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`. For example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: ", + "description": "The normalized name of a package.\n\nConverts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.\nFor example, `---`, `.`, and `__` are all converted to a single `-`.\n\nSee: ", "type": "string" }, "PackageNameSpecifier": { @@ -1073,11 +1062,8 @@ "pattern": "^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$" }, "PipGroupName": { - "description": "The pip-compatible variant of a [`GroupName`].\n\nEither or :. If is omitted it defaults to \"pyproject.toml\".", + "description": "The pip-compatible variant of a [`GroupName`].\n\nEither or :.\nIf is omitted it defaults to \"pyproject.toml\".", "type": "object", - "required": [ - "name" - ], "properties": { "name": { "$ref": "#/definitions/GroupName" @@ -1088,10 +1074,13 @@ "null" ] } - } + }, + "required": [ + "name" + ] }, "PipOptions": { - "description": "Settings that are specific to the `uv pip` command-line interface.\n\nThese values will be ignored when running commands outside the `uv pip` namespace (e.g., `uv lock`, `uvx`).", + "description": "Settings that are specific to the `uv pip` command-line interface.\n\nThese values will be ignored when running commands outside the `uv pip` namespace (e.g.,\n`uv lock`, `uvx`).", "type": "object", "properties": { "all-extras": { @@ -1102,14 +1091,14 @@ ] }, "allow-empty-requirements": { - "description": "Allow `uv pip sync` with empty requirements, which will clear the environment of all packages.", + "description": "Allow `uv pip sync` with empty requirements, which will clear the environment of all\npackages.", "type": [ "boolean", "null" ] }, "annotation-style": { - "description": "The style of the annotation comments included in the output file, used to indicate the source of each package.", + "description": "The style of the annotation comments included in the output file, used to indicate the\nsource of each package.", "anyOf": [ { "$ref": "#/definitions/AnnotationStyle" @@ -1120,21 +1109,21 @@ ] }, "break-system-packages": { - "description": "Allow uv to modify an `EXTERNALLY-MANAGED` Python installation.\n\nWARNING: `--break-system-packages` is intended for use in continuous integration (CI) environments, when installing into Python installations that are managed by an external package manager, like `apt`. It should be used with caution, as such Python installations explicitly recommend against modifications by other package managers (like uv or pip).", + "description": "Allow uv to modify an `EXTERNALLY-MANAGED` Python installation.\n\nWARNING: `--break-system-packages` is intended for use in continuous integration (CI)\nenvironments, when installing into Python installations that are managed by an external\npackage manager, like `apt`. 
It should be used with caution, as such Python installations\nexplicitly recommend against modifications by other package managers (like uv or pip).", "type": [ "boolean", "null" ] }, "compile-bytecode": { - "description": "Compile Python files to bytecode after installation.\n\nBy default, uv does not compile Python (`.py`) files to bytecode (`__pycache__/*.pyc`); instead, compilation is performed lazily the first time a module is imported. For use-cases in which start time is critical, such as CLI applications and Docker containers, this option can be enabled to trade longer installation times for faster start times.\n\nWhen enabled, uv will process the entire site-packages directory (including packages that are not being modified by the current operation) for consistency. Like pip, it will also ignore errors.", + "description": "Compile Python files to bytecode after installation.\n\nBy default, uv does not compile Python (`.py`) files to bytecode (`__pycache__/*.pyc`);\ninstead, compilation is performed lazily the first time a module is imported. For use-cases\nin which start time is critical, such as CLI applications and Docker containers, this option\ncan be enabled to trade longer installation times for faster start times.\n\nWhen enabled, uv will process the entire site-packages directory (including packages that\nare not being modified by the current operation) for consistency. Like pip, it will also\nignore errors.", "type": [ "boolean", "null" ] }, "config-settings": { - "description": "Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend, specified as `KEY=VALUE` pairs.", + "description": "Settings to pass to the [PEP 517](https://peps.python.org/pep-0517/) build backend,\nspecified as `KEY=VALUE` pairs.", "anyOf": [ { "$ref": "#/definitions/ConfigSettings" @@ -1152,7 +1141,7 @@ ] }, "dependency-metadata": { - "description": "Pre-defined static metadata for dependencies of the project (direct or transitive). When provided, enables the resolver to use the specified metadata instead of querying the registry or building the relevant package from source.\n\nMetadata should be provided in adherence with the [Metadata 2.3](https://packaging.python.org/en/latest/specifications/core-metadata/) standard, though only the following fields are respected:\n\n- `name`: The name of the package. - (Optional) `version`: The version of the package. If omitted, the metadata will be applied to all versions of the package. - (Optional) `requires-dist`: The dependencies of the package (e.g., `werkzeug>=0.14`). - (Optional) `requires-python`: The Python version required by the package (e.g., `>=3.10`). - (Optional) `provides-extras`: The extras provided by the package.", + "description": "Pre-defined static metadata for dependencies of the project (direct or transitive). When\nprovided, enables the resolver to use the specified metadata instead of querying the\nregistry or building the relevant package from source.\n\nMetadata should be provided in adherence with the [Metadata 2.3](https://packaging.python.org/en/latest/specifications/core-metadata/)\nstandard, though only the following fields are respected:\n\n- `name`: The name of the package.\n- (Optional) `version`: The version of the package. 
If omitted, the metadata will be applied\n to all versions of the package.\n- (Optional) `requires-dist`: The dependencies of the package (e.g., `werkzeug>=0.14`).\n- (Optional) `requires-python`: The Python version required by the package (e.g., `>=3.10`).\n- (Optional) `provides-extras`: The extras provided by the package.", "type": [ "array", "null" @@ -1176,7 +1165,7 @@ ] }, "emit-index-annotation": { - "description": "Include comment annotations indicating the index used to resolve each package (e.g., `# from https://pypi.org/simple`).", + "description": "Include comment annotations indicating the index used to resolve each package (e.g.,\n`# from https://pypi.org/simple`).", "type": [ "boolean", "null" @@ -1190,14 +1179,14 @@ ] }, "emit-marker-expression": { - "description": "Whether to emit a marker string indicating the conditions under which the set of pinned dependencies is valid.\n\nThe pinned dependencies may be valid even when the marker expression is false, but when the expression is true, the requirements are known to be correct.", + "description": "Whether to emit a marker string indicating the conditions under which the set of pinned\ndependencies is valid.\n\nThe pinned dependencies may be valid even when the marker expression is\nfalse, but when the expression is true, the requirements are known to\nbe correct.", "type": [ "boolean", "null" ] }, "exclude-newer": { - "description": "Limit candidate packages to those that were uploaded prior to a given point in time.\n\nAccepts a superset of [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) (e.g., `2006-12-02T02:07:43Z`). A full timestamp is required to ensure that the resolver will behave consistently across timezones.", + "description": "Limit candidate packages to those that were uploaded prior to a given point in time.\n\nAccepts a superset of [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) (e.g.,\n`2006-12-02T02:07:43Z`). A full timestamp is required to ensure that the resolver will\nbehave consistently across timezones.", "anyOf": [ { "$ref": "#/definitions/ExcludeNewer" @@ -1218,7 +1207,7 @@ } }, "extra-index-url": { - "description": "Extra URLs of package indexes to use, in addition to `--index-url`.\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/) (the simple repository API), or a local directory laid out in the same format.\n\nAll indexes provided via this flag take priority over the index specified by [`index_url`](#index-url). When multiple indexes are provided, earlier values take priority.\n\nTo control uv's resolution strategy when multiple indexes are present, see [`index_strategy`](#index-strategy).", + "description": "Extra URLs of package indexes to use, in addition to `--index-url`.\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/)\n(the simple repository API), or a local directory laid out in the same format.\n\nAll indexes provided via this flag take priority over the index specified by\n[`index_url`](#index-url). 
When multiple indexes are provided, earlier values take priority.\n\nTo control uv's resolution strategy when multiple indexes are present, see\n[`index_strategy`](#index-strategy).", "type": [ "array", "null" @@ -1228,7 +1217,7 @@ } }, "find-links": { - "description": "Locations to search for candidate distributions, in addition to those found in the registry indexes.\n\nIf a path, the target must be a directory that contains packages as wheel files (`.whl`) or source distributions (e.g., `.tar.gz` or `.zip`) at the top level.\n\nIf a URL, the page must contain a flat list of links to package files adhering to the formats described above.", + "description": "Locations to search for candidate distributions, in addition to those found in the registry\nindexes.\n\nIf a path, the target must be a directory that contains packages as wheel files (`.whl`) or\nsource distributions (e.g., `.tar.gz` or `.zip`) at the top level.\n\nIf a URL, the page must contain a flat list of links to package files adhering to the\nformats described above.", "type": [ "array", "null" @@ -1238,7 +1227,7 @@ } }, "fork-strategy": { - "description": "The strategy to use when selecting multiple versions of a given package across Python versions and platforms.\n\nBy default, uv will optimize for selecting the latest version of each package for each supported Python version (`requires-python`), while minimizing the number of selected versions across platforms.\n\nUnder `fewest`, uv will minimize the number of selected versions for each package, preferring older versions that are compatible with a wider range of supported Python versions or platforms.", + "description": "The strategy to use when selecting multiple versions of a given package across Python\nversions and platforms.\n\nBy default, uv will optimize for selecting the latest version of each package for each\nsupported Python version (`requires-python`), while minimizing the number of selected\nversions across platforms.\n\nUnder `fewest`, uv will minimize the number of selected versions for each package,\npreferring older versions that are compatible with a wider range of supported Python\nversions or platforms.", "anyOf": [ { "$ref": "#/definitions/ForkStrategy" @@ -1266,7 +1255,7 @@ } }, "index-strategy": { - "description": "The strategy to use when resolving against multiple index URLs.\n\nBy default, uv will stop at the first index on which a given package is available, and limit resolutions to those present on that first index (`first-index`). This prevents \"dependency confusion\" attacks, whereby an attacker can upload a malicious package under the same name to an alternate index.", + "description": "The strategy to use when resolving against multiple index URLs.\n\nBy default, uv will stop at the first index on which a given package is available, and\nlimit resolutions to those present on that first index (`first-index`). 
This prevents\n\"dependency confusion\" attacks, whereby an attacker can upload a malicious package under the\nsame name to an alternate index.", "anyOf": [ { "$ref": "#/definitions/IndexStrategy" @@ -1277,7 +1266,7 @@ ] }, "index-url": { - "description": "The URL of the Python package index (by default: ).\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/) (the simple repository API), or a local directory laid out in the same format.\n\nThe index provided by this setting is given lower priority than any indexes specified via [`extra_index_url`](#extra-index-url).", + "description": "The URL of the Python package index (by default: ).\n\nAccepts either a repository compliant with [PEP 503](https://peps.python.org/pep-0503/)\n(the simple repository API), or a local directory laid out in the same format.\n\nThe index provided by this setting is given lower priority than any indexes specified via\n[`extra_index_url`](#extra-index-url).", "anyOf": [ { "$ref": "#/definitions/IndexUrl" @@ -1288,7 +1277,7 @@ ] }, "keyring-provider": { - "description": "Attempt to use `keyring` for authentication for index URLs.\n\nAt present, only `--keyring-provider subprocess` is supported, which configures uv to use the `keyring` CLI to handle authentication.", + "description": "Attempt to use `keyring` for authentication for index URLs.\n\nAt present, only `--keyring-provider subprocess` is supported, which configures uv to\nuse the `keyring` CLI to handle authentication.", "anyOf": [ { "$ref": "#/definitions/KeyringProviderType" @@ -1299,7 +1288,7 @@ ] }, "link-mode": { - "description": "The method to use when installing packages from the global cache.\n\nDefaults to `clone` (also known as Copy-on-Write) on macOS, and `hardlink` on Linux and Windows.", + "description": "The method to use when installing packages from the global cache.\n\nDefaults to `clone` (also known as Copy-on-Write) on macOS, and `hardlink` on Linux and\nWindows.", "anyOf": [ { "$ref": "#/definitions/LinkMode" @@ -1310,14 +1299,14 @@ ] }, "no-annotate": { - "description": "Exclude comment annotations indicating the source of each package from the output file generated by `uv pip compile`.", + "description": "Exclude comment annotations indicating the source of each package from the output file\ngenerated by `uv pip compile`.", "type": [ "boolean", "null" ] }, "no-binary": { - "description": "Don't install pre-built wheels.\n\nThe given packages will be built and installed from source. The resolver will still use pre-built wheels to extract package metadata, if available.\n\nMultiple packages may be provided. Disable binaries for all packages with `:all:`. Clear previously specified packages with `:none:`.", + "description": "Don't install pre-built wheels.\n\nThe given packages will be built and installed from source. The resolver will still use\npre-built wheels to extract package metadata, if available.\n\nMultiple packages may be provided. Disable binaries for all packages with `:all:`.\nClear previously specified packages with `:none:`.", "type": [ "array", "null" @@ -1327,21 +1316,21 @@ } }, "no-build": { - "description": "Don't build source distributions.\n\nWhen enabled, resolving will not run arbitrary Python code. 
The cached wheels of already-built source distributions will be reused, but operations that require building distributions will exit with an error.\n\nAlias for `--only-binary :all:`.", + "description": "Don't build source distributions.\n\nWhen enabled, resolving will not run arbitrary Python code. The cached wheels of\nalready-built source distributions will be reused, but operations that require building\ndistributions will exit with an error.\n\nAlias for `--only-binary :all:`.", "type": [ "boolean", "null" ] }, "no-build-isolation": { - "description": "Disable isolation when building source distributions.\n\nAssumes that build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/) are already installed.", + "description": "Disable isolation when building source distributions.\n\nAssumes that build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/)\nare already installed.", "type": [ "boolean", "null" ] }, "no-build-isolation-package": { - "description": "Disable isolation when building source distributions for a specific package.\n\nAssumes that the packages' build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/) are already installed.", + "description": "Disable isolation when building source distributions for a specific package.\n\nAssumes that the packages' build dependencies specified by [PEP 518](https://peps.python.org/pep-0518/)\nare already installed.", "type": [ "array", "null" @@ -1351,14 +1340,14 @@ } }, "no-deps": { - "description": "Ignore package dependencies, instead only add those packages explicitly listed on the command line to the resulting requirements file.", + "description": "Ignore package dependencies, instead only add those packages explicitly listed\non the command line to the resulting requirements file.", "type": [ "boolean", "null" ] }, "no-emit-package": { - "description": "Specify a package to omit from the output resolution. Its dependencies will still be included in the resolution. Equivalent to pip-compile's `--unsafe-package` option.", + "description": "Specify a package to omit from the output resolution. Its dependencies will still be\nincluded in the resolution. Equivalent to pip-compile's `--unsafe-package` option.", "type": [ "array", "null" @@ -1385,35 +1374,35 @@ ] }, "no-index": { - "description": "Ignore all registry indexes (e.g., PyPI), instead relying on direct URL dependencies and those provided via `--find-links`.", + "description": "Ignore all registry indexes (e.g., PyPI), instead relying on direct URL dependencies and\nthose provided via `--find-links`.", "type": [ "boolean", "null" ] }, "no-sources": { - "description": "Ignore the `tool.uv.sources` table when resolving dependencies. Used to lock against the standards-compliant, publishable package metadata, as opposed to using any local or Git sources.", + "description": "Ignore the `tool.uv.sources` table when resolving dependencies. Used to lock against the\nstandards-compliant, publishable package metadata, as opposed to using any local or Git\nsources.", "type": [ "boolean", "null" ] }, "no-strip-extras": { - "description": "Include extras in the output file.\n\nBy default, uv strips extras, as any packages pulled in by the extras are already included as dependencies in the output file directly. 
Further, output files generated with `--no-strip-extras` cannot be used as constraints files in `install` and `sync` invocations.", + "description": "Include extras in the output file.\n\nBy default, uv strips extras, as any packages pulled in by the extras are already included\nas dependencies in the output file directly. Further, output files generated with\n`--no-strip-extras` cannot be used as constraints files in `install` and `sync` invocations.", "type": [ "boolean", "null" ] }, "no-strip-markers": { - "description": "Include environment markers in the output file generated by `uv pip compile`.\n\nBy default, uv strips environment markers, as the resolution generated by `compile` is only guaranteed to be correct for the target environment.", + "description": "Include environment markers in the output file generated by `uv pip compile`.\n\nBy default, uv strips environment markers, as the resolution generated by `compile` is\nonly guaranteed to be correct for the target environment.", "type": [ "boolean", "null" ] }, "only-binary": { - "description": "Only use pre-built wheels; don't build source distributions.\n\nWhen enabled, resolving will not run code from the given packages. The cached wheels of already-built source distributions will be reused, but operations that require building distributions will exit with an error.\n\nMultiple packages may be provided. Disable binaries for all packages with `:all:`. Clear previously specified packages with `:none:`.", + "description": "Only use pre-built wheels; don't build source distributions.\n\nWhen enabled, resolving will not run code from the given packages. The cached wheels of already-built\nsource distributions will be reused, but operations that require building distributions will\nexit with an error.\n\nMultiple packages may be provided. 
Disable binaries for all packages with `:all:`.\nClear previously specified packages with `:none:`.", "type": [ "array", "null" @@ -1423,21 +1412,21 @@ } }, "output-file": { - "description": "Write the requirements generated by `uv pip compile` to the given `requirements.txt` file.\n\nIf the file already exists, the existing versions will be preferred when resolving dependencies, unless `--upgrade` is also specified.", + "description": "Write the requirements generated by `uv pip compile` to the given `requirements.txt` file.\n\nIf the file already exists, the existing versions will be preferred when resolving\ndependencies, unless `--upgrade` is also specified.", "type": [ "string", "null" ] }, "prefix": { - "description": "Install packages into `lib`, `bin`, and other top-level folders under the specified directory, as if a virtual environment were present at that location.\n\nIn general, prefer the use of `--python` to install into an alternate environment, as scripts and other artifacts installed via `--prefix` will reference the installing interpreter, rather than any interpreter added to the `--prefix` directory, rendering them non-portable.", + "description": "Install packages into `lib`, `bin`, and other top-level folders under the specified\ndirectory, as if a virtual environment were present at that location.\n\nIn general, prefer the use of `--python` to install into an alternate environment, as\nscripts and other artifacts installed via `--prefix` will reference the installing\ninterpreter, rather than any interpreter added to the `--prefix` directory, rendering them\nnon-portable.", "type": [ "string", "null" ] }, "prerelease": { - "description": "The strategy to use when considering pre-release versions.\n\nBy default, uv will accept pre-releases for packages that _only_ publish pre-releases, along with first-party requirements that contain an explicit pre-release marker in the declared specifiers (`if-necessary-or-explicit`).", + "description": "The strategy to use when considering pre-release versions.\n\nBy default, uv will accept pre-releases for packages that _only_ publish pre-releases,\nalong with first-party requirements that contain an explicit pre-release marker in the\ndeclared specifiers (`if-necessary-or-explicit`).", "anyOf": [ { "$ref": "#/definitions/PrereleaseMode" @@ -1448,14 +1437,14 @@ ] }, "python": { - "description": "The Python interpreter into which packages should be installed.\n\nBy default, uv installs into the virtual environment in the current working directory or any parent directory. The `--python` option allows you to specify a different interpreter, which is intended for use in continuous integration (CI) environments or other automated workflows.\n\nSupported formats: - `3.10` looks for an installed Python 3.10 in the registry on Windows (see `py --list-paths`), or `python3.10` on Linux and macOS. - `python3.10` or `python.exe` looks for a binary with the given name in `PATH`. - `/home/ferris/.local/bin/python3.10` uses the exact Python at the given path.", + "description": "The Python interpreter into which packages should be installed.\n\nBy default, uv installs into the virtual environment in the current working directory or\nany parent directory. 
The `--python` option allows you to specify a different interpreter,\nwhich is intended for use in continuous integration (CI) environments or other automated\nworkflows.\n\nSupported formats:\n- `3.10` looks for an installed Python 3.10 in the registry on Windows (see\n `py --list-paths`), or `python3.10` on Linux and macOS.\n- `python3.10` or `python.exe` looks for a binary with the given name in `PATH`.\n- `/home/ferris/.local/bin/python3.10` uses the exact Python at the given path.", "type": [ "string", "null" ] }, "python-platform": { - "description": "The platform for which requirements should be resolved.\n\nRepresented as a \"target triple\", a string that describes the target platform in terms of its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or `aarch64-apple-darwin`.", + "description": "The platform for which requirements should be resolved.\n\nRepresented as a \"target triple\", a string that describes the target platform in terms of\nits CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or\n`aarch64-apple-darwin`.", "anyOf": [ { "$ref": "#/definitions/TargetTriple" @@ -1466,7 +1455,7 @@ ] }, "python-version": { - "description": "The minimum Python version that should be supported by the resolved requirements (e.g., `3.8` or `3.8.17`).\n\nIf a patch version is omitted, the minimum patch version is assumed. For example, `3.8` is mapped to `3.8.0`.", + "description": "The minimum Python version that should be supported by the resolved requirements (e.g.,\n`3.8` or `3.8.17`).\n\nIf a patch version is omitted, the minimum patch version is assumed. For example, `3.8` is\nmapped to `3.8.0`.", "anyOf": [ { "$ref": "#/definitions/PythonVersion" @@ -1484,7 +1473,7 @@ ] }, "reinstall-package": { - "description": "Reinstall a specific package, regardless of whether it's already installed. Implies `refresh-package`.", + "description": "Reinstall a specific package, regardless of whether it's already installed. Implies\n`refresh-package`.", "type": [ "array", "null" @@ -1494,14 +1483,14 @@ } }, "require-hashes": { - "description": "Require a matching hash for each requirement.\n\nHash-checking mode is all or nothing. If enabled, _all_ requirements must be provided with a corresponding hash or set of hashes. Additionally, if enabled, _all_ requirements must either be pinned to exact versions (e.g., `==1.0.0`), or be specified via direct URL.\n\nHash-checking mode introduces a number of additional constraints:\n\n- Git dependencies are not supported. - Editable installations are not supported. - Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or source archive (`.zip`, `.tar.gz`), as opposed to a directory.", + "description": "Require a matching hash for each requirement.\n\nHash-checking mode is all or nothing. If enabled, _all_ requirements must be provided\nwith a corresponding hash or set of hashes. 
Additionally, if enabled, _all_ requirements\nmust either be pinned to exact versions (e.g., `==1.0.0`), or be specified via direct URL.\n\nHash-checking mode introduces a number of additional constraints:\n\n- Git dependencies are not supported.\n- Editable installations are not supported.\n- Local dependencies are not supported, unless they point to a specific wheel (`.whl`) or\n source archive (`.zip`, `.tar.gz`), as opposed to a directory.", "type": [ "boolean", "null" ] }, "resolution": { - "description": "The strategy to use when selecting between the different compatible versions for a given package requirement.\n\nBy default, uv will use the latest compatible version of each package (`highest`).", + "description": "The strategy to use when selecting between the different compatible versions for a given\npackage requirement.\n\nBy default, uv will use the latest compatible version of each package (`highest`).", "anyOf": [ { "$ref": "#/definitions/ResolutionMode" @@ -1512,28 +1501,28 @@ ] }, "strict": { - "description": "Validate the Python environment, to detect packages with missing dependencies and other issues.", + "description": "Validate the Python environment, to detect packages with missing dependencies and other\nissues.", "type": [ "boolean", "null" ] }, "system": { - "description": "Install packages into the system Python environment.\n\nBy default, uv installs into the virtual environment in the current working directory or any parent directory. The `--system` option instructs uv to instead use the first Python found in the system `PATH`.\n\nWARNING: `--system` is intended for use in continuous integration (CI) environments and should be used with caution, as it can modify the system Python installation.", + "description": "Install packages into the system Python environment.\n\nBy default, uv installs into the virtual environment in the current working directory or\nany parent directory. The `--system` option instructs uv to instead use the first Python\nfound in the system `PATH`.\n\nWARNING: `--system` is intended for use in continuous integration (CI) environments and\nshould be used with caution, as it can modify the system Python installation.", "type": [ "boolean", "null" ] }, "target": { - "description": "Install packages into the specified directory, rather than into the virtual or system Python environment. The packages will be installed at the top-level of the directory.", + "description": "Install packages into the specified directory, rather than into the virtual or system Python\nenvironment. 
The packages will be installed at the top-level of the directory.", "type": [ "string", "null" ] }, "torch-backend": { - "description": "The backend to use when fetching packages in the PyTorch ecosystem.\n\nWhen set, uv will ignore the configured index URLs for packages in the PyTorch ecosystem, and will instead use the defined backend.\n\nFor example, when set to `cpu`, uv will use the CPU-only PyTorch index; when set to `cu126`, uv will use the PyTorch index for CUDA 12.6.\n\nThe `auto` mode will attempt to detect the appropriate PyTorch index based on the currently installed CUDA drivers.\n\nThis option is in preview and may change in any future release.", + "description": "The backend to use when fetching packages in the PyTorch ecosystem.\n\nWhen set, uv will ignore the configured index URLs for packages in the PyTorch ecosystem,\nand will instead use the defined backend.\n\nFor example, when set to `cpu`, uv will use the CPU-only PyTorch index; when set to `cu126`,\nuv will use the PyTorch index for CUDA 12.6.\n\nThe `auto` mode will attempt to detect the appropriate PyTorch index based on the currently\ninstalled CUDA drivers.\n\nThis option is in preview and may change in any future release.", "anyOf": [ { "$ref": "#/definitions/TorchMode" @@ -1544,7 +1533,7 @@ ] }, "universal": { - "description": "Perform a universal resolution, attempting to generate a single `requirements.txt` output file that is compatible with all operating systems, architectures, and Python implementations.\n\nIn universal mode, the current Python version (or user-provided `--python-version`) will be treated as a lower bound. For example, `--universal --python-version 3.7` would produce a universal resolution for Python 3.7 and later.", + "description": "Perform a universal resolution, attempting to generate a single `requirements.txt` output\nfile that is compatible with all operating systems, architectures, and Python\nimplementations.\n\nIn universal mode, the current Python version (or user-provided `--python-version`) will be\ntreated as a lower bound. 
For example, `--universal --python-version 3.7` would produce a\nuniversal resolution for Python 3.7 and later.", "type": [ "boolean", "null" @@ -1558,7 +1547,7 @@ ] }, "upgrade-package": { - "description": "Allow upgrades for a specific package, ignoring pinned versions in any existing output file.\n\nAccepts both standalone package names (`ruff`) and version specifiers (`ruff<0.5.0`).", + "description": "Allow upgrades for a specific package, ignoring pinned versions in any existing output\nfile.\n\nAccepts both standalone package names (`ruff`) and version specifiers (`ruff<0.5.0`).", "type": [ "array", "null" @@ -1568,7 +1557,7 @@ } }, "verify-hashes": { - "description": "Validate any hashes provided in the requirements file.\n\nUnlike `--require-hashes`, `--verify-hashes` does not require that all requirements have hashes; instead, it will limit itself to verifying the hashes of those requirements that do include them.", + "description": "Validate any hashes provided in the requirements file.\n\nUnlike `--require-hashes`, `--verify-hashes` does not require that all requirements have\nhashes; instead, it will limit itself to verifying the hashes of those requirements that do\ninclude them.", "type": [ "boolean", "null" @@ -1577,42 +1566,35 @@ }, "additionalProperties": false }, + "PortablePathBuf": { + "type": "string" + }, "PrereleaseMode": { "oneOf": [ { "description": "Disallow all pre-release versions.", "type": "string", - "enum": [ - "disallow" - ] + "const": "disallow" }, { "description": "Allow all pre-release versions.", "type": "string", - "enum": [ - "allow" - ] + "const": "allow" }, { "description": "Allow pre-release versions if all versions of a package are pre-release.", "type": "string", - "enum": [ - "if-necessary" - ] + "const": "if-necessary" }, { - "description": "Allow pre-release versions for first-party packages with explicit pre-release markers in their version requirements.", + "description": "Allow pre-release versions for first-party packages with explicit pre-release markers in\ntheir version requirements.", "type": "string", - "enum": [ - "explicit" - ] + "const": "explicit" }, { - "description": "Allow pre-release versions if all versions of a package are pre-release, or if the package has an explicit pre-release marker in its version requirements.", + "description": "Allow pre-release versions if all versions of a package are pre-release, or if the package\nhas an explicit pre-release marker in its version requirements.", "type": "string", - "enum": [ - "if-necessary-or-explicit" - ] + "const": "if-necessary-or-explicit" } ] }, @@ -1621,23 +1603,17 @@ { "description": "Automatically download managed Python installations when needed.", "type": "string", - "enum": [ - "automatic" - ] + "const": "automatic" }, { "description": "Do not automatically download managed Python installations; require explicit installation.", "type": "string", - "enum": [ - "manual" - ] + "const": "manual" }, { "description": "Do not ever allow Python downloads.", "type": "string", - "enum": [ - "never" - ] + "const": "never" } ] }, @@ -1646,30 +1622,22 @@ { "description": "Only use managed Python installations; never use system Python installations.", "type": "string", - "enum": [ - "only-managed" - ] + "const": "only-managed" }, { - "description": "Prefer managed Python installations over system Python installations.\n\nSystem Python installations are still preferred over downloading managed Python versions. 
Use `only-managed` to always fetch a managed Python version.", "type": "string", - "enum": [ - "managed" - ] + "const": "managed" }, { "description": "Prefer system Python installations over managed Python installations.\n\nIf a system Python installation cannot be found, a managed Python installation can be used.", "type": "string", - "enum": [ - "system" - ] + "const": "system" }, { "description": "Only use system Python installations; never use managed Python installations.", "type": "string", - "enum": [ - "only-system" - ] + "const": "only-system" } ] }, @@ -1691,32 +1659,25 @@ { "description": "Resolve the highest compatible version of each package.", "type": "string", - "enum": [ - "highest" - ] + "const": "highest" }, { "description": "Resolve the lowest compatible version of each package.", "type": "string", - "enum": [ - "lowest" - ] + "const": "lowest" }, { - "description": "Resolve the lowest compatible version of any direct dependencies, and the highest compatible version of any transitive dependencies.", + "description": "Resolve the lowest compatible version of any direct dependencies, and the highest\ncompatible version of any transitive dependencies.", "type": "string", - "enum": [ - "lowest-direct" - ] + "const": "lowest-direct" } ] }, "SchemaConflictItem": { - "description": "A single item in a conflicting set.\n\nEach item is a pair of an (optional) package and a corresponding extra or group name for that package.", + "description": "A single item in a conflicting set.\n\nEach item is a pair of an (optional) package and a corresponding extra or group name for that\npackage.", "type": "object", "properties": { "extra": { - "default": null, "anyOf": [ { "$ref": "#/definitions/ExtraName" }, @@ -1724,10 +1685,10 @@ { "type": "null" } - ] + ], + "default": null }, "group": { - "default": null, "anyOf": [ { "$ref": "#/definitions/GroupName" }, @@ -1735,10 +1696,10 @@ { "type": "null" } - ] + ], + "default": null }, "package": { - "default": null, "anyOf": [ { "$ref": "#/definitions/PackageName" }, @@ -1746,33 +1707,34 @@ { "type": "null" } - ] + ], + "default": null } } }, "SchemaConflictSet": { - "description": "Like [`ConflictSet`], but for deserialization in `pyproject.toml`.\n\nThe schema format is different from the in-memory format. Specifically, the schema format does not allow specifying the package name (or will make it optional in the future), where as the in-memory format needs the package name.", + "description": "Like [`ConflictSet`], but for deserialization in `pyproject.toml`.\n\nThe schema format is different from the in-memory format. Specifically, the\nschema format does not allow specifying the package name (or will make it\noptional in the future), whereas the in-memory format needs the package\nname.", "type": "array", "items": { "$ref": "#/definitions/SchemaConflictItem" } }, "SchemaConflicts": { - "description": "Like [`Conflicts`], but for deserialization in `pyproject.toml`.\n\nThe schema format is different from the in-memory format. Specifically, the schema format does not allow specifying the package name (or will make it optional in the future), where as the in-memory format needs the package name.\n\nN.B. `Conflicts` is still used for (de)serialization. 
Specifically, in the lock file, where the package name is required.", + "description": "Like [`Conflicts`], but for deserialization in `pyproject.toml`.\n\nThe schema format is different from the in-memory format. Specifically, the\nschema format does not allow specifying the package name (or will make it\noptional in the future), whereas the in-memory format needs the package\nname.\n\nN.B. `Conflicts` is still used for (de)serialization. Specifically, in the\nlock file, where the package name is required.", "type": "array", "items": { "$ref": "#/definitions/SchemaConflictSet" } }, + "SerdePattern": { + "type": "string" + }, "Source": { "description": "A `tool.uv.sources` value.", "anyOf": [ { - "description": "A remote Git repository, available over HTTPS or SSH.\n\nExample: ```toml flask = { git = \"https://github.com/pallets/flask\", tag = \"3.0.0\" } ```", + "description": "A remote Git repository, available over HTTPS or SSH.\n\nExample:\n```toml\nflask = { git = \"https://github.com/pallets/flask\", tag = \"3.0.0\" }\n```", "type": "object", - "required": [ - "git" - ], "properties": { "branch": { "type": [ @@ -1792,8 +1754,11 @@ }, "git": { "description": "The repository URL (without the `git+` prefix).", - "type": "string", - "format": "uri" + "allOf": [ + { + "$ref": "#/definitions/DisplaySafeUrl" + } + ] }, "group": { "anyOf": [ @@ -1818,7 +1783,7 @@ "description": "The path to the directory with the `pyproject.toml`, if it's not in the archive root.", "anyOf": [ { - "$ref": "#/definitions/String" + "$ref": "#/definitions/PortablePathBuf" }, { "type": "null" @@ -1832,14 +1797,14 @@ ] } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "git" + ] }, { - "description": "A remote `http://` or `https://` URL, either a wheel (`.whl`) or a source distribution (`.zip`, `.tar.gz`).\n\nExample: ```toml flask = { url = \"https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl\" } ```", + "description": "A remote `http://` or `https://` URL, either a wheel (`.whl`) or a source distribution\n(`.zip`, `.tar.gz`).\n\nExample:\n```toml\nflask = { url = \"https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl\" }\n```", "type": "object", - "required": [ - "url" - ], "properties": { "extra": { "anyOf": [ @@ -1865,10 +1830,10 @@ "$ref": "#/definitions/MarkerTree" }, "subdirectory": { - "description": "For source distributions, the path to the directory with the `pyproject.toml`, if it's not in the archive root.", + "description": "For source distributions, the path to the directory with the `pyproject.toml`, if it's\nnot in the archive root.", "anyOf": [ { - "$ref": "#/definitions/String" + "$ref": "#/definitions/PortablePathBuf" }, { "type": "null" @@ -1876,18 +1841,17 @@ ] }, "url": { - "type": "string", - "format": "uri" + "$ref": "#/definitions/DisplaySafeUrl" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "url" + ] }, { - "description": "The path to a dependency, either a wheel (a `.whl` file), source distribution (a `.zip` or `.tar.gz` file), or source tree (i.e., a directory containing a `pyproject.toml` or `setup.py` file in the root).", + "description": "The path to a dependency, either a wheel (a `.whl` file), source distribution (a `.zip` or\n`.tar.gz` file), or source tree (i.e., a directory containing a `pyproject.toml` or\n`setup.py` file in the 
root).", "type": "object", - "required": [ - "path" - ], "properties": { "editable": { "description": "`false` by default.", @@ -1920,24 +1884,24 @@ "$ref": "#/definitions/MarkerTree" }, "package": { - "description": "Whether to treat the dependency as a buildable Python package (`true`) or as a virtual package (`false`). If `false`, the package will not be built or installed, but its dependencies will be included in the virtual environment.\n\nWhen omitted, the package status is inferred based on the presence of a `[build-system]` in the project's `pyproject.toml`.", + "description": "Whether to treat the dependency as a buildable Python package (`true`) or as a virtual\npackage (`false`). If `false`, the package will not be built or installed, but its\ndependencies will be included in the virtual environment.\n\nWhen omitted, the package status is inferred based on the presence of a `[build-system]`\nin the project's `pyproject.toml`.", "type": [ "boolean", "null" ] }, "path": { - "$ref": "#/definitions/String" + "$ref": "#/definitions/PortablePathBuf" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "path" + ] }, { "description": "A dependency pinned to a specific index, e.g., `torch` after setting `torch` to `https://download.pytorch.org/whl/cu118`.", "type": "object", - "required": [ - "index" - ], "properties": { "extra": { "anyOf": [ @@ -1966,14 +1930,14 @@ "$ref": "#/definitions/MarkerTree" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "index" + ] }, { "description": "A dependency on another package in the workspace.", "type": "object", - "required": [ - "workspace" - ], "properties": { "extra": { "anyOf": [ @@ -1999,18 +1963,18 @@ "$ref": "#/definitions/MarkerTree" }, "workspace": { - "description": "When set to `false`, the package will be fetched from the remote index, rather than included as a workspace package.", + "description": "When set to `false`, the package will be fetched from the remote index, rather than\nincluded as a workspace package.", "type": "boolean" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "workspace" + ] } ] }, "Sources": { - "$ref": "#/definitions/SourcesWire" - }, - "SourcesWire": { "anyOf": [ { "$ref": "#/definitions/Source" @@ -2024,25 +1988,22 @@ ] }, "StaticMetadata": { - "description": "A subset of the Python Package Metadata 2.3 standard as specified in .", + "description": "A subset of the Python Package Metadata 2.3 standard as specified in\n.", "type": "object", - "required": [ - "name" - ], "properties": { "name": { "$ref": "#/definitions/PackageName" }, "provides-extras": { - "default": [], "type": "array", + "default": [], "items": { "$ref": "#/definitions/ExtraName" } }, "requires-dist": { - "default": [], "type": "array", + "default": [], "items": { "$ref": "#/definitions/Requirement" } @@ -2061,289 +2022,218 @@ "null" ] } - } + }, + "required": [ + "name" + ] }, "StatusCode": { "description": "HTTP status code (100-599)", - "type": "integer", - "format": "uint16", - "maximum": 599.0, - "minimum": 100.0 - }, - "String": { - "type": "string" + "type": "number", + "maximum": 599, + "minimum": 100 }, "TargetTriple": { - "description": "The supported target triples. Each triple consists of an architecture, vendor, and operating system.\n\nSee: ", + "description": "The supported target triples. 
Each triple consists of an architecture, vendor, and operating\nsystem.\n\nSee: ", "oneOf": [ { "description": "An alias for `x86_64-pc-windows-msvc`, the default target for Windows.", "type": "string", - "enum": [ - "windows" - ] + "const": "windows" }, { "description": "An alias for `x86_64-unknown-linux-gnu`, the default target for Linux.", "type": "string", - "enum": [ - "linux" - ] + "const": "linux" }, { "description": "An alias for `aarch64-apple-darwin`, the default target for macOS.", "type": "string", - "enum": [ - "macos" - ] + "const": "macos" }, { "description": "A 64-bit x86 Windows target.", "type": "string", - "enum": [ - "x86_64-pc-windows-msvc" - ] + "const": "x86_64-pc-windows-msvc" }, { "description": "A 32-bit x86 Windows target.", "type": "string", - "enum": [ - "i686-pc-windows-msvc" - ] + "const": "i686-pc-windows-msvc" }, { "description": "An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`.", "type": "string", - "enum": [ - "x86_64-unknown-linux-gnu" - ] + "const": "x86_64-unknown-linux-gnu" }, { - "description": "An ARM-based macOS target, as seen on Apple Silicon devices\n\nBy default, assumes the least-recent, non-EOL macOS version (13.0), but respects the `MACOSX_DEPLOYMENT_TARGET` environment variable if set.", + "description": "An ARM-based macOS target, as seen on Apple Silicon devices\n\nBy default, assumes the least-recent, non-EOL macOS version (13.0), but respects\nthe `MACOSX_DEPLOYMENT_TARGET` environment variable if set.", "type": "string", - "enum": [ - "aarch64-apple-darwin" - ] + "const": "aarch64-apple-darwin" }, { - "description": "An x86 macOS target.\n\nBy default, assumes the least-recent, non-EOL macOS version (13.0), but respects the `MACOSX_DEPLOYMENT_TARGET` environment variable if set.", + "description": "An x86 macOS target.\n\nBy default, assumes the least-recent, non-EOL macOS version (13.0), but respects\nthe `MACOSX_DEPLOYMENT_TARGET` environment variable if set.", "type": "string", - "enum": [ - "x86_64-apple-darwin" - ] + "const": "x86_64-apple-darwin" }, { "description": "An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`.", "type": "string", - "enum": [ - "aarch64-unknown-linux-gnu" - ] + "const": "aarch64-unknown-linux-gnu" }, { "description": "An ARM64 Linux target.", "type": "string", - "enum": [ - "aarch64-unknown-linux-musl" - ] + "const": "aarch64-unknown-linux-musl" }, { "description": "An `x86_64` Linux target.", "type": "string", - "enum": [ - "x86_64-unknown-linux-musl" - ] + "const": "x86_64-unknown-linux-musl" }, { "description": "An `x86_64` target for the `manylinux2014` platform. 
Equivalent to `x86_64-manylinux_2_17`.", "type": "string", - "enum": [ - "x86_64-manylinux2014" - ] + "const": "x86_64-manylinux2014" }, { "description": "An `x86_64` target for the `manylinux_2_17` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_17" - ] + "const": "x86_64-manylinux_2_17" }, { "description": "An `x86_64` target for the `manylinux_2_28` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_28" - ] + "const": "x86_64-manylinux_2_28" }, { "description": "An `x86_64` target for the `manylinux_2_31` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_31" - ] + "const": "x86_64-manylinux_2_31" }, { "description": "An `x86_64` target for the `manylinux_2_32` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_32" - ] + "const": "x86_64-manylinux_2_32" }, { "description": "An `x86_64` target for the `manylinux_2_33` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_33" - ] + "const": "x86_64-manylinux_2_33" }, { "description": "An `x86_64` target for the `manylinux_2_34` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_34" - ] + "const": "x86_64-manylinux_2_34" }, { "description": "An `x86_64` target for the `manylinux_2_35` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_35" - ] + "const": "x86_64-manylinux_2_35" }, { "description": "An `x86_64` target for the `manylinux_2_36` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_36" - ] + "const": "x86_64-manylinux_2_36" }, { "description": "An `x86_64` target for the `manylinux_2_37` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_37" - ] + "const": "x86_64-manylinux_2_37" }, { "description": "An `x86_64` target for the `manylinux_2_38` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_38" - ] + "const": "x86_64-manylinux_2_38" }, { "description": "An `x86_64` target for the `manylinux_2_39` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_39" - ] + "const": "x86_64-manylinux_2_39" }, { "description": "An `x86_64` target for the `manylinux_2_40` platform.", "type": "string", - "enum": [ - "x86_64-manylinux_2_40" - ] + "const": "x86_64-manylinux_2_40" }, { "description": "An ARM64 target for the `manylinux2014` platform. 
Equivalent to `aarch64-manylinux_2_17`.", "type": "string", - "enum": [ - "aarch64-manylinux2014" - ] + "const": "aarch64-manylinux2014" }, { "description": "An ARM64 target for the `manylinux_2_17` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_17" - ] + "const": "aarch64-manylinux_2_17" }, { "description": "An ARM64 target for the `manylinux_2_28` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_28" - ] + "const": "aarch64-manylinux_2_28" }, { "description": "An ARM64 target for the `manylinux_2_31` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_31" - ] + "const": "aarch64-manylinux_2_31" }, { "description": "An ARM64 target for the `manylinux_2_32` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_32" - ] + "const": "aarch64-manylinux_2_32" }, { "description": "An ARM64 target for the `manylinux_2_33` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_33" - ] + "const": "aarch64-manylinux_2_33" }, { "description": "An ARM64 target for the `manylinux_2_34` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_34" - ] + "const": "aarch64-manylinux_2_34" }, { "description": "An ARM64 target for the `manylinux_2_35` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_35" - ] + "const": "aarch64-manylinux_2_35" }, { "description": "An ARM64 target for the `manylinux_2_36` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_36" - ] + "const": "aarch64-manylinux_2_36" }, { "description": "An ARM64 target for the `manylinux_2_37` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_37" - ] + "const": "aarch64-manylinux_2_37" }, { "description": "An ARM64 target for the `manylinux_2_38` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_38" - ] + "const": "aarch64-manylinux_2_38" }, { "description": "An ARM64 target for the `manylinux_2_39` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_39" - ] + "const": "aarch64-manylinux_2_39" }, { "description": "An ARM64 target for the `manylinux_2_40` platform.", "type": "string", - "enum": [ - "aarch64-manylinux_2_40" - ] + "const": "aarch64-manylinux_2_40" }, { "description": "A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12.", "type": "string", - "enum": [ - "wasm32-pyodide2024" - ] + "const": "wasm32-pyodide2024" } ] }, + "ToolUvDependencyGroups": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/DependencyGroupSettings" + } + }, "ToolUvSources": { "type": "object", "additionalProperties": { @@ -2354,13 +2244,13 @@ "type": "object", "properties": { "exclude": { - "description": "Packages to exclude as workspace members. 
If a package matches both `members` and\n`exclude`, it will be excluded.\n\nSupports both globs and explicit paths.\n\nFor more information on the glob syntax, refer to the [`glob` documentation](https://docs.rs/glob/latest/glob/struct.Pattern.html).", "type": [ "array", "null" ], "items": { - "$ref": "#/definitions/String" + "$ref": "#/definitions/SerdePattern" } }, "members": { @@ -2370,7 +2260,7 @@ "null" ], "items": { - "$ref": "#/definitions/String" + "$ref": "#/definitions/SerdePattern" } } }, @@ -2382,184 +2272,217 @@ { "description": "Select the appropriate PyTorch index based on the operating system and CUDA driver version.", "type": "string", - "enum": [ - "auto" - ] + "const": "auto" }, { "description": "Use the CPU-only PyTorch index.", "type": "string", - "enum": [ - "cpu" - ] + "const": "cpu" }, { "description": "Use the PyTorch index for CUDA 12.8.", "type": "string", - "enum": [ - "cu128" - ] + "const": "cu128" }, { "description": "Use the PyTorch index for CUDA 12.6.", "type": "string", - "enum": [ - "cu126" - ] + "const": "cu126" }, { "description": "Use the PyTorch index for CUDA 12.5.", "type": "string", - "enum": [ - "cu125" - ] + "const": "cu125" }, { "description": "Use the PyTorch index for CUDA 12.4.", "type": "string", - "enum": [ - "cu124" - ] + "const": "cu124" }, { "description": "Use the PyTorch index for CUDA 12.3.", "type": "string", - "enum": [ - "cu123" - ] + "const": "cu123" }, { "description": "Use the PyTorch index for CUDA 12.2.", "type": "string", - "enum": [ - "cu122" - ] + "const": "cu122" }, { "description": "Use the PyTorch index for CUDA 12.1.", "type": "string", - "enum": [ - "cu121" - ] + "const": "cu121" }, { "description": "Use the PyTorch index for CUDA 12.0.", "type": "string", - "enum": [ - "cu120" - ] + "const": "cu120" }, { "description": "Use the PyTorch index for CUDA 11.8.", "type": "string", - "enum": [ - "cu118" - ] + "const": "cu118" }, { "description": "Use the PyTorch index for CUDA 11.7.", "type": "string", - "enum": [ - "cu117" - ] + "const": "cu117" }, { "description": "Use the PyTorch index for CUDA 11.6.", "type": "string", - "enum": [ - "cu116" - ] + "const": "cu116" }, { "description": "Use the PyTorch index for CUDA 11.5.", "type": "string", - "enum": [ - "cu115" - ] + "const": "cu115" }, { "description": "Use the PyTorch index for CUDA 11.4.", "type": "string", - "enum": [ - "cu114" - ] + "const": "cu114" }, { "description": "Use the PyTorch index for CUDA 11.3.", "type": "string", - "enum": [ - "cu113" - ] + "const": "cu113" }, { "description": "Use the PyTorch index for CUDA 11.2.", "type": "string", - "enum": [ - "cu112" - ] + "const": "cu112" }, { "description": "Use the PyTorch index for CUDA 11.1.", "type": "string", - "enum": [ - "cu111" - ] + "const": "cu111" }, { "description": "Use the PyTorch index for CUDA 11.0.", "type": "string", - "enum": [ - "cu110" - ] + "const": "cu110" }, { "description": "Use the PyTorch index for CUDA 10.2.", "type": "string", - "enum": [ - "cu102" - ] + "const": "cu102" }, { "description": "Use the PyTorch index for CUDA 10.1.", "type": "string", - "enum": [ - "cu101" - ] + "const": "cu101" }, { "description": "Use the PyTorch index for CUDA 10.0.", "type": "string", - "enum": [ - "cu100" - ] + "const": "cu100" }, { "description": "Use the PyTorch index for CUDA 9.2.", "type": "string", - "enum": [ - "cu92" - ] + "const": "cu92" }, { "description": "Use the PyTorch index for CUDA 9.1.", "type": "string", - "enum": [ - "cu91" - ] + "const": "cu91" }, { "description": "Use the PyTorch 
index for CUDA 9.0.", "type": "string", - "enum": [ - "cu90" - ] + "const": "cu90" }, { "description": "Use the PyTorch index for CUDA 8.0.", "type": "string", - "enum": [ - "cu80" - ] + "const": "cu80" + }, + { + "description": "Use the PyTorch index for ROCm 6.3.", + "type": "string", + "const": "rocm6.3" + }, + { + "description": "Use the PyTorch index for ROCm 6.2.4.", + "type": "string", + "const": "rocm6.2.4" + }, + { + "description": "Use the PyTorch index for ROCm 6.2.", + "type": "string", + "const": "rocm6.2" + }, + { + "description": "Use the PyTorch index for ROCm 6.1.", + "type": "string", + "const": "rocm6.1" + }, + { + "description": "Use the PyTorch index for ROCm 6.0.", + "type": "string", + "const": "rocm6.0" + }, + { + "description": "Use the PyTorch index for ROCm 5.7.", + "type": "string", + "const": "rocm5.7" + }, + { + "description": "Use the PyTorch index for ROCm 5.6.", + "type": "string", + "const": "rocm5.6" + }, + { + "description": "Use the PyTorch index for ROCm 5.5.", + "type": "string", + "const": "rocm5.5" + }, + { + "description": "Use the PyTorch index for ROCm 5.4.2.", + "type": "string", + "const": "rocm5.4.2" + }, + { + "description": "Use the PyTorch index for ROCm 5.4.", + "type": "string", + "const": "rocm5.4" + }, + { + "description": "Use the PyTorch index for ROCm 5.3.", + "type": "string", + "const": "rocm5.3" + }, + { + "description": "Use the PyTorch index for ROCm 5.2.", + "type": "string", + "const": "rocm5.2" + }, + { + "description": "Use the PyTorch index for ROCm 5.1.1.", + "type": "string", + "const": "rocm5.1.1" + }, + { + "description": "Use the PyTorch index for ROCm 4.2.", + "type": "string", + "const": "rocm4.2" + }, + { + "description": "Use the PyTorch index for ROCm 4.1.", + "type": "string", + "const": "rocm4.1" + }, + { + "description": "Use the PyTorch index for ROCm 4.0.1.", + "type": "string", + "const": "rocm4.0.1" + }, + { + "description": "Use the PyTorch index for Intel XPU.", + "type": "string", + "const": "xpu" } ] }, @@ -2579,9 +2502,7 @@ { "description": "Try trusted publishing when we're already in GitHub Actions, continue if that fails.", "type": "string", - "enum": [ - "automatic" - ] + "const": "automatic" } ] }, @@ -2590,39 +2511,39 @@ "type": "object", "properties": { "data": { - "default": null, "type": [ "string", "null" - ] + ], + "default": null }, "headers": { - "default": null, "type": [ "string", "null" - ] + ], + "default": null }, "platlib": { - "default": null, "type": [ "string", "null" - ] + ], + "default": null }, "purelib": { - "default": null, "type": [ "string", "null" - ] + ], + "default": null }, "scripts": { - "default": null, "type": [ "string", "null" - ] + ], + "default": null } }, "additionalProperties": false
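
Taken together, this hunk mostly re-wraps the generated doc comments, switches single-value `enum` arrays to `const`, and introduces the new `PortablePathBuf`, `SerdePattern`, and `DisplaySafeUrl` definitions plus the ROCm and XPU `torch-backend` variants. For orientation, here is a minimal, illustrative `pyproject.toml` sketch exercising a few of the settings this schema validates. It is a sketch under stated assumptions, not part of the patch: the project name, extras, workspace paths, and the extra index URL are hypothetical, while the `flask` Git source mirrors the example embedded in the schema's own `Source` description.

```toml
[project]
name = "example"                     # hypothetical project
version = "0.1.0"
requires-python = ">=3.10"
dependencies = ["flask"]

[project.optional-dependencies]
cpu = []                             # hypothetical extras, referenced below
gpu = []

[tool.uv]
# SchemaConflicts: an array of conflict sets, each item naming an
# (optional) package plus an extra or group.
conflicts = [[{ extra = "cpu" }, { extra = "gpu" }]]

[tool.uv.workspace]
# ToolUvWorkspace: `members` and `exclude` accept globs and explicit paths;
# a package matching both is excluded.
members = ["packages/*"]
exclude = ["packages/experimental"]  # hypothetical path

[tool.uv.pip]
# `extra-index-url` entries take priority over `index-url`.
index-url = "https://pypi.org/simple"
extra-index-url = ["https://internal.example.org/simple"]  # hypothetical
# Preview: select the PyTorch index (CUDA, ROCm, or XPU) automatically
# based on the detected accelerator, per the TorchMode variants above.
torch-backend = "auto"

[tool.uv.sources]
# The `Source` Git variant, as shown in the schema's own example.
flask = { git = "https://github.com/pallets/flask", tag = "3.0.0" }
```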