Mirror of https://github.com/astral-sh/uv.git (synced 2025-07-07 21:35:00 +00:00)

Merge remote-tracking branch 'upstream/main' into pyenv_compatybility

Commit 56e7a15048 — 403 changed files with 40456 additions and 20670 deletions
.github/renovate.json5 (16 changed lines, vendored)

@@ -2,13 +2,17 @@
   $schema: "https://docs.renovatebot.com/renovate-schema.json",
   dependencyDashboard: true,
   suppressNotifications: ["prEditedNotification"],
-  extends: ["config:recommended"],
+  extends: [
+    "config:recommended",
+    // For tool versions defined in GitHub Actions:
+    "customManagers:githubActionsVersions",
+  ],
   labels: ["internal"],
   schedule: ["before 4am on Monday"],
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
-  enabledManagers: ["github-actions", "pre-commit", "cargo", "regex"],
+  enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
@@ -21,13 +25,13 @@
     {
       // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
       // See: https://github.com/astral-sh/uv/issues/3642
-      matchPackagePatterns: ["zip"],
+      matchPackageNames: ["/zip/"],
       matchManagers: ["cargo"],
       enabled: false,
     },
     {
       // Create dedicated branches to update references to dependencies in the documentation.
-      matchPaths: ["docs/**/*.md"],
+      matchFileNames: ["docs/**/*.md"],
       commitMessageTopic: "documentation references to {{{depName}}}",
       semanticCommitType: "docs",
       semanticCommitScope: null,
@@ -38,7 +42,7 @@
       groupName: "Artifact GitHub Actions dependencies",
       matchManagers: ["github-actions"],
       matchDatasources: ["gitea-tags", "github-tags"],
-      matchPackagePatterns: ["actions/.*-artifact"],
+      matchPackageNames: ["/actions/.*-artifact/"],
       description: "Weekly update of artifact-related GitHub Actions dependencies",
     },
     {
@@ -67,7 +71,7 @@
       // of the PEP 440 and PEP 508 crates, which we vendored and forked.
       groupName: "pyo3",
       matchManagers: ["cargo"],
-      matchPackagePatterns: ["pyo3"],
+      matchPackageNames: ["/pyo3/"],
       description: "Weekly update of pyo3 dependencies",
       enabled: false,
     },
.github/workflows/build-docker.yml (3 changed lines, vendored)

@@ -161,16 +161,19 @@ jobs:
           - alpine:3.20,alpine3.20,alpine
           - debian:bookworm-slim,bookworm-slim,debian-slim
           - buildpack-deps:bookworm,bookworm,debian
+          - python:3.13-alpine,python3.13-alpine
           - python:3.12-alpine,python3.12-alpine
           - python:3.11-alpine,python3.11-alpine
           - python:3.10-alpine,python3.10-alpine
           - python:3.9-alpine,python3.9-alpine
           - python:3.8-alpine,python3.8-alpine
+          - python:3.13-bookworm,python3.13-bookworm
           - python:3.12-bookworm,python3.12-bookworm
           - python:3.11-bookworm,python3.11-bookworm
           - python:3.10-bookworm,python3.10-bookworm
           - python:3.9-bookworm,python3.9-bookworm
           - python:3.8-bookworm,python3.8-bookworm
+          - python:3.13-slim-bookworm,python3.13-bookworm-slim
           - python:3.12-slim-bookworm,python3.12-bookworm-slim
           - python:3.11-slim-bookworm,python3.11-bookworm-slim
           - python:3.10-slim-bookworm,python3.10-bookworm-slim
.github/workflows/ci.yml (80 changed lines, vendored)

@@ -81,6 +81,17 @@ jobs:
       - name: "Python type check"
         run: uvx mypy

+      - name: "Lint shell scripts"
+        uses: ludeeus/action-shellcheck@2.0.0
+        env:
+          # renovate: datasource=github-tags depName=koalaman/shellcheck
+          SHELLCHECK_VERSION: "v0.10.0"
+          SHELLCHECK_OPTS: --shell bash
+        with:
+          version: ${{ env.SHELLCHECK_VERSION }}
+          severity: style
+          check_together: "yes"
+
   cargo-clippy:
     timeout-minutes: 10
     needs: determine_changes
@@ -98,7 +109,8 @@ jobs:
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings

   cargo-clippy-xwin:
-    timeout-minutes: 10
+    # Do not set timeout below 15 minutes as uncached xwin Windows SDK download can take 10+ minutes
+    timeout-minutes: 20
     needs: determine_changes
     if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     runs-on: ubuntu-latest
@@ -192,8 +204,8 @@ jobs:
       - name: "Smoke test"
         run: |
           uv="./target/debug/uv"
-          $uv venv
-          $uv pip install ruff
+          $uv venv -v
+          $uv pip install ruff -v

       - name: "Smoke test completion"
         run: |
@@ -238,8 +250,8 @@ jobs:
       - name: "Smoke test"
         run: |
           uv="./target/debug/uv"
-          $uv venv
-          $uv pip install ruff
+          $uv venv -v
+          $uv pip install ruff -v

   cargo-test-windows:
     timeout-minutes: 15
@@ -296,8 +308,8 @@ jobs:
           UV_STACK_SIZE: 2000000 # 2 megabyte, double the default on windows
         run: |
           Set-Alias -Name uv -Value ./target/debug/uv
-          uv venv
-          uv pip install ruff
+          uv venv -v
+          uv pip install ruff -v

       - name: "Smoke test completion"
         working-directory: ${{ env.UV_WORKSPACE }}
@@ -313,7 +325,8 @@ jobs:

   # Separate jobs for the nightly crate
   windows-trampoline-check:
-    timeout-minutes: 10
+    # Do not set timeout below 15 minutes as uncached xwin Windows SDK download can take 10+ minutes
+    timeout-minutes: 20
     needs: determine_changes
     if: ${{ github.repository == 'astral-sh/uv' && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     runs-on: ubuntu-latest
@@ -416,6 +429,7 @@ jobs:
       MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
     steps:
       - uses: actions/checkout@v4
+      - uses: astral-sh/setup-uv@v3
       - uses: actions/setup-python@v5
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@@ -423,17 +437,12 @@ jobs:
         with:
           ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}

-      - name: "Install dependencies (public)"
-        run: pip install -r docs/requirements.txt
       - name: "Build docs (public)"
-        run: mkdocs build --strict -f mkdocs.public.yml
+        run: uvx --with-requirements docs/requirements.txt mkdocs build --strict -f mkdocs.public.yml

-      - name: "Install dependencies (insiders)"
-        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: pip install -r docs/requirements-insiders.txt
       - name: "Build docs (insiders)"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: mkdocs build --strict -f mkdocs.insiders.yml
+        run: uvx --with-requirements docs/requirements.txt mkdocs build --strict -f mkdocs.insiders.yml

   build-binary-linux:
     timeout-minutes: 10
@@ -644,7 +653,24 @@ jobs:

       - name: "Check install"
         run: |
-          ./uv pip install anyio
+          ./uv pip install -v anyio
+
+      - name: "Install free-threaded Python via uv"
+        run: |
+          ./uv python install 3.13t
+          ./uv venv -p 3.13t --python-preference only-managed
+
+      - name: "Check version"
+        run: |
+          .venv/bin/python --version
+
+      - name: "Check is free-threaded"
+        run: |
+          .venv/bin/python -c "import sys; exit(1) if sys._is_gil_enabled() else exit(0)"
+
+      - name: "Check install"
+        run: |
+          ./uv pip install -v anyio

   integration-test-pypy-linux:
     timeout-minutes: 10
@@ -783,7 +809,7 @@ jobs:

       - uses: actions/setup-python@v5
         with:
-          python-version: "graalpy24.0"
+          python-version: "graalpy24.1"

       - name: "Download binary"
         uses: actions/download-artifact@v4
@@ -796,7 +822,6 @@ jobs:
       - name: Graalpy info
         run: |
           which graalpy
-          echo "GRAAL_PYTHONHOME=$(graalpy -c 'print(__graalpython__.home)')" >> $GITHUB_ENV

       - name: "Create a virtual environment"
         run: |
@@ -852,7 +877,7 @@ jobs:
     steps:
       - uses: timfel/setup-python@fc9bcb4a04f5b1ea7d678c2ca7ea1c479a2468d7
         with:
-          python-version: "graalpy24.0"
+          python-version: "graalpy24.1"

       - name: "Download binary"
         uses: actions/download-artifact@v4
@@ -914,7 +939,7 @@ jobs:
     steps:
       - uses: actions/setup-python@v5
         with:
-          python-version: "3.12"
+          python-version: "3.12.7"

       - name: "Download binary"
         uses: actions/download-artifact@v4
@@ -946,21 +971,21 @@ jobs:
           ./uv add anyio

       - name: "Sync to the system Python"
-        run: ./uv sync --python 3.12
+        run: ./uv sync -v --python 3.12
         env:
-          UV_PROJECT_ENVIRONMENT: "/opt/hostedtoolcache/Python/3.12.6/x64"
+          UV_PROJECT_ENVIRONMENT: "/opt/hostedtoolcache/Python/3.12.7/x64"

       - name: "Attempt to sync to the system Python with an incompatible version"
         run: |
-          ./uv sync --python 3.11 && { echo "ci: Error; should not succeed"; exit 1; } || { echo "ci: Ok; expected failure"; exit 0; }
+          ./uv sync -v --python 3.11 && { echo "ci: Error; should not succeed"; exit 1; } || { echo "ci: Ok; expected failure"; exit 0; }
         env:
-          UV_PROJECT_ENVIRONMENT: "/opt/hostedtoolcache/Python/3.12.6/x64"
+          UV_PROJECT_ENVIRONMENT: "/opt/hostedtoolcache/Python/3.12.7/x64"

       - name: "Attempt to sync to a non-Python environment directory"
         run: |
           mkdir -p /home/runner/example
           touch /home/runner/example/some-file
-          ./uv sync && { echo "ci: Error; should not succeed"; exit 1; } || { echo "ci: Ok; expected failure"; exit 0; }
+          ./uv sync -v && { echo "ci: Error; should not succeed"; exit 1; } || { echo "ci: Ok; expected failure"; exit 0; }
         env:
           UV_PROJECT_ENVIRONMENT: "/home/runner/example"
@@ -987,13 +1012,14 @@ jobs:
             code:
               - "crates/uv-publish/**/*"
               - "scripts/publish/**/*"
+              - ".github/workflows/ci.yml"

   integration-test-publish:
     timeout-minutes: 10
     needs: integration-test-publish-changed
     name: "integration test | uv publish"
     runs-on: ubuntu-latest
-    if: ${{ github.repository == 'astral-sh/uv' && (needs.integration-test-publish-changed.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    if: ${{ github.repository == 'astral-sh/uv' && github.event.pull_request.head.repo.fork != true && (needs.integration-test-publish-changed.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     environment: uv-test-publish
     env:
       # No dbus in GitHub Actions
@@ -1034,6 +1060,8 @@ jobs:
           UV_TEST_PUBLISH_TOKEN: ${{ secrets.UV_TEST_PUBLISH_TOKEN }}
           UV_TEST_PUBLISH_PASSWORD: ${{ secrets.UV_TEST_PUBLISH_PASSWORD }}
+          UV_TEST_PUBLISH_GITLAB_PAT: ${{ secrets.UV_TEST_PUBLISH_GITLAB_PAT }}
+          UV_TEST_PUBLISH_CODEBERG_TOKEN: ${{ secrets.UV_TEST_PUBLISH_CODEBERG_TOKEN }}
           UV_TEST_PUBLISH_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_PUBLISH_CLOUDSMITH_TOKEN }}

   cache-test-ubuntu:
     timeout-minutes: 10
.github/workflows/publish-pypi.yml (8 changed lines, vendored)

@@ -21,14 +21,12 @@ jobs:
       # For PyPI's trusted publishing.
       id-token: write
     steps:
+      - name: "Install uv"
+        uses: astral-sh/setup-uv@v3
      - uses: actions/download-artifact@v4
        with:
          pattern: wheels-*
          path: wheels
          merge-multiple: true
      - name: Publish to PyPi
-       uses: pypa/gh-action-pypi-publish@release/v1
-       with:
-         skip-existing: true
-         packages-dir: wheels
-         verbose: true
+       run: uv publish -v wheels/*
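In CI this step authenticates through PyPI trusted publishing (the `id-token: write` permission above). Outside GitHub Actions the same command needs an explicit credential; a hedged local equivalent using uv's documented token variable:

```console
# UV_PUBLISH_TOKEN stands in for trusted publishing when running locally;
# the token value here is a placeholder.
$ UV_PUBLISH_TOKEN="pypi-…" uv publish -v wheels/*
```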

.github/workflows/release.yml (2 changed lines, vendored)

@@ -66,7 +66,7 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.22.1/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.23.0/cargo-dist-installer.sh | sh"
      - name: Cache cargo-dist
        uses: actions/upload-artifact@v4
        with:
.pre-commit-config.yaml

@@ -12,7 +12,7 @@ repos:
       - id: validate-pyproject

   - repo: https://github.com/crate-ci/typos
-    rev: v1.24.6
+    rev: v1.26.0
     hooks:
       - id: typos

@@ -42,7 +42,7 @@ repos:
         types_or: [yaml, json5]

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.8
+    rev: v0.6.9
     hooks:
       - id: ruff-format
       - id: ruff
.python-versions

@@ -1,6 +1,6 @@
-3.12.1
-3.11.7
-3.10.13
-3.9.18
+3.12.6
+3.11.10
+3.10.15
+3.9.20
 3.8.18
 3.8.12
CHANGELOG.md (217 changed lines)

@@ -1,5 +1,222 @@ (the 0.4.23 release notes below are newly added under the `# Changelog` heading)

# Changelog

## 0.4.23

This release introduces a revamped system for defining package indexes, as an alternative to the existing pip-style
`--index-url` and `--extra-index-url` configuration options.

You can now define named indexes in your `pyproject.toml` file using the `[[tool.uv.index]]` table:

```toml
[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
```

Packages can be pinned to a specific index via `tool.uv.sources`, to ensure that a given package is installed from the
correct index. For example, to ensure that `torch` is _always_ installed from the `pytorch` index:

```toml
[tool.uv.sources]
torch = { index = "pytorch" }

[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
```

Indexes can also be marked as `explicit = true` to prevent packages from being installed from that index
unless explicitly pinned. For example, to ensure that `torch` is installed from the `pytorch` index, but all other
packages are installed from the default index:

```toml
[tool.uv.sources]
torch = { index = "pytorch" }

[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
explicit = true
```

To define an additional index outside a `pyproject.toml` file, use the `--index` command-line argument
(or the `UV_INDEX` environment variable); to replace the default index (PyPI), use the `--default-index` command-line
argument (or `UV_DEFAULT_INDEX`).
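For a concrete picture of the command-line side, here is a hedged sketch (the `pytorch` name and URL reuse the examples above, and the `--index <name>=<url>` form follows this entry's description):

```console
# Add torch, defining and pinning a named index in one step.
$ uv add torch --index pytorch=https://download.pytorch.org/whl/cpu

# Replace the default index (PyPI) for a single invocation.
$ UV_DEFAULT_INDEX="https://download.pytorch.org/whl/cpu" uv lock
```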
These changes are entirely backwards-compatible with the deprecated `--index-url` and `--extra-index-url` options,
which continue to work as before.

See the [Index](https://docs.astral.sh/uv/configuration/indexes/) documentation for more.

### Enhancements

- Add index URLs when provided via `uv add --index` or `--default-index` ([#7746](https://github.com/astral-sh/uv/pull/7746))
- Add support for named and explicit indexes ([#7481](https://github.com/astral-sh/uv/pull/7481))
- Add templates for popular build backends ([#7857](https://github.com/astral-sh/uv/pull/7857))
- Allow multiple pinned indexes in `tool.uv.sources` ([#7769](https://github.com/astral-sh/uv/pull/7769))
- Allow users to incorporate Git tags into dynamic cache keys ([#8259](https://github.com/astral-sh/uv/pull/8259))
- Pin named indexes in `uv add` ([#7747](https://github.com/astral-sh/uv/pull/7747))
- Respect named `--index` and `--default-index` values in `tool.uv.sources` ([#7910](https://github.com/astral-sh/uv/pull/7910))
- Update to latest PubGrub version ([#8245](https://github.com/astral-sh/uv/pull/8245))
- Enable environment variable authentication for named indexes ([#7741](https://github.com/astral-sh/uv/pull/7741))
- Avoid showing lower-bound warning outside of explicit lock and sync ([#8234](https://github.com/astral-sh/uv/pull/8234))
- Improve logging during lock errors ([#8258](https://github.com/astral-sh/uv/pull/8258))
- Improve styling of `requires-python` warnings ([#8240](https://github.com/astral-sh/uv/pull/8240))
- Show hint in resolution failure on `Forbidden` (`403`) or `Unauthorized` (`401`) ([#8264](https://github.com/astral-sh/uv/pull/8264))
- Update to latest `cargo-dist` version (includes new installer features) ([#8270](https://github.com/astral-sh/uv/pull/8270))
- Warn when patch version in `requires-python` is implicitly `0` ([#7959](https://github.com/astral-sh/uv/pull/7959))
- Add more context on client errors during range requests ([#8285](https://github.com/astral-sh/uv/pull/8285))

### Bug fixes

- Avoid writing duplicate index URLs with `--emit-index-url` ([#8226](https://github.com/astral-sh/uv/pull/8226))
- Fix error leading to out-of-bound panic in `uv-pep508` ([#8282](https://github.com/astral-sh/uv/pull/8282))
- Fix managed distributions of free-threaded Python on Windows ([#8268](https://github.com/astral-sh/uv/pull/8268))
- Fix selection of free-threaded interpreters during default Python discovery ([#8239](https://github.com/astral-sh/uv/pull/8239))
- Ignore sources in build requirements for non-source trees ([#8235](https://github.com/astral-sh/uv/pull/8235))
- Invalid cache when adding lower bound to lockfile ([#8230](https://github.com/astral-sh/uv/pull/8230))
- Respect index priority when storing credentials ([#8256](https://github.com/astral-sh/uv/pull/8256))
- Respect relative paths in `uv build` sources ([#8237](https://github.com/astral-sh/uv/pull/8237))
- Narrow what the pip3.<minor> logic drops from entry points. ([#8273](https://github.com/astral-sh/uv/pull/8273))

### Documentation

- Add some additional notes to `--index-url` docs ([#8267](https://github.com/astral-sh/uv/pull/8267))
- Add upgrade note to README ([#7937](https://github.com/astral-sh/uv/pull/7937))
- Remove note that "only a single source may be defined for each dependency" ([#8243](https://github.com/astral-sh/uv/pull/8243))

## 0.4.22

### Enhancements

- Respect `[tool.uv.sources]` in build requirements ([#7172](https://github.com/astral-sh/uv/pull/7172))

### Preview features

- Add a dedicated `uv publish` error message for missing usernames ([#8045](https://github.com/astral-sh/uv/pull/8045))
- Support interactive input in `uv publish` ([#8158](https://github.com/astral-sh/uv/pull/8158))
- Use raw filenames in `uv publish` ([#8204](https://github.com/astral-sh/uv/pull/8204))

### Performance

- Reuse the result of `which git` ([#8224](https://github.com/astral-sh/uv/pull/8224))

### Bug fixes

- Avoid environment check optimization for `uv pip install --exact` ([#8219](https://github.com/astral-sh/uv/pull/8219))
- Do not use free-threaded interpreters without a free-threaded request ([#8191](https://github.com/astral-sh/uv/pull/8191))
- Don't recommend `--prerelease=allow` during build requirement resolution errors ([#8192](https://github.com/astral-sh/uv/pull/8192))
- Prefer optimized builds for free-threaded Python downloads ([#8196](https://github.com/astral-sh/uv/pull/8196))
- Retain old `python-build-standalone` releases ([#8216](https://github.com/astral-sh/uv/pull/8216))
- Run `uv build` builds in the source distribution bucket ([#8220](https://github.com/astral-sh/uv/pull/8220))

## 0.4.21

### Enhancements

- Add support for managed installations of free-threaded Python ([#8100](https://github.com/astral-sh/uv/pull/8100))
- Add note about `uvx` to `uv tool run` short help ([#7695](https://github.com/astral-sh/uv/pull/7695))
- Enable HTTP/2 requests ([#8049](https://github.com/astral-sh/uv/pull/8049))
- Support `uv tree --no-dev` ([#8109](https://github.com/astral-sh/uv/pull/8109))
- Support PEP 723 metadata with `uv run -` ([#8111](https://github.com/astral-sh/uv/pull/8111))
- Support `pip install --exact` ([#8044](https://github.com/astral-sh/uv/pull/8044))
- Support `uv export --no-header` ([#8096](https://github.com/astral-sh/uv/pull/8096))
- Add Python 3.13 images to Docker publish ([#8105](https://github.com/astral-sh/uv/pull/8105))
- Support remote (`https://`) scripts in `uv run` ([#6375](https://github.com/astral-sh/uv/pull/6375))
- Allow comma value-delimited arguments in `uv run --with` ([#7909](https://github.com/astral-sh/uv/pull/7909))

### Configuration

- Support wildcards in `UV_INSECURE_HOST` ([#8052](https://github.com/astral-sh/uv/pull/8052))

### Performance

- Use shared index when fetching metadata in lock satisfaction routine ([#8147](https://github.com/astral-sh/uv/pull/8147))

### Bug fixes

- Add prerelease compatibility check to `uv python` CLI ([#8020](https://github.com/astral-sh/uv/pull/8020))
- Avoid deleting a project environment directory if we cannot tell if a `pyvenv.cfg` file exists ([#8012](https://github.com/astral-sh/uv/pull/8012))
- Avoid excluding valid wheels for exact `requires-python` bounds ([#8140](https://github.com/astral-sh/uv/pull/8140))
- Bump `netrc` crate to latest commit ([#8021](https://github.com/astral-sh/uv/pull/8021))
- Fix `uv python pin 3.13t` failure when parsing version for project requires check ([#8056](https://github.com/astral-sh/uv/pull/8056))
- Fix handling of != intersections in `requires-python` ([#7897](https://github.com/astral-sh/uv/pull/7897))
- Remove the newly created tool environment if sync failed ([#8038](https://github.com/astral-sh/uv/pull/8038))
- Respect dynamic extras in `uv lock` and `uv sync` ([#8091](https://github.com/astral-sh/uv/pull/8091))
- Treat resolver failures as fatal in lockfile validation ([#8083](https://github.com/astral-sh/uv/pull/8083))
- Use `git config --get` for author information for improved backwards compatibility ([#8101](https://github.com/astral-sh/uv/pull/8101))
- Use comma-separated values for `UV_FIND_LINKS` ([#8061](https://github.com/astral-sh/uv/pull/8061))
- Use shared resolver state between add and lock to avoid double Git update ([#8146](https://github.com/astral-sh/uv/pull/8146))
- Make `--relocatable` entrypoints robust to symlinking ([#8079](https://github.com/astral-sh/uv/pull/8079))
- Improve compatibility with VSCode PS1 prompt ([#8006](https://github.com/astral-sh/uv/pull/8006))
- Fix "Stream did not contain valid UTF-8" failures in Windows ([#8120](https://github.com/astral-sh/uv/pull/8120))
- Use `--with-requirements` in `uvx` error hint ([#8112](https://github.com/astral-sh/uv/pull/8112))

### Documentation

- Include `uvx` installation in Docker examples ([#8179](https://github.com/astral-sh/uv/pull/8179))
- Make the instructions for the Windows standalone installer consistent across README and documentation ([#8125](https://github.com/astral-sh/uv/pull/8125))
- Update pip compatibility guide to note transitive URL dependency support ([#8081](https://github.com/astral-sh/uv/pull/8081))
- Document `--reinstall` with `--exclude-newer` to ensure downgrades ([#6721](https://github.com/astral-sh/uv/pull/6721))

## 0.4.20

### Enhancements

- Add managed downloads for CPython 3.13.0 (final) ([#8010](https://github.com/astral-sh/uv/pull/8010))
- Python 3.13 is the default version for `uv python install` ([#8010](https://github.com/astral-sh/uv/pull/8010))
- Hint at wrong endpoint in `uv publish` failures ([#7872](https://github.com/astral-sh/uv/pull/7872))
- List available scripts when a command is not specified for `uv run` ([#7687](https://github.com/astral-sh/uv/pull/7687))
- Fill in `authors` field during `uv init` ([#7756](https://github.com/astral-sh/uv/pull/7756))

### Documentation

- Add snapshot testing to contribution guide ([#7882](https://github.com/astral-sh/uv/pull/7882))
- Fix and improve GitLab integration docs ([#8000](https://github.com/astral-sh/uv/pull/8000))

## 0.4.19

### Enhancements

- Add managed downloads for CPython 3.13.0rc3 and 3.12.7 ([#7880](https://github.com/astral-sh/uv/pull/7880))
- Display the target virtual environment path if non-default ([#7850](https://github.com/astral-sh/uv/pull/7850))
- Preserve case-insensitive sorts in `uv add` ([#7864](https://github.com/astral-sh/uv/pull/7864))
- Respect project upper bounds when filtering wheels on `requires-python` ([#7904](https://github.com/astral-sh/uv/pull/7904))
- Add `--script` to `uv run` to treat an input as PEP 723 regardless of extension ([#7739](https://github.com/astral-sh/uv/pull/7739))
- Improve legibility of build failure errors ([#7854](https://github.com/astral-sh/uv/pull/7854))
- Show interpreter source during Python discovery query errors ([#7928](https://github.com/astral-sh/uv/pull/7928))

### Configuration

- Add `UV_FIND_LINKS` environment variable for `--find-links` ([#7912](https://github.com/astral-sh/uv/pull/7912))
- Ignore empty string values for `UV_PYTHON` environment variable ([#7878](https://github.com/astral-sh/uv/pull/7878))

### Bug fixes

- Allow `py3x-none` tags in newer than Python 3.x ([#7867](https://github.com/astral-sh/uv/pull/7867))
- Allow self-dependencies in the `dev` section ([#7943](https://github.com/astral-sh/uv/pull/7943))
- Always ignore `cp2` wheels in resolution ([#7902](https://github.com/astral-sh/uv/pull/7902))
- Clear the publish progress bar on retry ([#7921](https://github.com/astral-sh/uv/pull/7921))
- Fix parsing of `gnueabi` libc variants in Python version requests ([#7975](https://github.com/astral-sh/uv/pull/7975))
- Simplify supported environments when comparing to lockfile ([#7894](https://github.com/astral-sh/uv/pull/7894))
- Trim commits when reading from Git refs ([#7922](https://github.com/astral-sh/uv/pull/7922))
- Use a higher HTTP read timeout when publishing packages ([#7923](https://github.com/astral-sh/uv/pull/7923))
- Remove the first empty line for `uv tree --package foo` ([#7885](https://github.com/astral-sh/uv/pull/7885))

### Documentation

- Add 3.13 support to the platform reference ([#7971](https://github.com/astral-sh/uv/pull/7971))
- Clarify project environment creation ([#7941](https://github.com/astral-sh/uv/pull/7941))
- Fix code block title in Gitlab integration docs ([#7861](https://github.com/astral-sh/uv/pull/7861))
- Fix project guide section on adding a Git dependency ([#7916](https://github.com/astral-sh/uv/pull/7916))
- Fix uninstallation command for Windows ([#7944](https://github.com/astral-sh/uv/pull/7944))
- Clearly specify the minimum supported Windows Server version ([#7946](https://github.com/astral-sh/uv/pull/7946))

### Rust API

- Remove unused `Sha256Reader` ([#7929](https://github.com/astral-sh/uv/pull/7929))
- Remove unnecessary `Deserialize` derives on settings ([#7856](https://github.com/astral-sh/uv/pull/7856))

## 0.4.18

### Enhancements
CONTRIBUTING.md

@@ -49,6 +49,30 @@ cargo run python install

The storage directory can be configured with `UV_PYTHON_INSTALL_DIR`.

### Snapshot testing

uv uses [insta](https://insta.rs/) for snapshot testing. It's recommended (but not necessary) to use
`cargo-insta` for a better snapshot review experience. See the
[installation guide](https://insta.rs/docs/cli/) for more information.

In tests, you can use the `uv_snapshot!` macro to simplify creating snapshots for uv commands. For
example:

```rust
#[test]
fn test_add() {
    let context = TestContext::new("3.12");
    uv_snapshot!(context.filters(), context.add().arg("requests"), @"");
}
```

To run and review a specific snapshot test:

```shell
cargo test --package <package> --test <test> -- <test_name> -- --exact
cargo insta review
```
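As a filled-in illustration of the placeholders above (the package, test target, and test name here are hypothetical, not taken from this diff):

```shell
# Hypothetical: run one snapshot test from the `uv` package's `it`
# integration-test target, then review any new or changed snapshots.
cargo test --package uv --test it -- test_add -- --exact
cargo insta review
```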
### Local testing

You can invoke your development version of uv with `cargo run -- <args>`. For example:
Cargo.lock (460 changed lines, generated) — diff suppressed because it is too large

Cargo.toml (14 changed lines)
@@ -22,6 +22,7 @@ license = "MIT OR Apache-2.0"

 [workspace.dependencies]
 uv-auth = { path = "crates/uv-auth" }
+uv-build-backend = { path = "crates/uv-build-backend" }
 uv-build-frontend = { path = "crates/uv-build-frontend" }
 uv-cache = { path = "crates/uv-cache" }
 uv-cache-info = { path = "crates/uv-cache-info" }
@@ -44,7 +45,7 @@ uv-metadata = { path = "crates/uv-metadata" }
 uv-normalize = { path = "crates/uv-normalize" }
 uv-once-map = { path = "crates/uv-once-map" }
 uv-options-metadata = { path = "crates/uv-options-metadata" }
-uv-pep440 = { path = "crates/uv-pep440" }
+uv-pep440 = { path = "crates/uv-pep440", features = ["tracing", "rkyv"] }
 uv-pep508 = { path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
 uv-platform-tags = { path = "crates/uv-platform-tags" }
 uv-pubgrub = { path = "crates/uv-pubgrub" }
@@ -58,6 +59,7 @@ uv-scripts = { path = "crates/uv-scripts" }
 uv-settings = { path = "crates/uv-settings" }
 uv-shell = { path = "crates/uv-shell" }
 uv-state = { path = "crates/uv-state" }
+uv-static = { path = "crates/uv-static" }
 uv-tool = { path = "crates/uv-tool" }
 uv-types = { path = "crates/uv-types" }
 uv-version = { path = "crates/uv-version" }
@@ -75,6 +77,7 @@ async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "011b
 axoupdater = { version = "0.7.2", default-features = false }
 backoff = { version = "0.4.0" }
 base64 = { version = "0.22.1" }
 bitflags = { version = "2.6.0" }
 boxcar = { version = "0.2.5" }
+bytecheck = { version = "0.8.0" }
 cachedir = { version = "0.3.1" }
@@ -122,17 +125,17 @@ pathdiff = { version = "0.2.1" }
 petgraph = { version = "0.6.5" }
 platform-info = { version = "2.0.3" }
 proc-macro2 = { version = "1.0.86" }
-pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "388685a8711092971930986644cfed152d1a1f6c" }
+pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "19c77268c0ad5f69d7e12126e0cfacfbba466481" }
 quote = { version = "1.0.37" }
 rayon = { version = "1.10.0" }
 reflink-copy = { version = "0.1.19" }
 regex = { version = "1.10.6" }
-reqwest = { version = "0.12.7", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart"] }
+reqwest = { version = "0.12.7", default-features = false, features = ["json", "gzip", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2"] }
 reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "5e3eaf254b5bd481c75d2710eed055f95b756913", features = ["multipart"] }
 reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "5e3eaf254b5bd481c75d2710eed055f95b756913" }
 rkyv = { version = "0.8.8", features = ["bytecheck"] }
 rmp-serde = { version = "1.3.0" }
-rust-netrc = { version = "0.1.1" }
+rust-netrc = { git = "https://github.com/gribouille/netrc", rev = "544f3890b621f0dc30fcefb4f804269c160ce2e9" }
 rustc-hash = { version = "2.0.0" }
 rustix = { version = "0.38.37", default-features = false, features = ["fs", "std"] }
 same-file = { version = "1.0.6" }
@@ -143,6 +146,7 @@ serde-untagged = { version = "0.1.6" }
 serde_json = { version = "1.0.128" }
 sha2 = { version = "0.10.8" }
+smallvec = { version = "1.13.2" }
 spdx = { version = "0.10.6" }
 syn = { version = "2.0.77" }
 sys-info = { version = "0.9.1" }
 target-lexicon = { version = "0.12.16" }
@@ -276,7 +280,7 @@ inherits = "release"
 # Config for 'cargo dist'
 [workspace.metadata.dist]
 # The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
-cargo-dist-version = "0.22.1"
+cargo-dist-version = "0.23.0"
 # CI backends to support
 ci = "github"
 # The installers to generate for each app
README.md

@@ -52,12 +52,18 @@ Install uv with our standalone installers, or from [PyPI](https://pypi.org/proje
 $ curl -LsSf https://astral.sh/uv/install.sh | sh

 # On Windows.
-$ powershell -c "irm https://astral.sh/uv/install.ps1 | iex"
+$ powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"

 # With pip.
 $ pip install uv
 ```

+If installed via the standalone installer, uv can update itself to the latest version:
+
+```console
+$ uv self update
+```
+
 See the [installation documentation](https://docs.astral.sh/uv/getting-started/installation/) for
 details and alternative installation methods.
crates/uv-auth/Cargo.toml

@@ -3,6 +3,9 @@ name = "uv-auth"
 version = "0.0.1"
 edition = "2021"

+[lib]
+doctest = false
+
 [lints]
 workspace = true

@@ -23,6 +26,8 @@ tracing = { workspace = true }
 url = { workspace = true }
 urlencoding = { workspace = true }

+uv-static = { workspace = true }
+
 [dev-dependencies]
 tempfile = { workspace = true }
 tokio = { workspace = true }
crates/uv-auth/src/cache.rs

@@ -215,77 +215,4 @@ impl TrieState {
 }

 #[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_trie() {
-        let credentials1 = Arc::new(Credentials::new(
-            Some("username1".to_string()),
-            Some("password1".to_string()),
-        ));
-        let credentials2 = Arc::new(Credentials::new(
-            Some("username2".to_string()),
-            Some("password2".to_string()),
-        ));
-        let credentials3 = Arc::new(Credentials::new(
-            Some("username3".to_string()),
-            Some("password3".to_string()),
-        ));
-        let credentials4 = Arc::new(Credentials::new(
-            Some("username4".to_string()),
-            Some("password4".to_string()),
-        ));
-
-        let mut trie = UrlTrie::new();
-        trie.insert(
-            &Url::parse("https://burntsushi.net").unwrap(),
-            credentials1.clone(),
-        );
-        trie.insert(
-            &Url::parse("https://astral.sh").unwrap(),
-            credentials2.clone(),
-        );
-        trie.insert(
-            &Url::parse("https://example.com/foo").unwrap(),
-            credentials3.clone(),
-        );
-        trie.insert(
-            &Url::parse("https://example.com/bar").unwrap(),
-            credentials4.clone(),
-        );
-
-        let url = Url::parse("https://burntsushi.net/regex-internals").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials1));
-
-        let url = Url::parse("https://burntsushi.net/").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials1));
-
-        let url = Url::parse("https://astral.sh/about").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials2));
-
-        let url = Url::parse("https://example.com/foo").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials3));
-
-        let url = Url::parse("https://example.com/foo/").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials3));
-
-        let url = Url::parse("https://example.com/foo/bar").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials3));
-
-        let url = Url::parse("https://example.com/bar").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials4));
-
-        let url = Url::parse("https://example.com/bar/").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials4));
-
-        let url = Url::parse("https://example.com/bar/foo").unwrap();
-        assert_eq!(trie.get(&url), Some(&credentials4));
-
-        let url = Url::parse("https://example.com/about").unwrap();
-        assert_eq!(trie.get(&url), None);
-
-        let url = Url::parse("https://example.com/foobar").unwrap();
-        assert_eq!(trie.get(&url), None);
-    }
-}
+mod tests;
crates/uv-auth/src/cache/tests.rs (new file, 72 lines)

@@ -0,0 +1,72 @@
+use super::*;
+
+#[test]
+fn test_trie() {
+    let credentials1 = Arc::new(Credentials::new(
+        Some("username1".to_string()),
+        Some("password1".to_string()),
+    ));
+    let credentials2 = Arc::new(Credentials::new(
+        Some("username2".to_string()),
+        Some("password2".to_string()),
+    ));
+    let credentials3 = Arc::new(Credentials::new(
+        Some("username3".to_string()),
+        Some("password3".to_string()),
+    ));
+    let credentials4 = Arc::new(Credentials::new(
+        Some("username4".to_string()),
+        Some("password4".to_string()),
+    ));
+
+    let mut trie = UrlTrie::new();
+    trie.insert(
+        &Url::parse("https://burntsushi.net").unwrap(),
+        credentials1.clone(),
+    );
+    trie.insert(
+        &Url::parse("https://astral.sh").unwrap(),
+        credentials2.clone(),
+    );
+    trie.insert(
+        &Url::parse("https://example.com/foo").unwrap(),
+        credentials3.clone(),
+    );
+    trie.insert(
+        &Url::parse("https://example.com/bar").unwrap(),
+        credentials4.clone(),
+    );
+
+    let url = Url::parse("https://burntsushi.net/regex-internals").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials1));
+
+    let url = Url::parse("https://burntsushi.net/").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials1));
+
+    let url = Url::parse("https://astral.sh/about").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials2));
+
+    let url = Url::parse("https://example.com/foo").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials3));
+
+    let url = Url::parse("https://example.com/foo/").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials3));
+
+    let url = Url::parse("https://example.com/foo/bar").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials3));
+
+    let url = Url::parse("https://example.com/bar").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials4));
+
+    let url = Url::parse("https://example.com/bar/").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials4));
+
+    let url = Url::parse("https://example.com/bar/foo").unwrap();
+    assert_eq!(trie.get(&url), Some(&credentials4));
+
+    let url = Url::parse("https://example.com/about").unwrap();
+    assert_eq!(trie.get(&url), None);
+
+    let url = Url::parse("https://example.com/foobar").unwrap();
+    assert_eq!(trie.get(&url), None);
+}
crates/uv-auth/src/credentials.rs

@@ -9,6 +9,8 @@ use std::io::Read;
 use std::io::Write;
 use url::Url;

+use uv_static::EnvVars;
+
 #[derive(Clone, Debug, PartialEq)]
 pub struct Credentials {
     /// The name of the user for authentication.
@@ -139,6 +141,21 @@ impl Credentials {
         })
     }

+    /// Extract the [`Credentials`] from the environment, given a named source.
+    ///
+    /// For example, given a name of `"pytorch"`, search for `UV_HTTP_BASIC_PYTORCH_USERNAME` and
+    /// `UV_HTTP_BASIC_PYTORCH_PASSWORD`.
+    pub fn from_env(name: &str) -> Option<Self> {
+        let name = name.to_uppercase();
+        let username = std::env::var(EnvVars::http_basic_username(&name)).ok();
+        let password = std::env::var(EnvVars::http_basic_password(&name)).ok();
+        if username.is_none() && password.is_none() {
+            None
+        } else {
+            Some(Self::new(username, password))
+        }
+    }
+
     /// Parse [`Credentials`] from an HTTP request, if any.
     ///
     /// Only HTTP Basic Authentication is supported.
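Given the new `from_env` constructor, credentials for a named index are expected to come from the environment as in this sketch (the variable names follow the doc comment above; the index name `pytorch` is illustrative):

```console
# For an index named "pytorch", per the UV_HTTP_BASIC_<NAME>_{USERNAME,PASSWORD} scheme.
$ export UV_HTTP_BASIC_PYTORCH_USERNAME="user"
$ export UV_HTTP_BASIC_PYTORCH_PASSWORD="secret"
$ uv sync
```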
@@ -230,111 +247,4 @@ impl Credentials {
 }

 #[cfg(test)]
-mod test {
-    use insta::assert_debug_snapshot;
-
-    use super::*;
-
-    #[test]
-    fn from_url_no_credentials() {
-        let url = &Url::parse("https://example.com/simple/first/").unwrap();
-        assert_eq!(Credentials::from_url(url), None);
-    }
-
-    #[test]
-    fn from_url_username_and_password() {
-        let url = &Url::parse("https://example.com/simple/first/").unwrap();
-        let mut auth_url = url.clone();
-        auth_url.set_username("user").unwrap();
-        auth_url.set_password(Some("password")).unwrap();
-        let credentials = Credentials::from_url(&auth_url).unwrap();
-        assert_eq!(credentials.username(), Some("user"));
-        assert_eq!(credentials.password(), Some("password"));
-    }
-
-    #[test]
-    fn from_url_no_username() {
-        let url = &Url::parse("https://example.com/simple/first/").unwrap();
-        let mut auth_url = url.clone();
-        auth_url.set_password(Some("password")).unwrap();
-        let credentials = Credentials::from_url(&auth_url).unwrap();
-        assert_eq!(credentials.username(), None);
-        assert_eq!(credentials.password(), Some("password"));
-    }
-
-    #[test]
-    fn from_url_no_password() {
-        let url = &Url::parse("https://example.com/simple/first/").unwrap();
-        let mut auth_url = url.clone();
-        auth_url.set_username("user").unwrap();
-        let credentials = Credentials::from_url(&auth_url).unwrap();
-        assert_eq!(credentials.username(), Some("user"));
-        assert_eq!(credentials.password(), None);
-    }
-
-    #[test]
-    fn authenticated_request_from_url() {
-        let url = Url::parse("https://example.com/simple/first/").unwrap();
-        let mut auth_url = url.clone();
-        auth_url.set_username("user").unwrap();
-        auth_url.set_password(Some("password")).unwrap();
-        let credentials = Credentials::from_url(&auth_url).unwrap();
-
-        let mut request = reqwest::Request::new(reqwest::Method::GET, url);
-        request = credentials.authenticate(request);
-
-        let mut header = request
-            .headers()
-            .get(reqwest::header::AUTHORIZATION)
-            .expect("Authorization header should be set")
-            .clone();
-        header.set_sensitive(false);
-
-        assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZA==""###);
-        assert_eq!(Credentials::from_header_value(&header), Some(credentials));
-    }
-
-    #[test]
-    fn authenticated_request_from_url_with_percent_encoded_user() {
-        let url = Url::parse("https://example.com/simple/first/").unwrap();
-        let mut auth_url = url.clone();
-        auth_url.set_username("user@domain").unwrap();
-        auth_url.set_password(Some("password")).unwrap();
-        let credentials = Credentials::from_url(&auth_url).unwrap();
-
-        let mut request = reqwest::Request::new(reqwest::Method::GET, url);
-        request = credentials.authenticate(request);
-
-        let mut header = request
-            .headers()
-            .get(reqwest::header::AUTHORIZATION)
-            .expect("Authorization header should be set")
-            .clone();
-        header.set_sensitive(false);
-
-        assert_debug_snapshot!(header, @r###""Basic dXNlckBkb21haW46cGFzc3dvcmQ=""###);
-        assert_eq!(Credentials::from_header_value(&header), Some(credentials));
-    }
-
-    #[test]
-    fn authenticated_request_from_url_with_percent_encoded_password() {
-        let url = Url::parse("https://example.com/simple/first/").unwrap();
-        let mut auth_url = url.clone();
-        auth_url.set_username("user").unwrap();
-        auth_url.set_password(Some("password==")).unwrap();
-        let credentials = Credentials::from_url(&auth_url).unwrap();
-
-        let mut request = reqwest::Request::new(reqwest::Method::GET, url);
-        request = credentials.authenticate(request);
-
-        let mut header = request
-            .headers()
-            .get(reqwest::header::AUTHORIZATION)
-            .expect("Authorization header should be set")
-            .clone();
-        header.set_sensitive(false);
-
-        assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZD09""###);
-        assert_eq!(Credentials::from_header_value(&header), Some(credentials));
-    }
-}
+mod tests;
crates/uv-auth/src/credentials/tests.rs (new file, 106 lines)

@@ -0,0 +1,106 @@
+use insta::assert_debug_snapshot;
+
+use super::*;
+
+#[test]
+fn from_url_no_credentials() {
+    let url = &Url::parse("https://example.com/simple/first/").unwrap();
+    assert_eq!(Credentials::from_url(url), None);
+}
+
+#[test]
+fn from_url_username_and_password() {
+    let url = &Url::parse("https://example.com/simple/first/").unwrap();
+    let mut auth_url = url.clone();
+    auth_url.set_username("user").unwrap();
+    auth_url.set_password(Some("password")).unwrap();
+    let credentials = Credentials::from_url(&auth_url).unwrap();
+    assert_eq!(credentials.username(), Some("user"));
+    assert_eq!(credentials.password(), Some("password"));
+}
+
+#[test]
+fn from_url_no_username() {
+    let url = &Url::parse("https://example.com/simple/first/").unwrap();
+    let mut auth_url = url.clone();
+    auth_url.set_password(Some("password")).unwrap();
+    let credentials = Credentials::from_url(&auth_url).unwrap();
+    assert_eq!(credentials.username(), None);
+    assert_eq!(credentials.password(), Some("password"));
+}
+
+#[test]
+fn from_url_no_password() {
+    let url = &Url::parse("https://example.com/simple/first/").unwrap();
+    let mut auth_url = url.clone();
+    auth_url.set_username("user").unwrap();
+    let credentials = Credentials::from_url(&auth_url).unwrap();
+    assert_eq!(credentials.username(), Some("user"));
+    assert_eq!(credentials.password(), None);
+}
+
+#[test]
+fn authenticated_request_from_url() {
+    let url = Url::parse("https://example.com/simple/first/").unwrap();
+    let mut auth_url = url.clone();
+    auth_url.set_username("user").unwrap();
+    auth_url.set_password(Some("password")).unwrap();
+    let credentials = Credentials::from_url(&auth_url).unwrap();
+
+    let mut request = reqwest::Request::new(reqwest::Method::GET, url);
+    request = credentials.authenticate(request);
+
+    let mut header = request
+        .headers()
+        .get(reqwest::header::AUTHORIZATION)
+        .expect("Authorization header should be set")
+        .clone();
+    header.set_sensitive(false);
+
+    assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZA==""###);
+    assert_eq!(Credentials::from_header_value(&header), Some(credentials));
+}
+
+#[test]
+fn authenticated_request_from_url_with_percent_encoded_user() {
+    let url = Url::parse("https://example.com/simple/first/").unwrap();
+    let mut auth_url = url.clone();
+    auth_url.set_username("user@domain").unwrap();
+    auth_url.set_password(Some("password")).unwrap();
+    let credentials = Credentials::from_url(&auth_url).unwrap();
+
+    let mut request = reqwest::Request::new(reqwest::Method::GET, url);
+    request = credentials.authenticate(request);
+
+    let mut header = request
+        .headers()
+        .get(reqwest::header::AUTHORIZATION)
+        .expect("Authorization header should be set")
+        .clone();
+    header.set_sensitive(false);
+
+    assert_debug_snapshot!(header, @r###""Basic dXNlckBkb21haW46cGFzc3dvcmQ=""###);
+    assert_eq!(Credentials::from_header_value(&header), Some(credentials));
+}
+
+#[test]
+fn authenticated_request_from_url_with_percent_encoded_password() {
+    let url = Url::parse("https://example.com/simple/first/").unwrap();
+    let mut auth_url = url.clone();
+    auth_url.set_username("user").unwrap();
+    auth_url.set_password(Some("password==")).unwrap();
+    let credentials = Credentials::from_url(&auth_url).unwrap();
+
+    let mut request = reqwest::Request::new(reqwest::Method::GET, url);
+    request = credentials.authenticate(request);
+
+    let mut header = request
+        .headers()
+        .get(reqwest::header::AUTHORIZATION)
+        .expect("Authorization header should be set")
+        .clone();
+    header.set_sensitive(false);
+
+    assert_debug_snapshot!(header, @r###""Basic dXNlcjpwYXNzd29yZD09""###);
+    assert_eq!(Credentials::from_header_value(&header), Some(credentials));
+}
crates/uv-auth/src/keyring.rs

@@ -151,133 +151,4 @@ impl KeyringProvider {
 }

 #[cfg(test)]
-mod test {
-    use super::*;
-    use futures::FutureExt;
-
-    #[tokio::test]
-    async fn fetch_url_no_host() {
-        let url = Url::parse("file:/etc/bin/").unwrap();
-        let keyring = KeyringProvider::empty();
-        // Panics due to debug assertion; returns `None` in production
-        let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, "user"))
-            .catch_unwind()
-            .await;
-        assert!(result.is_err());
-    }
-
-    #[tokio::test]
-    async fn fetch_url_with_password() {
-        let url = Url::parse("https://user:password@example.com").unwrap();
-        let keyring = KeyringProvider::empty();
-        // Panics due to debug assertion; returns `None` in production
-        let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
-            .catch_unwind()
-            .await;
-        assert!(result.is_err());
-    }
-
-    #[tokio::test]
-    async fn fetch_url_with_no_username() {
-        let url = Url::parse("https://example.com").unwrap();
-        let keyring = KeyringProvider::empty();
-        // Panics due to debug assertion; returns `None` in production
-        let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
-            .catch_unwind()
-            .await;
-        assert!(result.is_err());
-    }
-
-    #[tokio::test]
-    async fn fetch_url_no_auth() {
-        let url = Url::parse("https://example.com").unwrap();
-        let keyring = KeyringProvider::empty();
-        let credentials = keyring.fetch(&url, "user");
-        assert!(credentials.await.is_none());
-    }
-
-    #[tokio::test]
-    async fn fetch_url() {
-        let url = Url::parse("https://example.com").unwrap();
-        let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
-        assert_eq!(
-            keyring.fetch(&url, "user").await,
-            Some(Credentials::new(
-                Some("user".to_string()),
-                Some("password".to_string())
-            ))
-        );
-        assert_eq!(
-            keyring.fetch(&url.join("test").unwrap(), "user").await,
-            Some(Credentials::new(
-                Some("user".to_string()),
-                Some("password".to_string())
-            ))
-        );
-    }
-
-    #[tokio::test]
-    async fn fetch_url_no_match() {
-        let url = Url::parse("https://example.com").unwrap();
-        let keyring = KeyringProvider::dummy([(("other.com", "user"), "password")]);
-        let credentials = keyring.fetch(&url, "user").await;
-        assert_eq!(credentials, None);
-    }
-
-    #[tokio::test]
-    async fn fetch_url_prefers_url_to_host() {
-        let url = Url::parse("https://example.com/").unwrap();
-        let keyring = KeyringProvider::dummy([
-            ((url.join("foo").unwrap().as_str(), "user"), "password"),
-            ((url.host_str().unwrap(), "user"), "other-password"),
-        ]);
-        assert_eq!(
-            keyring.fetch(&url.join("foo").unwrap(), "user").await,
-            Some(Credentials::new(
-                Some("user".to_string()),
-                Some("password".to_string())
-            ))
-        );
-        assert_eq!(
-            keyring.fetch(&url, "user").await,
-            Some(Credentials::new(
-                Some("user".to_string()),
-                Some("other-password".to_string())
-            ))
-        );
-        assert_eq!(
-            keyring.fetch(&url.join("bar").unwrap(), "user").await,
-            Some(Credentials::new(
-                Some("user".to_string()),
-                Some("other-password".to_string())
-            ))
-        );
-    }
-
-    #[tokio::test]
-    async fn fetch_url_username() {
-        let url = Url::parse("https://example.com").unwrap();
-        let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
-        let credentials = keyring.fetch(&url, "user").await;
-        assert_eq!(
-            credentials,
-            Some(Credentials::new(
-                Some("user".to_string()),
-                Some("password".to_string())
-            ))
-        );
-    }
-
-    #[tokio::test]
-    async fn fetch_url_username_no_match() {
-        let url = Url::parse("https://example.com").unwrap();
-        let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "foo"), "password")]);
-        let credentials = keyring.fetch(&url, "bar").await;
-        assert_eq!(credentials, None);
-
-        // Still fails if we have `foo` in the URL itself
-        let url = Url::parse("https://foo@example.com").unwrap();
-        let credentials = keyring.fetch(&url, "bar").await;
-        assert_eq!(credentials, None);
-    }
-}
+mod tests;
128
crates/uv-auth/src/keyring/tests.rs
Normal file
128
crates/uv-auth/src/keyring/tests.rs
Normal file
|
@ -0,0 +1,128 @@
use super::*;
use futures::FutureExt;

#[tokio::test]
async fn fetch_url_no_host() {
    let url = Url::parse("file:/etc/bin/").unwrap();
    let keyring = KeyringProvider::empty();
    // Panics due to debug assertion; returns `None` in production
    let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, "user"))
        .catch_unwind()
        .await;
    assert!(result.is_err());
}

#[tokio::test]
async fn fetch_url_with_password() {
    let url = Url::parse("https://user:password@example.com").unwrap();
    let keyring = KeyringProvider::empty();
    // Panics due to debug assertion; returns `None` in production
    let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
        .catch_unwind()
        .await;
    assert!(result.is_err());
}

#[tokio::test]
async fn fetch_url_with_no_username() {
    let url = Url::parse("https://example.com").unwrap();
    let keyring = KeyringProvider::empty();
    // Panics due to debug assertion; returns `None` in production
    let result = std::panic::AssertUnwindSafe(keyring.fetch(&url, url.username()))
        .catch_unwind()
        .await;
    assert!(result.is_err());
}

#[tokio::test]
async fn fetch_url_no_auth() {
    let url = Url::parse("https://example.com").unwrap();
    let keyring = KeyringProvider::empty();
    let credentials = keyring.fetch(&url, "user");
    assert!(credentials.await.is_none());
}

#[tokio::test]
async fn fetch_url() {
    let url = Url::parse("https://example.com").unwrap();
    let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
    assert_eq!(
        keyring.fetch(&url, "user").await,
        Some(Credentials::new(
            Some("user".to_string()),
            Some("password".to_string())
        ))
    );
    assert_eq!(
        keyring.fetch(&url.join("test").unwrap(), "user").await,
        Some(Credentials::new(
            Some("user".to_string()),
            Some("password".to_string())
        ))
    );
}

#[tokio::test]
async fn fetch_url_no_match() {
    let url = Url::parse("https://example.com").unwrap();
    let keyring = KeyringProvider::dummy([(("other.com", "user"), "password")]);
    let credentials = keyring.fetch(&url, "user").await;
    assert_eq!(credentials, None);
}

#[tokio::test]
async fn fetch_url_prefers_url_to_host() {
    let url = Url::parse("https://example.com/").unwrap();
    let keyring = KeyringProvider::dummy([
        ((url.join("foo").unwrap().as_str(), "user"), "password"),
        ((url.host_str().unwrap(), "user"), "other-password"),
    ]);
    assert_eq!(
        keyring.fetch(&url.join("foo").unwrap(), "user").await,
        Some(Credentials::new(
            Some("user".to_string()),
            Some("password".to_string())
        ))
    );
    assert_eq!(
        keyring.fetch(&url, "user").await,
        Some(Credentials::new(
            Some("user".to_string()),
            Some("other-password".to_string())
        ))
    );
    assert_eq!(
        keyring.fetch(&url.join("bar").unwrap(), "user").await,
        Some(Credentials::new(
            Some("user".to_string()),
            Some("other-password".to_string())
        ))
    );
}

#[tokio::test]
async fn fetch_url_username() {
    let url = Url::parse("https://example.com").unwrap();
    let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "user"), "password")]);
    let credentials = keyring.fetch(&url, "user").await;
    assert_eq!(
        credentials,
        Some(Credentials::new(
            Some("user".to_string()),
            Some("password".to_string())
        ))
    );
}

#[tokio::test]
async fn fetch_url_username_no_match() {
    let url = Url::parse("https://example.com").unwrap();
    let keyring = KeyringProvider::dummy([((url.host_str().unwrap(), "foo"), "password")]);
    let credentials = keyring.fetch(&url, "bar").await;
    assert_eq!(credentials, None);

    // Still fails if we have `foo` in the URL itself
    let url = Url::parse("https://foo@example.com").unwrap();
    let credentials = keyring.fetch(&url, "bar").await;
    assert_eq!(credentials, None);
}
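The lookup order these tests pin down, where an exact-URL entry wins over a host-level entry, can be summarized in a short sketch. This is an illustration only: the `lookup` function and the flat `HashMap` store are assumptions, not the actual `KeyringProvider` internals.

use std::collections::HashMap;
use url::Url;

// Sketch: try the exact URL first, then fall back to the bare host.
// `store` stands in for whatever backs `KeyringProvider::dummy`.
fn lookup(store: &HashMap<(String, String), String>, url: &Url, username: &str) -> Option<String> {
    store
        .get(&(url.as_str().to_string(), username.to_string()))
        .or_else(|| store.get(&(url.host_str()?.to_string(), username.to_string())))
        .cloned()
}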
@@ -35,3 +35,11 @@ pub fn store_credentials_from_url(url: &Url) -> bool {
        false
    }
}

/// Populate the global authentication store with the given credentials for a URL.
pub fn store_credentials(url: &Url, credentials: Credentials) {
    trace!("Caching credentials for {url}");
    CREDENTIALS_CACHE.insert(url, Arc::new(credentials));
}
1079 crates/uv-auth/src/middleware/tests.rs Normal file
(File diff suppressed because it is too large)
@@ -59,89 +59,4 @@ impl Display for Realm {
}

#[cfg(test)]
mod tests {
    use url::{ParseError, Url};

    use crate::Realm;

    #[test]
    fn test_should_retain_auth() -> Result<(), ParseError> {
        // Exact match (https)
        assert_eq!(
            Realm::from(&Url::parse("https://example.com")?),
            Realm::from(&Url::parse("https://example.com")?)
        );

        // Exact match (with port)
        assert_eq!(
            Realm::from(&Url::parse("https://example.com:1234")?),
            Realm::from(&Url::parse("https://example.com:1234")?)
        );

        // Exact match (http)
        assert_eq!(
            Realm::from(&Url::parse("http://example.com")?),
            Realm::from(&Url::parse("http://example.com")?)
        );

        // Okay, path differs
        assert_eq!(
            Realm::from(&Url::parse("http://example.com/foo")?),
            Realm::from(&Url::parse("http://example.com/bar")?)
        );

        // Okay, default port differs (https)
        assert_eq!(
            Realm::from(&Url::parse("https://example.com:443")?),
            Realm::from(&Url::parse("https://example.com")?)
        );

        // Okay, default port differs (http)
        assert_eq!(
            Realm::from(&Url::parse("http://example.com:80")?),
            Realm::from(&Url::parse("http://example.com")?)
        );

        // Mismatched scheme
        assert_ne!(
            Realm::from(&Url::parse("https://example.com")?),
            Realm::from(&Url::parse("http://example.com")?)
        );

        // Mismatched scheme, we explicitly do not allow upgrade to https
        assert_ne!(
            Realm::from(&Url::parse("http://example.com")?),
            Realm::from(&Url::parse("https://example.com")?)
        );

        // Mismatched host
        assert_ne!(
            Realm::from(&Url::parse("https://foo.com")?),
            Realm::from(&Url::parse("https://bar.com")?)
        );

        // Mismatched port
        assert_ne!(
            Realm::from(&Url::parse("https://example.com:1234")?),
            Realm::from(&Url::parse("https://example.com:5678")?)
        );

        // Mismatched port, with one as default for scheme
        assert_ne!(
            Realm::from(&Url::parse("https://example.com:443")?),
            Realm::from(&Url::parse("https://example.com:5678")?)
        );
        assert_ne!(
            Realm::from(&Url::parse("https://example.com:1234")?),
            Realm::from(&Url::parse("https://example.com:443")?)
        );

        // Mismatched port, with default for a different scheme
        assert_ne!(
            Realm::from(&Url::parse("https://example.com:80")?),
            Realm::from(&Url::parse("https://example.com")?)
        );

        Ok(())
    }
}
mod tests;
84 crates/uv-auth/src/realm/tests.rs Normal file

@@ -0,0 +1,84 @@
use url::{ParseError, Url};

use crate::Realm;

#[test]
fn test_should_retain_auth() -> Result<(), ParseError> {
    // Exact match (https)
    assert_eq!(
        Realm::from(&Url::parse("https://example.com")?),
        Realm::from(&Url::parse("https://example.com")?)
    );

    // Exact match (with port)
    assert_eq!(
        Realm::from(&Url::parse("https://example.com:1234")?),
        Realm::from(&Url::parse("https://example.com:1234")?)
    );

    // Exact match (http)
    assert_eq!(
        Realm::from(&Url::parse("http://example.com")?),
        Realm::from(&Url::parse("http://example.com")?)
    );

    // Okay, path differs
    assert_eq!(
        Realm::from(&Url::parse("http://example.com/foo")?),
        Realm::from(&Url::parse("http://example.com/bar")?)
    );

    // Okay, default port differs (https)
    assert_eq!(
        Realm::from(&Url::parse("https://example.com:443")?),
        Realm::from(&Url::parse("https://example.com")?)
    );

    // Okay, default port differs (http)
    assert_eq!(
        Realm::from(&Url::parse("http://example.com:80")?),
        Realm::from(&Url::parse("http://example.com")?)
    );

    // Mismatched scheme
    assert_ne!(
        Realm::from(&Url::parse("https://example.com")?),
        Realm::from(&Url::parse("http://example.com")?)
    );

    // Mismatched scheme, we explicitly do not allow upgrade to https
    assert_ne!(
        Realm::from(&Url::parse("http://example.com")?),
        Realm::from(&Url::parse("https://example.com")?)
    );

    // Mismatched host
    assert_ne!(
        Realm::from(&Url::parse("https://foo.com")?),
        Realm::from(&Url::parse("https://bar.com")?)
    );

    // Mismatched port
    assert_ne!(
        Realm::from(&Url::parse("https://example.com:1234")?),
        Realm::from(&Url::parse("https://example.com:5678")?)
    );

    // Mismatched port, with one as default for scheme
    assert_ne!(
        Realm::from(&Url::parse("https://example.com:443")?),
        Realm::from(&Url::parse("https://example.com:5678")?)
    );
    assert_ne!(
        Realm::from(&Url::parse("https://example.com:1234")?),
        Realm::from(&Url::parse("https://example.com:443")?)
    );

    // Mismatched port, with default for a different scheme
    assert_ne!(
        Realm::from(&Url::parse("https://example.com:80")?),
        Realm::from(&Url::parse("https://example.com")?)
    );

    Ok(())
}
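A minimal sketch of the equality these tests exercise: two URLs share a realm when scheme, host, and effective port (the explicit port, or the scheme's default) all agree. `RealmSketch` is a stand-in name; the crate's actual `Realm` may be defined differently.

use url::Url;

#[derive(Debug, PartialEq, Eq)]
struct RealmSketch {
    scheme: String,
    host: Option<String>,
    port: Option<u16>,
}

impl From<&Url> for RealmSketch {
    fn from(url: &Url) -> Self {
        Self {
            scheme: url.scheme().to_string(),
            host: url.host_str().map(str::to_string),
            // Folds `https://host:443` and `https://host` into the same realm,
            // while `https://host:80` stays distinct, as asserted above.
            port: url.port_or_known_default(),
        }
    }
}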
@@ -15,6 +15,7 @@ license = { workspace = true }
workspace = true

[lib]
doctest = false
bench = false

[[bench]]
@@ -86,7 +86,8 @@ mod resolver {
    use uv_cache::Cache;
    use uv_client::RegistryClient;
    use uv_configuration::{
        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, SourceStrategy,
        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, LowerBound,
        SourceStrategy,
    };
    use uv_dispatch::BuildDispatch;
    use uv_distribution::DistributionDatabase;

@@ -191,6 +192,7 @@ mod resolver {
            &build_options,
            &hashes,
            exclude_newer,
            LowerBound::default(),
            sources,
            concurrency,
        );
44 crates/uv-build-backend/Cargo.toml Normal file

@@ -0,0 +1,44 @@
[package]
name = "uv-build-backend"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

[lib]
doctest = false

[dependencies]
uv-distribution-filename = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-pubgrub = { workspace = true }
uv-pypi-types = { workspace = true }
uv-warnings = { workspace = true }

csv = { workspace = true }
fs-err = { workspace = true }
glob = { workspace = true }
itertools = { workspace = true }
serde = { workspace = true }
sha2 = { workspace = true }
spdx = { workspace = true }
thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
walkdir = { workspace = true }
zip = { workspace = true }

[lints]
workspace = true

[dev-dependencies]
indoc = { version = "2.0.5" }
insta = { version = "1.40.0" }
tempfile = { version = "3.12.0" }
517 crates/uv-build-backend/src/lib.rs Normal file

@@ -0,0 +1,517 @@
mod metadata;
mod pep639_glob;

use crate::metadata::{PyProjectToml, ValidationError};
use crate::pep639_glob::Pep639GlobError;
use fs_err::File;
use glob::{GlobError, PatternError};
use itertools::Itertools;
use sha2::{Digest, Sha256};
use std::fs::FileType;
use std::io::{BufReader, Read, Write};
use std::path::{Path, PathBuf, StripPrefixError};
use std::{io, mem};
use thiserror::Error;
use tracing::{debug, trace};
use uv_distribution_filename::WheelFilename;
use uv_fs::Simplified;
use walkdir::WalkDir;
use zip::{CompressionMethod, ZipWriter};

#[derive(Debug, Error)]
pub enum Error {
    #[error(transparent)]
    Io(#[from] io::Error),
    #[error("Invalid pyproject.toml")]
    Toml(#[from] toml::de::Error),
    #[error("Invalid pyproject.toml")]
    Validation(#[from] ValidationError),
    #[error("Invalid `project.license-files` glob expression: `{0}`")]
    Pep639Glob(String, #[source] Pep639GlobError),
    #[error("The `project.license-files` entry is not a valid glob pattern: `{0}`")]
    Pattern(String, #[source] PatternError),
    /// [`GlobError`] is a wrapped io error.
    #[error(transparent)]
    Glob(#[from] GlobError),
    #[error("Failed to walk source tree: `{}`", root.user_display())]
    WalkDir {
        root: PathBuf,
        #[source]
        err: walkdir::Error,
    },
    #[error("Non-UTF-8 paths are not supported: `{}`", _0.user_display())]
    NotUtf8Path(PathBuf),
    #[error("Failed to walk source tree")]
    StripPrefix(#[from] StripPrefixError),
    #[error("Unsupported file type: {0:?}")]
    UnsupportedFileType(FileType),
    #[error("Failed to write wheel zip archive")]
    Zip(#[from] zip::result::ZipError),
    #[error("Failed to write RECORD file")]
    Csv(#[from] csv::Error),
    #[error("Expected a Python module with an `__init__.py` at: `{}`", _0.user_display())]
    MissingModule(PathBuf),
    #[error("Inconsistent metadata between prepare and build step: `{0}`")]
    InconsistentSteps(&'static str),
}

/// Allow dispatching between writing to a directory, writing to a zip archive, and writing to a
/// `.tar.gz`.
///
/// All paths are string types instead of path types since wheels are portable between platforms.
///
/// Contract: You must call `close` before dropping to obtain a valid output (dropping is fine in
/// the error case).
trait DirectoryWriter {
    /// Add a file with the given content.
    fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error>;

    /// Add a file with the given name and return a writer for it.
    fn new_writer<'slf>(&'slf mut self, path: &str) -> Result<Box<dyn Write + 'slf>, Error>;

    /// Add a local file.
    fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error>;

    /// Create a directory.
    fn write_directory(&mut self, directory: &str) -> Result<(), Error>;

    /// Write the `RECORD` file and, if applicable, the central directory.
    fn close(self, dist_info_dir: &str) -> Result<(), Error>;
}

/// Zip archive (wheel) writer.
struct ZipDirectoryWriter {
    writer: ZipWriter<File>,
    compression: CompressionMethod,
    /// The entries in the `RECORD` file.
    record: Vec<RecordEntry>,
}

impl ZipDirectoryWriter {
    /// A wheel writer with deflate compression.
    fn new_wheel(file: File) -> Self {
        Self {
            writer: ZipWriter::new(file),
            compression: CompressionMethod::Deflated,
            record: Vec::new(),
        }
    }

    /// A wheel writer with no (stored) compression.
    ///
    /// Since editables are temporary, we save time by skipping compression and decompression.
    #[expect(dead_code)]
    fn new_editable(file: File) -> Self {
        Self {
            writer: ZipWriter::new(file),
            compression: CompressionMethod::Stored,
            record: Vec::new(),
        }
    }
}

impl DirectoryWriter for ZipDirectoryWriter {
    fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
        trace!("Adding {}", path);
        let options = zip::write::FileOptions::default().compression_method(self.compression);
        self.writer.start_file(path, options)?;
        self.writer.write_all(bytes)?;

        let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
        self.record.push(RecordEntry {
            path: path.to_string(),
            hash,
            size: bytes.len(),
        });

        Ok(())
    }

    fn new_writer<'slf>(&'slf mut self, path: &str) -> Result<Box<dyn Write + 'slf>, Error> {
        // TODO(konsti): We need to preserve permissions, at least the executable bit.
        self.writer.start_file(
            path,
            zip::write::FileOptions::default().compression_method(self.compression),
        )?;
        Ok(Box::new(&mut self.writer))
    }

    fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
        trace!("Adding {} from {}", path, file.user_display());
        let mut reader = BufReader::new(File::open(file)?);
        let mut writer = self.new_writer(path)?;
        let record = write_hashed(path, &mut reader, &mut writer)?;
        drop(writer);
        self.record.push(record);
        Ok(())
    }

    fn write_directory(&mut self, directory: &str) -> Result<(), Error> {
        trace!("Adding directory {}", directory);
        let options = zip::write::FileOptions::default().compression_method(self.compression);
        Ok(self.writer.add_directory(directory, options)?)
    }

    /// Write the `RECORD` file and the central directory.
    fn close(mut self, dist_info_dir: &str) -> Result<(), Error> {
        let record_path = format!("{dist_info_dir}/RECORD");
        trace!("Adding {record_path}");
        let record = mem::take(&mut self.record);
        write_record(&mut self.new_writer(&record_path)?, dist_info_dir, record)?;

        trace!("Adding central directory");
        self.writer.finish()?;
        Ok(())
    }
}

struct FilesystemWrite {
    /// The virtualenv or metadata directory that added file paths are relative to.
    root: PathBuf,
    /// The entries in the `RECORD` file.
    record: Vec<RecordEntry>,
}

impl FilesystemWrite {
    fn new(root: &Path) -> Self {
        Self {
            root: root.to_owned(),
            record: Vec::new(),
        }
    }
}

/// File system writer.
impl DirectoryWriter for FilesystemWrite {
    fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
        trace!("Adding {}", path);
        let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
        self.record.push(RecordEntry {
            path: path.to_string(),
            hash,
            size: bytes.len(),
        });

        Ok(fs_err::write(self.root.join(path), bytes)?)
    }

    fn new_writer<'slf>(&'slf mut self, path: &str) -> Result<Box<dyn Write + 'slf>, Error> {
        trace!("Adding {}", path);
        Ok(Box::new(File::create(self.root.join(path))?))
    }

    fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
        trace!("Adding {} from {}", path, file.user_display());
        let mut reader = BufReader::new(File::open(file)?);
        let mut writer = self.new_writer(path)?;
        let record = write_hashed(path, &mut reader, &mut writer)?;
        drop(writer);
        self.record.push(record);
        Ok(())
    }

    fn write_directory(&mut self, directory: &str) -> Result<(), Error> {
        trace!("Adding directory {}", directory);
        Ok(fs_err::create_dir(self.root.join(directory))?)
    }

    /// Write the `RECORD` file.
    fn close(mut self, dist_info_dir: &str) -> Result<(), Error> {
        let record = mem::take(&mut self.record);
        write_record(
            &mut self.new_writer(&format!("{dist_info_dir}/RECORD"))?,
            dist_info_dir,
            record,
        )?;

        Ok(())
    }
}

/// An entry in the `RECORD` file.
///
/// <https://packaging.python.org/en/latest/specifications/recording-installed-packages/#the-record-file>
struct RecordEntry {
    /// The path to the file relative to the package root.
    ///
    /// While the spec would allow backslashes, we always use portable paths with forward slashes.
    path: String,
    /// The SHA256 hash of the file.
    hash: String,
    /// The size of the file in bytes.
    size: usize,
}

/// Read the input file and write it both to the hasher and the target file.
///
/// We're implementing this tee-ing manually since there is no sync `InspectReader` or std tee
/// function.
fn write_hashed(
    path: &str,
    reader: &mut dyn Read,
    writer: &mut dyn Write,
) -> Result<RecordEntry, io::Error> {
    let mut hasher = Sha256::new();
    let mut size = 0;
    // 8KB is the default defined in `std::sys_common::io`.
    let mut buffer = vec![0; 8 * 1024];
    loop {
        let read = match reader.read(&mut buffer) {
            Ok(read) => read,
            Err(err) if err.kind() == io::ErrorKind::Interrupted => continue,
            Err(err) => return Err(err),
        };
        if read == 0 {
            // End of file
            break;
        }
        hasher.update(&buffer[..read]);
        writer.write_all(&buffer[..read])?;
        size += read;
    }
    Ok(RecordEntry {
        path: path.to_string(),
        hash: format!("{:x}", hasher.finalize()),
        size,
    })
}

/// Build a wheel from the source tree and place it in the output directory.
pub fn build(
    source_tree: &Path,
    wheel_dir: &Path,
    metadata_directory: Option<&Path>,
    uv_version: &str,
) -> Result<WheelFilename, Error> {
    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
    let pyproject_toml = PyProjectToml::parse(&contents)?;
    pyproject_toml.check_build_system("1.0.0+test");

    check_metadata_directory(source_tree, metadata_directory, &pyproject_toml)?;

    let filename = WheelFilename {
        name: pyproject_toml.name().clone(),
        version: pyproject_toml.version().clone(),
        build_tag: None,
        python_tag: vec!["py3".to_string()],
        abi_tag: vec!["none".to_string()],
        platform_tag: vec!["any".to_string()],
    };

    let wheel_path = wheel_dir.join(filename.to_string());
    debug!("Writing wheel at {}", wheel_path.user_display());
    let mut wheel_writer = ZipDirectoryWriter::new_wheel(File::create(&wheel_path)?);

    debug!("Adding content files to {}", wheel_path.user_display());
    let strip_root = source_tree.join("src");
    let module_root = strip_root.join(pyproject_toml.name().as_dist_info_name().as_ref());
    if !module_root.join("__init__.py").is_file() {
        return Err(Error::MissingModule(module_root));
    }
    for entry in WalkDir::new(module_root) {
        let entry = entry.map_err(|err| Error::WalkDir {
            root: source_tree.to_path_buf(),
            err,
        })?;

        let relative_path = entry.path().strip_prefix(&strip_root)?;
        let relative_path_str = relative_path
            .to_str()
            .ok_or_else(|| Error::NotUtf8Path(relative_path.to_path_buf()))?;
        if entry.file_type().is_dir() {
            wheel_writer.write_directory(relative_path_str)?;
        } else if entry.file_type().is_file() {
            wheel_writer.write_file(relative_path_str, entry.path())?;
        } else {
            // TODO(konsti): We may want to support symlinks, there is support for installing them.
            return Err(Error::UnsupportedFileType(entry.file_type()));
        }
    }

    debug!("Adding metadata files to {}", wheel_path.user_display());
    let dist_info_dir = write_dist_info(
        &mut wheel_writer,
        &pyproject_toml,
        &filename,
        source_tree,
        uv_version,
    )?;
    wheel_writer.close(&dist_info_dir)?;

    Ok(filename)
}

/// Write the dist-info directory to the output directory without building the wheel.
pub fn metadata(
    source_tree: &Path,
    metadata_directory: &Path,
    uv_version: &str,
) -> Result<String, Error> {
    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
    let pyproject_toml = PyProjectToml::parse(&contents)?;
    pyproject_toml.check_build_system("1.0.0+test");

    let filename = WheelFilename {
        name: pyproject_toml.name().clone(),
        version: pyproject_toml.version().clone(),
        build_tag: None,
        python_tag: vec!["py3".to_string()],
        abi_tag: vec!["none".to_string()],
        platform_tag: vec!["any".to_string()],
    };

    debug!(
        "Writing metadata files to {}",
        metadata_directory.user_display()
    );
    let mut wheel_writer = FilesystemWrite::new(metadata_directory);
    let dist_info_dir = write_dist_info(
        &mut wheel_writer,
        &pyproject_toml,
        &filename,
        source_tree,
        uv_version,
    )?;
    wheel_writer.close(&dist_info_dir)?;

    Ok(dist_info_dir)
}

/// PEP 517 requires that the metadata directory from the prepare metadata call is identical to the
/// build wheel call. This method performs a consistency check that `METADATA` and
/// `entry_points.txt` match.
fn check_metadata_directory(
    source_tree: &Path,
    metadata_directory: Option<&Path>,
    pyproject_toml: &PyProjectToml,
) -> Result<(), Error> {
    let Some(metadata_directory) = metadata_directory else {
        return Ok(());
    };

    let dist_info_dir = format!(
        "{}-{}.dist-info",
        pyproject_toml.name().as_dist_info_name(),
        pyproject_toml.version()
    );

    // `METADATA` is a mandatory file.
    let current = pyproject_toml
        .to_metadata(source_tree)?
        .core_metadata_format();
    let previous =
        fs_err::read_to_string(metadata_directory.join(&dist_info_dir).join("METADATA"))?;
    if previous != current {
        return Err(Error::InconsistentSteps("METADATA"));
    }

    // `entry_points.txt` is not written if it would be empty.
    let entrypoints_path = metadata_directory
        .join(&dist_info_dir)
        .join("entry_points.txt");
    match pyproject_toml.to_entry_points()? {
        None => {
            if entrypoints_path.is_file() {
                return Err(Error::InconsistentSteps("entry_points.txt"));
            }
        }
        Some(entrypoints) => {
            if fs_err::read_to_string(&entrypoints_path)? != entrypoints {
                return Err(Error::InconsistentSteps("entry_points.txt"));
            }
        }
    }

    Ok(())
}

/// Add `METADATA` and `entry_points.txt` to the dist-info directory.
///
/// Returns the name of the dist-info directory.
fn write_dist_info(
    writer: &mut dyn DirectoryWriter,
    pyproject_toml: &PyProjectToml,
    filename: &WheelFilename,
    root: &Path,
    uv_version: &str,
) -> Result<String, Error> {
    let dist_info_dir = format!(
        "{}-{}.dist-info",
        pyproject_toml.name().as_dist_info_name(),
        pyproject_toml.version()
    );

    writer.write_directory(&dist_info_dir)?;

    // Add `WHEEL`.
    let wheel_info = wheel_info(filename, uv_version);
    writer.write_bytes(&format!("{dist_info_dir}/WHEEL"), wheel_info.as_bytes())?;

    // Add `entry_points.txt`.
    if let Some(entrypoint) = pyproject_toml.to_entry_points()? {
        writer.write_bytes(
            &format!("{dist_info_dir}/entry_points.txt"),
            entrypoint.as_bytes(),
        )?;
    }

    // Add `METADATA`.
    let metadata = pyproject_toml.to_metadata(root)?.core_metadata_format();
    writer.write_bytes(&format!("{dist_info_dir}/METADATA"), metadata.as_bytes())?;

    // `RECORD` is added on closing.

    Ok(dist_info_dir)
}

/// Returns the `WHEEL` file contents.
fn wheel_info(filename: &WheelFilename, uv_version: &str) -> String {
    // https://packaging.python.org/en/latest/specifications/binary-distribution-format/#file-contents
    let mut wheel_info = vec![
        ("Wheel-Version", "1.0".to_string()),
        ("Generator", format!("uv {uv_version}")),
        ("Root-Is-Purelib", "true".to_string()),
    ];
    for python_tag in &filename.python_tag {
        for abi_tag in &filename.abi_tag {
            for platform_tag in &filename.platform_tag {
                wheel_info.push(("Tag", format!("{python_tag}-{abi_tag}-{platform_tag}")));
            }
        }
    }
    wheel_info
        .into_iter()
        .map(|(key, value)| format!("{key}: {value}"))
        .join("\n")
}

/// Write the `RECORD` file.
///
/// <https://packaging.python.org/en/latest/specifications/recording-installed-packages/#the-record-file>
fn write_record(
    writer: &mut dyn Write,
    dist_info_dir: &str,
    record: Vec<RecordEntry>,
) -> Result<(), Error> {
    let mut record_writer = csv::Writer::from_writer(writer);
    for entry in record {
        record_writer.write_record(&[
            entry.path,
            format!("sha256={}", entry.hash),
            entry.size.to_string(),
        ])?;
    }

    // We can't compute the hash or size for RECORD without modifying it at the same time.
    record_writer.write_record(&[
        format!("{dist_info_dir}/RECORD"),
        String::new(),
        String::new(),
    ])?;
    record_writer.flush()?;
    Ok(())
}

#[cfg(test)]
mod tests;
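For reference, each row that `write_record` emits has the shape `path,sha256=<hex digest>,<size>`. A self-contained sketch (not part of the crate) that reproduces one such row for an in-memory buffer, using the same `sha2` hashing as `write_bytes` above:

use sha2::{Digest, Sha256};

// Hypothetical helper: format one RECORD row the way the writer above does.
fn record_row(path: &str, bytes: &[u8]) -> String {
    let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
    format!("{path},sha256={hash},{}", bytes.len())
}

fn main() {
    // Prints e.g. `foo/__init__.py,sha256=e3b0c44298fc1c14...,0` for an empty file.
    println!("{}", record_row("foo/__init__.py", b""));
}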
631 crates/uv-build-backend/src/metadata.rs Normal file

@@ -0,0 +1,631 @@
use crate::pep639_glob::parse_pep639_glob;
use crate::Error;
use itertools::Itertools;
use serde::Deserialize;
use std::collections::{BTreeMap, Bound};
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use thiserror::Error;
use tracing::debug;
use uv_fs::Simplified;
use uv_normalize::{ExtraName, PackageName};
use uv_pep440::{Version, VersionSpecifiers};
use uv_pep508::{Requirement, VersionOrUrl};
use uv_pubgrub::PubGrubSpecifier;
use uv_pypi_types::{Metadata23, VerbatimParsedUrl};
use uv_warnings::warn_user_once;

#[derive(Debug, Error)]
pub enum ValidationError {
    /// The spec isn't clear about what the values in that field would be, and we only support the
    /// default value (UTF-8).
    #[error("Charsets other than UTF-8 are not supported. Please convert your README to UTF-8 and remove `project.readme.charset`.")]
    ReadmeCharset,
    #[error("Unknown Readme extension `{0}`, can't determine content type. Please use a supported extension (`.md`, `.rst`, `.txt`) or set the content type manually.")]
    UnknownExtension(String),
    #[error("Can't infer content type because `{}` does not have an extension. Please use a supported extension (`.md`, `.rst`, `.txt`) or set the content type manually.", _0.user_display())]
    MissingExtension(PathBuf),
    #[error("Unsupported content type: `{0}`")]
    UnsupportedContentType(String),
    #[error("`project.description` must be a single line")]
    DescriptionNewlines,
    #[error("Dynamic metadata is not supported")]
    Dynamic,
    #[error("When `project.license-files` is defined, `project.license` must be an SPDX expression string")]
    MixedLicenseGenerations,
    #[error("Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `{0}`")]
    InvalidGroup(String),
    #[error(
        "Entrypoint names must consist of letters, numbers, dots and dashes; invalid name: `{0}`"
    )]
    InvalidName(String),
    #[error("Use `project.scripts` instead of `project.entry-points.console_scripts`")]
    ReservedScripts,
    #[error("Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`")]
    ReservedGuiScripts,
    #[error("`project.license` is not a valid SPDX expression: `{0}`")]
    InvalidSpdx(String, #[source] spdx::error::ParseError),
}

/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Debug, Clone)]
#[serde(
    rename_all = "kebab-case",
    expecting = "The project table needs to follow \
        https://packaging.python.org/en/latest/guides/writing-pyproject-toml"
)]
pub(crate) struct PyProjectToml {
    /// Project metadata
    project: Project,
    /// Build-related data
    build_system: BuildSystem,
}

impl PyProjectToml {
    pub(crate) fn name(&self) -> &PackageName {
        &self.project.name
    }

    pub(crate) fn version(&self) -> &Version {
        &self.project.version
    }

    pub(crate) fn parse(contents: &str) -> Result<Self, Error> {
        Ok(toml::from_str(contents)?)
    }

    /// Warn if the `[build-system]` table looks suspicious.
    ///
    /// Example of a valid table:
    ///
    /// ```toml
    /// [build-system]
    /// requires = ["uv>=0.4.15,<5"]
    /// build-backend = "uv"
    /// ```
    ///
    /// Returns whether all checks passed.
    pub(crate) fn check_build_system(&self, uv_version: &str) -> bool {
        let mut passed = true;
        if self.build_system.build_backend.as_deref() != Some("uv") {
            warn_user_once!(
                r#"The value for `build_system.build-backend` should be `"uv"`, not `"{}"`"#,
                self.build_system.build_backend.clone().unwrap_or_default()
            );
            passed = false;
        }

        let uv_version =
            Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
        let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
        let next_breaking = Version::new([0, next_minor]);

        let expected = || {
            format!(
                "Expected a single uv requirement in `build-system.requires`, found `{}`",
                toml::to_string(&self.build_system.requires).unwrap_or_default()
            )
        };

        let [uv_requirement] = &self.build_system.requires.as_slice() else {
            warn_user_once!("{}", expected());
            return false;
        };
        if uv_requirement.name.as_str() != "uv" {
            warn_user_once!("{}", expected());
            return false;
        }
        let bounded = match &uv_requirement.version_or_url {
            None => false,
            Some(VersionOrUrl::Url(_)) => {
                // We can't validate the url
                true
            }
            Some(VersionOrUrl::VersionSpecifier(specifier)) => {
                // We don't check how wide the range is (that's up to the user), we just
                // check that the current version is compliant, to avoid accidentally using a
                // too new or too old uv, and we check that an upper bound exists. The latter
                // is very important to allow making breaking changes in uv without breaking
                // the existing immutable source distributions on pypi.
                if !specifier.contains(&uv_version) {
                    // This is allowed to happen when testing prereleases, but we should still warn.
                    warn_user_once!(
                        r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
                        current uv version {uv_version}"#,
                    );
                    passed = false;
                }
                PubGrubSpecifier::from_pep440_specifiers(specifier)
                    .ok()
                    .and_then(|specifier| Some(specifier.bounding_range()?.1 != Bound::Unbounded))
                    .unwrap_or(false)
            }
        };

        if !bounded {
            warn_user_once!(
                r#"`build_system.requires = ["{uv_requirement}"]` is missing an
                upper bound on the uv version such as `<{next_breaking}`.
                Without bounding the uv version, the source distribution will break
                when a future, breaking version of uv is released."#,
            );
            passed = false;
        }

        passed
    }

    /// Validate and convert a `pyproject.toml` to core metadata.
    ///
    /// <https://packaging.python.org/en/latest/guides/writing-pyproject-toml/>
    /// <https://packaging.python.org/en/latest/specifications/pyproject-toml/>
    /// <https://packaging.python.org/en/latest/specifications/core-metadata/>
    pub(crate) fn to_metadata(&self, root: &Path) -> Result<Metadata23, Error> {
        let summary = if let Some(description) = &self.project.description {
            if description.contains('\n') {
                return Err(ValidationError::DescriptionNewlines.into());
            }
            Some(description.clone())
        } else {
            None
        };

        let supported_content_types = ["text/plain", "text/x-rst", "text/markdown"];
        let (description, description_content_type) = match &self.project.readme {
            Some(Readme::String(path)) => {
                let content = fs_err::read_to_string(root.join(path))?;
                let content_type = match path.extension().and_then(OsStr::to_str) {
                    Some("txt") => "text/plain",
                    Some("rst") => "text/x-rst",
                    Some("md") => "text/markdown",
                    Some(unknown) => {
                        return Err(ValidationError::UnknownExtension(unknown.to_owned()).into())
                    }
                    None => return Err(ValidationError::MissingExtension(path.clone()).into()),
                }
                .to_string();
                (Some(content), Some(content_type))
            }
            Some(Readme::File {
                file,
                content_type,
                charset,
            }) => {
                let content = fs_err::read_to_string(root.join(file))?;
                if !supported_content_types.contains(&content_type.as_str()) {
                    return Err(
                        ValidationError::UnsupportedContentType(content_type.clone()).into(),
                    );
                }
                if charset.as_ref().is_some_and(|charset| charset != "UTF-8") {
                    return Err(ValidationError::ReadmeCharset.into());
                }
                (Some(content), Some(content_type.clone()))
            }
            Some(Readme::Text {
                text,
                content_type,
                charset,
            }) => {
                if !supported_content_types.contains(&content_type.as_str()) {
                    return Err(
                        ValidationError::UnsupportedContentType(content_type.clone()).into(),
                    );
                }
                if charset.as_ref().is_some_and(|charset| charset != "UTF-8") {
                    return Err(ValidationError::ReadmeCharset.into());
                }
                (Some(text.clone()), Some(content_type.clone()))
            }
            None => (None, None),
        };

        if self
            .project
            .dynamic
            .as_ref()
            .is_some_and(|dynamic| !dynamic.is_empty())
        {
            return Err(ValidationError::Dynamic.into());
        }

        let author = self
            .project
            .authors
            .as_ref()
            .map(|authors| {
                authors
                    .iter()
                    .filter_map(|author| match author {
                        Contact::Name { name } => Some(name),
                        Contact::Email { .. } => None,
                        Contact::NameEmail { name, .. } => Some(name),
                    })
                    .join(", ")
            })
            .filter(|author| !author.is_empty());
        let author_email = self
            .project
            .authors
            .as_ref()
            .map(|authors| {
                authors
                    .iter()
                    .filter_map(|author| match author {
                        Contact::Name { .. } => None,
                        Contact::Email { email } => Some(email.clone()),
                        Contact::NameEmail { name, email } => Some(format!("{name} <{email}>")),
                    })
                    .join(", ")
            })
            .filter(|author_email| !author_email.is_empty());
        let maintainer = self
            .project
            .maintainers
            .as_ref()
            .map(|maintainers| {
                maintainers
                    .iter()
                    .filter_map(|maintainer| match maintainer {
                        Contact::Name { name } => Some(name),
                        Contact::Email { .. } => None,
                        Contact::NameEmail { name, .. } => Some(name),
                    })
                    .join(", ")
            })
            .filter(|maintainer| !maintainer.is_empty());
        let maintainer_email = self
            .project
            .maintainers
            .as_ref()
            .map(|maintainers| {
                maintainers
                    .iter()
                    .filter_map(|maintainer| match maintainer {
                        Contact::Name { .. } => None,
                        Contact::Email { email } => Some(email.clone()),
                        Contact::NameEmail { name, email } => Some(format!("{name} <{email}>")),
                    })
                    .join(", ")
            })
            .filter(|maintainer_email| !maintainer_email.is_empty());

        // Using PEP 639 bumps the METADATA version
        let metadata_version = if self.project.license_files.is_some()
            || matches!(self.project.license, Some(License::Spdx(_)))
        {
            debug!("Found PEP 639 license declarations, using METADATA 2.4");
            "2.4"
        } else {
            "2.3"
        };

        // TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
        let (license, license_expression, license_files) =
            if let Some(license_globs) = &self.project.license_files {
                let license_expression = match &self.project.license {
                    None => None,
                    Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
                    Some(License::Text { .. } | License::File { .. }) => {
                        return Err(ValidationError::MixedLicenseGenerations.into())
                    }
                };

                let mut license_files = Vec::new();
                for license_glob in license_globs {
                    let pep639_glob = parse_pep639_glob(license_glob)
                        .map_err(|err| Error::Pep639Glob(license_glob.to_string(), err))?;
                    let absolute_glob = PathBuf::from(glob::Pattern::escape(
                        root.simplified().to_string_lossy().as_ref(),
                    ))
                    .join(pep639_glob.to_string())
                    .to_string_lossy()
                    .to_string();
                    for license_file in glob::glob(&absolute_glob)
                        .map_err(|err| Error::Pattern(absolute_glob.to_string(), err))?
                    {
                        let license_file = license_file
                            .map_err(Error::Glob)?
                            .to_string_lossy()
                            .to_string();
                        if !license_files.contains(&license_file) {
                            license_files.push(license_file);
                        }
                    }
                }
                // The glob order may be unstable
                license_files.sort();

                (None, license_expression, license_files)
            } else {
                match &self.project.license {
                    None => (None, None, Vec::new()),
                    Some(License::Spdx(license_expression)) => {
                        (None, Some(license_expression.clone()), Vec::new())
                    }
                    Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
                    Some(License::File { file }) => {
                        let text = fs_err::read_to_string(root.join(file))?;
                        (Some(text), None, Vec::new())
                    }
                }
            };

        // Check that the license expression is a valid SPDX identifier.
        if let Some(license_expression) = &license_expression {
            if let Err(err) = spdx::Expression::parse(license_expression) {
                return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
            }
        }

        // TODO(konsti): https://peps.python.org/pep-0753/#label-normalization (Draft)
        let project_urls = self
            .project
            .urls
            .iter()
            .flatten()
            .map(|(key, value)| format!("{key}, {value}"))
            .collect();

        let extras = self
            .project
            .optional_dependencies
            .iter()
            .flat_map(|optional_dependencies| optional_dependencies.keys())
            .map(ToString::to_string)
            .collect();

        Ok(Metadata23 {
            metadata_version: metadata_version.to_string(),
            name: self.project.name.to_string(),
            version: self.project.version.to_string(),
            // Not supported.
            platforms: vec![],
            // Not supported.
            supported_platforms: vec![],
            summary,
            description,
            description_content_type,
            keywords: self
                .project
                .keywords
                .as_ref()
                .map(|keywords| keywords.join(",")),
            home_page: None,
            download_url: None,
            author,
            author_email,
            maintainer,
            maintainer_email,
            license,
            license_expression,
            license_files,
            classifiers: self.project.classifiers.clone().unwrap_or_default(),
            requires_dist: self
                .project
                .dependencies
                .iter()
                .flatten()
                .map(ToString::to_string)
                .collect(),
            // Not commonly set.
            provides_dist: vec![],
            // Not supported.
            obsoletes_dist: vec![],
            requires_python: self
                .project
                .requires_python
                .as_ref()
                .map(ToString::to_string),
            // Not used by other tools, not supported.
            requires_external: vec![],
            project_urls,
            provides_extras: extras,
            dynamic: vec![],
        })
    }

    /// Validate and convert the entrypoints in `pyproject.toml`, including console and GUI scripts,
    /// to an `entry_points.txt`.
    ///
    /// <https://packaging.python.org/en/latest/specifications/entry-points/>
    ///
    /// Returns `None` if no entrypoints were defined.
    pub(crate) fn to_entry_points(&self) -> Result<Option<String>, ValidationError> {
        let mut writer = String::new();

        if self.project.scripts.is_none()
            && self.project.gui_scripts.is_none()
            && self.project.entry_points.is_none()
        {
            return Ok(None);
        }

        if let Some(scripts) = &self.project.scripts {
            Self::write_group(&mut writer, "console_scripts", scripts)?;
        }
        if let Some(gui_scripts) = &self.project.gui_scripts {
            Self::write_group(&mut writer, "gui_scripts", gui_scripts)?;
        }
        for (group, entries) in self.project.entry_points.iter().flatten() {
            if group == "console_scripts" {
                return Err(ValidationError::ReservedScripts);
            }
            if group == "gui_scripts" {
                return Err(ValidationError::ReservedGuiScripts);
            }
            Self::write_group(&mut writer, group, entries)?;
        }
        Ok(Some(writer))
    }

    /// Write a group to `entry_points.txt`.
    fn write_group<'a>(
        writer: &mut String,
        group: &str,
        entries: impl IntoIterator<Item = (&'a String, &'a String)>,
    ) -> Result<(), ValidationError> {
        if !group
            .chars()
            .next()
            .map(|c| c.is_alphanumeric() || c == '_')
            .unwrap_or(false)
            || !group
                .chars()
                .all(|c| c.is_alphanumeric() || c == '.' || c == '_')
        {
            return Err(ValidationError::InvalidGroup(group.to_string()));
        }

        writer.push_str(&format!("[{group}]\n"));
        for (name, object_reference) in entries {
            // More strict than the spec, we enforce the recommendation
            if !name
                .chars()
                .all(|c| c.is_alphanumeric() || c == '.' || c == '-')
            {
                return Err(ValidationError::InvalidName(name.to_string()));
            }

            // TODO(konsti): Validate that the object references are valid Python identifiers.
            writer.push_str(&format!("{name} = {object_reference}\n"));
        }
        writer.push('\n');
        Ok(())
    }
}

/// The `[project]` section of a pyproject.toml as specified in
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
///
/// This struct does not have schema export; the schema is shared between all Python tools, and we
/// should update the shared schema instead.
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
struct Project {
    /// The name of the project.
    name: PackageName,
    /// The version of the project.
    version: Version,
    /// The summary description of the project in one line.
    description: Option<String>,
    /// The full description of the project (i.e. the README).
    readme: Option<Readme>,
    /// The Python version requirements of the project.
    requires_python: Option<VersionSpecifiers>,
    /// The license under which the project is distributed.
    ///
    /// Supports both the current standard and the provisional PEP 639.
    license: Option<License>,
    /// The paths to files containing licenses and other legal notices to be distributed with the
    /// project.
    ///
    /// From the provisional PEP 639.
    license_files: Option<Vec<String>>,
    /// The people or organizations considered to be the "authors" of the project.
    authors: Option<Vec<Contact>>,
    /// The people or organizations considered to be the "maintainers" of the project.
    maintainers: Option<Vec<Contact>>,
    /// The keywords for the project.
    keywords: Option<Vec<String>>,
    /// Trove classifiers which apply to the project.
    classifiers: Option<Vec<String>>,
    /// A table of URLs where the key is the URL label and the value is the URL itself.
    ///
    /// PyPI shows all URLs with their name. For some known patterns, they add favicons.
    /// main: <https://github.com/pypi/warehouse/blob/main/warehouse/templates/packaging/detail.html>
    /// archived: <https://github.com/pypi/warehouse/blob/e3bd3c3805ff47fff32b67a899c1ce11c16f3c31/warehouse/templates/packaging/detail.html>
    urls: Option<BTreeMap<String, String>>,
    /// The console entrypoints of the project.
    ///
    /// The key of the table is the name of the entry point and the value is the object reference.
    scripts: Option<BTreeMap<String, String>>,
    /// The GUI entrypoints of the project.
    ///
    /// The key of the table is the name of the entry point and the value is the object reference.
    gui_scripts: Option<BTreeMap<String, String>>,
    /// Entry point groups of the project.
    ///
    /// The key of the outer table is the group name; each inner table maps entry point names to
    /// object references.
    entry_points: Option<BTreeMap<String, BTreeMap<String, String>>>,
    /// The dependencies of the project.
    dependencies: Option<Vec<Requirement>>,
    /// The optional dependencies of the project.
    optional_dependencies: Option<BTreeMap<ExtraName, Vec<Requirement>>>,
    /// Specifies which fields listed by PEP 621 were intentionally unspecified so another tool
    /// can/will provide such metadata dynamically.
    ///
    /// Not supported, an error if anything but the default empty list.
    dynamic: Option<Vec<String>>,
}

/// The optional `project.readme` key in a pyproject.toml as specified in
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#readme>.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged, rename_all = "kebab-case")]
enum Readme {
    /// Relative path to the README.
    String(PathBuf),
    /// Relative path to the README.
    File {
        file: PathBuf,
        content_type: String,
        charset: Option<String>,
    },
    /// The full description of the project as inline value.
    Text {
        text: String,
        content_type: String,
        charset: Option<String>,
    },
}

/// The optional `project.license` key in a pyproject.toml as specified in
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#license>.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum License {
    /// An SPDX Expression.
    ///
    /// From the provisional PEP 639.
    Spdx(String),
    Text {
        /// The full text of the license.
        text: String,
    },
    File {
        /// The file containing the license text.
        file: PathBuf,
    },
}

/// A `project.authors` or `project.maintainers` entry as specified in
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#authors-maintainers>.
///
/// The entry is derived from the email format of `John Doe <john.doe@example.net>`. You need to
/// provide at least a name or an email.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged, expecting = "a table with 'name' and/or 'email' keys")]
enum Contact {
    /// TODO(konsti): RFC 822 validation.
    Name { name: String },
    /// TODO(konsti): RFC 822 validation.
    Email { email: String },
    /// TODO(konsti): RFC 822 validation.
    NameEmail { name: String, email: String },
}

/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
struct BuildSystem {
    /// PEP 508 dependencies required to execute the build system.
    requires: Vec<Requirement<VerbatimParsedUrl>>,
    /// A string naming a Python object that will be used to perform the build.
    build_backend: Option<String>,
    /// <https://peps.python.org/pep-0517/#in-tree-build-backends>
    backend_path: Option<Vec<String>>,
}

#[cfg(test)]
mod tests;
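The upper-bound suggestion in `check_build_system` is simple arithmetic on the release segment: for a current version `0.4.15`, the next breaking version is `0.5`, so the warning recommends `<0.5`. A sketch of just that step, assuming `uv_pep440::Version` from the workspace:

use std::str::FromStr;
use uv_pep440::Version;

// Mirrors the `next_breaking` computation above: bump the minor, drop the rest.
fn next_breaking(uv_version: &str) -> Version {
    let version = Version::from_str(uv_version).expect("valid PEP 440 version");
    let next_minor = version.release().get(1).copied().unwrap_or_default() + 1;
    Version::new([0, next_minor])
}

// next_breaking("0.4.15") == Version::new([0, 5]), i.e. the suggested bound `<0.5`.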
401 crates/uv-build-backend/src/metadata/tests.rs Normal file

@@ -0,0 +1,401 @@
use super::*;
use indoc::{formatdoc, indoc};
use insta::assert_snapshot;
use std::iter;
use tempfile::TempDir;

fn extend_project(payload: &str) -> String {
    formatdoc! {r#"
        [project]
        name = "hello-world"
        version = "0.1.0"
        {payload}

        [build-system]
        requires = ["uv>=0.4.15,<5"]
        build-backend = "uv"
        "#
    }
}

fn format_err(err: impl std::error::Error) -> String {
    let mut formatted = err.to_string();
    for source in iter::successors(err.source(), |&err| err.source()) {
        formatted += &format!("\n  Caused by: {source}");
    }
    formatted
}

#[test]
fn valid() {
    let temp_dir = TempDir::new().unwrap();

    fs_err::write(
        temp_dir.path().join("Readme.md"),
        indoc! {r"
            # Foo

            This is the foo library.
        "},
    )
    .unwrap();

    fs_err::write(
        temp_dir.path().join("License.txt"),
        indoc! {r#"
            THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
            INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
            PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
            HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
            CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
            OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
        "#},
    )
    .unwrap();

    let contents = indoc! {r#"
        # See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example

        [project]
        name = "hello-world"
        version = "0.1.0"
        description = "A Python package"
        readme = "Readme.md"
        requires_python = ">=3.12"
        license = { file = "License.txt" }
        authors = [{ name = "Ferris the crab", email = "ferris@rustacean.net" }]
        maintainers = [{ name = "Konsti", email = "konstin@mailbox.org" }]
        keywords = ["demo", "example", "package"]
        classifiers = [
            "Development Status :: 6 - Mature",
            "License :: OSI Approved :: MIT License",
            # https://github.com/pypa/trove-classifiers/issues/17
            "License :: OSI Approved :: Apache Software License",
            "Programming Language :: Python",
        ]
        dependencies = ["flask>=3,<4", "sqlalchemy[asyncio]>=2.0.35,<3"]
        # We don't support dynamic fields, the default empty array is the only allowed value.
        dynamic = []

        [project.optional-dependencies]
        postgres = ["psycopg>=3.2.2,<4"]
        mysql = ["pymysql>=1.1.1,<2"]

        [project.urls]
        "Homepage" = "https://github.com/astral-sh/uv"
        "Repository" = "https://astral.sh"

        [project.scripts]
        foo = "foo.cli:__main__"

        [project.gui-scripts]
        foo-gui = "foo.gui"

        [project.entry-points.bar_group]
        foo-bar = "foo:bar"

        [build-system]
        requires = ["uv>=0.4.15,<5"]
        build-backend = "uv"
        "#
    };

    let pyproject_toml = PyProjectToml::parse(contents).unwrap();
    let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

    assert_snapshot!(metadata.core_metadata_format(), @r###"
    Metadata-Version: 2.3
    Name: hello-world
    Version: 0.1.0
    Summary: A Python package
    Keywords: demo,example,package
    Author: Ferris the crab
    License: THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
             INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
             PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
             HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
             CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
             OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    Classifier: Development Status :: 6 - Mature
    Classifier: License :: OSI Approved :: MIT License
    Classifier: License :: OSI Approved :: Apache Software License
    Classifier: Programming Language :: Python
    Requires-Dist: flask>=3,<4
    Requires-Dist: sqlalchemy[asyncio]>=2.0.35,<3
    Maintainer: Konsti
    Project-URL: Homepage, https://github.com/astral-sh/uv
    Project-URL: Repository, https://astral.sh
    Provides-Extra: mysql
    Provides-Extra: postgres
    Description-Content-Type: text/markdown

    # Foo

    This is the foo library.
    "###);

    assert_snapshot!(pyproject_toml.to_entry_points().unwrap().unwrap(), @r###"
    [console_scripts]
    foo = foo.cli:__main__

    [gui_scripts]
    foo-gui = foo.gui

    [bar_group]
    foo-bar = foo:bar

    "###);
}

#[test]
fn build_system_valid() {
    let contents = extend_project("");
    let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
    assert!(pyproject_toml.check_build_system("1.0.0+test"));
}

#[test]
fn build_system_no_bound() {
    let contents = indoc! {r#"
        [project]
        name = "hello-world"
        version = "0.1.0"

        [build-system]
        requires = ["uv"]
        build-backend = "uv"
    "#};
    let pyproject_toml = PyProjectToml::parse(contents).unwrap();
    assert!(!pyproject_toml.check_build_system("1.0.0+test"));
}

#[test]
fn build_system_multiple_packages() {
    let contents = indoc! {r#"
        [project]
        name = "hello-world"
        version = "0.1.0"

        [build-system]
        requires = ["uv>=0.4.15,<5", "wheel"]
        build-backend = "uv"
    "#};
    let pyproject_toml = PyProjectToml::parse(contents).unwrap();
    assert!(!pyproject_toml.check_build_system("1.0.0+test"));
}

#[test]
fn build_system_no_requires_uv() {
    let contents = indoc! {r#"
        [project]
        name = "hello-world"
        version = "0.1.0"

        [build-system]
        requires = ["setuptools"]
        build-backend = "uv"
    "#};
    let pyproject_toml = PyProjectToml::parse(contents).unwrap();
    assert!(!pyproject_toml.check_build_system("1.0.0+test"));
}

#[test]
fn build_system_not_uv() {
    let contents = indoc! {r#"
        [project]
        name = "hello-world"
        version = "0.1.0"

        [build-system]
        requires = ["uv>=0.4.15,<5"]
        build-backend = "setuptools"
    "#};
    let pyproject_toml = PyProjectToml::parse(contents).unwrap();
    assert!(!pyproject_toml.check_build_system("1.0.0+test"));
}

#[test]
fn minimal() {
    let contents = extend_project("");

    let metadata = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap();

    assert_snapshot!(metadata.core_metadata_format(), @r###"
    Metadata-Version: 2.3
    Name: hello-world
    Version: 0.1.0
    "###);
}

#[test]
fn invalid_readme_spec() {
    let contents = extend_project(indoc! {r#"
        readme = { path = "Readme.md" }
        "#
    });

    let err = PyProjectToml::parse(&contents).unwrap_err();
    assert_snapshot!(format_err(err), @r###"
    Invalid pyproject.toml
      Caused by: TOML parse error at line 4, column 10
      |
    4 | readme = { path = "Readme.md" }
      |          ^^^^^^^^^^^^^^^^^^^^^^
    data did not match any variant of untagged enum Readme
    "###);
}

#[test]
fn missing_readme() {
    let contents = extend_project(indoc! {r#"
        readme = "Readme.md"
        "#
    });

    let err = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap_err();
    // Simplified for windows compatibility.
    assert_snapshot!(err.to_string().replace('\\', "/"), @"failed to open file `/do/not/read/Readme.md`");
}

#[test]
fn multiline_description() {
    let contents = extend_project(indoc! {r#"
        description = "Hi :)\nThis is my project"
        "#
    });

    let err = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap_err();
    assert_snapshot!(format_err(err), @r###"
    Invalid pyproject.toml
      Caused by: `project.description` must be a single line
    "###);
}

#[test]
fn mixed_licenses() {
    let contents = extend_project(indoc! {r#"
        license-files = ["licenses/*"]
        license = { text = "MIT" }
        "#
    });

    let err = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap_err();
    assert_snapshot!(format_err(err), @r###"
    Invalid pyproject.toml
      Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
    "###);
}

#[test]
fn valid_license() {
    let contents = extend_project(indoc! {r#"
        license = "MIT OR Apache-2.0"
        "#
    });
    let metadata = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap();
    assert_snapshot!(metadata.core_metadata_format(), @r###"
    Metadata-Version: 2.4
    Name: hello-world
    Version: 0.1.0
    License-Expression: MIT OR Apache-2.0
    "###);
}

#[test]
fn invalid_license() {
    let contents = extend_project(indoc! {r#"
        license = "MIT XOR Apache-2"
        "#
    });
    let err = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap_err();
    // TODO(konsti): We mess up the indentation in the error.
    assert_snapshot!(format_err(err), @r###"
    Invalid pyproject.toml
      Caused by: `project.license` is not a valid SPDX expression: `MIT XOR Apache-2`
      Caused by: MIT XOR Apache-2
        ^^^ unknown term
    "###);
}

#[test]
fn dynamic() {
    let contents = extend_project(indoc! {r#"
        dynamic = ["dependencies"]
        "#
    });

    let err = PyProjectToml::parse(&contents)
        .unwrap()
        .to_metadata(Path::new("/do/not/read"))
        .unwrap_err();
    assert_snapshot!(format_err(err), @r###"
    Invalid pyproject.toml
      Caused by: Dynamic metadata is not supported
    "###);
}

fn script_error(contents: &str) -> String {
    let err = PyProjectToml::parse(contents)
        .unwrap()
        .to_entry_points()
        .unwrap_err();
    format_err(err)
}

#[test]
fn invalid_entry_point_group() {
    let contents = extend_project(indoc! {r#"
        [project.entry-points."a@b"]
        foo = "bar"
        "#
    });
    assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `a@b`");
}

#[test]
fn invalid_entry_point_name() {
    let contents = extend_project(indoc! {r#"
        [project.scripts]
        "a@b" = "bar"
        "#
    });
    assert_snapshot!(script_error(&contents), @"Entrypoint names must consist of letters, numbers, dots and dashes; invalid name: `a@b`");
}

#[test]
fn invalid_entry_point_conflict_scripts() {
    let contents = extend_project(indoc! {r#"
        [project.entry-points.console_scripts]
        foo = "bar"
        "#
    });
    assert_snapshot!(script_error(&contents), @"Use `project.scripts` instead of `project.entry-points.console_scripts`");
}

#[test]
fn invalid_entry_point_conflict_gui_scripts() {
    let contents = extend_project(indoc! {r#"
        [project.entry-points.gui_scripts]
        foo = "bar"
        "#
    });
    assert_snapshot!(script_error(&contents), @"Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`");
}
81
crates/uv-build-backend/src/pep639_glob.rs
Normal file
81
crates/uv-build-backend/src/pep639_glob.rs
Normal file
@ -0,0 +1,81 @@
//! Implementation of PEP 639 cross-language restricted globs.

use glob::{Pattern, PatternError};
use thiserror::Error;

#[derive(Debug, Error)]
pub enum Pep639GlobError {
    #[error(transparent)]
    PatternError(#[from] PatternError),
    #[error("The parent directory operator (`..`) at position {pos} is not allowed in license file globs")]
    ParentDirectory { pos: usize },
    #[error("Glob contains invalid character at position {pos}: `{invalid}`")]
    InvalidCharacter { pos: usize, invalid: char },
    #[error("Glob contains invalid character in range at position {pos}: `{invalid}`")]
    InvalidCharacterRange { pos: usize, invalid: char },
}

/// Parse a PEP 639 `license-files` glob.
///
/// The syntax is more restricted than regular globbing in Python or Rust for platform independent
/// results. Since [`glob::Pattern`] is a superset over this format, we can use it after validating
/// that no unsupported features are in the string.
///
/// From [PEP 639](https://peps.python.org/pep-0639/#add-license-files-key):
///
/// > Its value is an array of strings which MUST contain valid glob patterns,
/// > as specified below:
/// >
/// > - Alphanumeric characters, underscores (`_`), hyphens (`-`) and dots (`.`)
/// >   MUST be matched verbatim.
/// >
/// > - Special glob characters: `*`, `?`, `**` and character ranges: `[]`
/// >   containing only the verbatim matched characters MUST be supported.
/// >   Within `[...]`, the hyphen indicates a range (e.g. `a-z`).
/// >   Hyphens at the start or end are matched literally.
/// >
/// > - Path delimiters MUST be the forward slash character (`/`).
/// >   Patterns are relative to the directory containing `pyproject.toml`,
/// >   therefore the leading slash character MUST NOT be used.
/// >
/// > - Parent directory indicators (`..`) MUST NOT be used.
/// >
/// > Any characters or character sequences not covered by this specification are
/// > invalid. Projects MUST NOT use such values.
/// > Tools consuming this field MAY reject invalid values with an error.
pub(crate) fn parse_pep639_glob(glob: &str) -> Result<Pattern, Pep639GlobError> {
    let mut chars = glob.chars().enumerate().peekable();
    // A `..` is a parent directory indicator at the start of the string or after a directory
    // separator.
    let mut start_or_slash = true;
    while let Some((pos, c)) = chars.next() {
        if c.is_alphanumeric() || matches!(c, '_' | '-' | '*' | '?') {
            start_or_slash = false;
        } else if c == '.' {
            if start_or_slash && matches!(chars.peek(), Some((_, '.'))) {
                return Err(Pep639GlobError::ParentDirectory { pos });
            }
            start_or_slash = false;
        } else if c == '/' {
            start_or_slash = true;
        } else if c == '[' {
            for (pos, c) in chars.by_ref() {
                // TODO: https://discuss.python.org/t/pep-639-round-3-improving-license-clarity-with-better-package-metadata/53020/98
                if c.is_alphanumeric() || matches!(c, '_' | '-' | '.') {
                    // Allowed.
                } else if c == ']' {
                    break;
                } else {
                    return Err(Pep639GlobError::InvalidCharacterRange { pos, invalid: c });
                }
            }
            start_or_slash = false;
        } else {
            return Err(Pep639GlobError::InvalidCharacter { pos, invalid: c });
        }
    }
    Ok(Pattern::new(glob)?)
}

#[cfg(test)]
mod tests;
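For orientation (not part of this commit): a minimal sketch of how a validated PEP 639 glob might be applied to candidate paths. It assumes the `glob` crate's `Pattern::matches_with` API and that `parse_pep639_glob` is in scope; the helper name and its inputs are hypothetical.

use glob::{MatchOptions, Pattern};

// Hypothetical helper: keep only the candidate paths (using `/` separators,
// relative to the project root) that match a PEP 639 `license-files` glob.
fn match_license_files<'a>(
    pattern: &str,
    candidates: &'a [&'a str],
) -> Result<Vec<&'a str>, Pep639GlobError> {
    let pattern: Pattern = parse_pep639_glob(pattern)?;
    // `require_literal_separator` keeps `*` and `?` from crossing `/`
    // boundaries, preserving the PEP's distinction between `*` and `**`.
    let options = MatchOptions {
        require_literal_separator: true,
        ..MatchOptions::default()
    };
    Ok(candidates
        .iter()
        .copied()
        .filter(|path| pattern.matches_with(path, options))
        .collect())
}

With this sketch, `match_license_files("licenses/*.txt", &["licenses/MIT.txt", "licenses/vendor/BSD.txt"])` would keep only the first path, since a single `*` does not cross a `/`.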
54
crates/uv-build-backend/src/pep639_glob/tests.rs
Normal file
54
crates/uv-build-backend/src/pep639_glob/tests.rs
Normal file
@ -0,0 +1,54 @@
use super::*;
use insta::assert_snapshot;

#[test]
fn test_error() {
    let parse_err = |glob| parse_pep639_glob(glob).unwrap_err().to_string();
    assert_snapshot!(
        parse_err(".."),
        @"The parent directory operator (`..`) at position 0 is not allowed in license file globs"
    );
    assert_snapshot!(
        parse_err("licenses/.."),
        @"The parent directory operator (`..`) at position 9 is not allowed in license file globs"
    );
    assert_snapshot!(
        parse_err("licenses/LICEN!E.txt"),
        @"Glob contains invalid character at position 14: `!`"
    );
    assert_snapshot!(
        parse_err("licenses/LICEN[!C]E.txt"),
        @"Glob contains invalid character in range at position 15: `!`"
    );
    assert_snapshot!(
        parse_err("licenses/LICEN[C?]E.txt"),
        @"Glob contains invalid character in range at position 16: `?`"
    );
    assert_snapshot!(parse_err("******"), @"Pattern syntax error near position 2: wildcards are either regular `*` or recursive `**`");
    assert_snapshot!(
        parse_err(r"licenses\eula.txt"),
        @r"Glob contains invalid character at position 8: `\`"
    );
}

#[test]
fn test_valid() {
    let cases = [
        "licenses/*.txt",
        "licenses/**/*.txt",
        "LICEN[CS]E.txt",
        "LICEN?E.txt",
        "[a-z].txt",
        "[a-z._-].txt",
        "*/**",
        "LICENSE..txt",
        "LICENSE_file-1.txt",
        // (google translate)
        "licenses/라이센스*.txt",
        "licenses/ライセンス*.txt",
        "licenses/执照*.txt",
    ];
    for case in cases {
        parse_pep639_glob(case).unwrap();
    }
}
128
crates/uv-build-backend/src/tests.rs
Normal file
128
crates/uv-build-backend/src/tests.rs
Normal file
@ -0,0 +1,128 @@
use super::*;
use insta::assert_snapshot;
use std::str::FromStr;
use tempfile::TempDir;
use uv_normalize::PackageName;
use uv_pep440::Version;

#[test]
fn test_wheel() {
    let filename = WheelFilename {
        name: PackageName::from_str("foo").unwrap(),
        version: Version::from_str("1.2.3").unwrap(),
        build_tag: None,
        python_tag: vec!["py2".to_string(), "py3".to_string()],
        abi_tag: vec!["none".to_string()],
        platform_tag: vec!["any".to_string()],
    };

    assert_snapshot!(wheel_info(&filename, "1.0.0+test"), @r"
    Wheel-Version: 1.0
    Generator: uv 1.0.0+test
    Root-Is-Purelib: true
    Tag: py2-none-any
    Tag: py3-none-any
    ");
}

#[test]
fn test_record() {
    let record = vec![RecordEntry {
        path: "uv_backend/__init__.py".to_string(),
        hash: "89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865".to_string(),
        size: 37,
    }];

    let mut writer = Vec::new();
    write_record(&mut writer, "uv_backend-0.1.0", record).unwrap();
    assert_snapshot!(String::from_utf8(writer).unwrap(), @r"
    uv_backend/__init__.py,sha256=89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865,37
    uv_backend-0.1.0/RECORD,,
    ");
}

/// Check that we write deterministic wheels.
#[test]
fn test_determinism() {
    let temp1 = TempDir::new().unwrap();
    let uv_backend = Path::new("../../scripts/packages/uv_backend");
    build(uv_backend, temp1.path(), None, "1.0.0+test").unwrap();

    // Touch the file to check that we don't serialize the last modified date.
    fs_err::write(
        uv_backend.join("src/uv_backend/__init__.py"),
        "def greet():\n    print(\"Hello 👋\")\n",
    )
    .unwrap();

    let temp2 = TempDir::new().unwrap();
    build(uv_backend, temp2.path(), None, "1.0.0+test").unwrap();

    let wheel_filename = "uv_backend-0.1.0-py3-none-any.whl";
    assert_eq!(
        fs_err::read(temp1.path().join(wheel_filename)).unwrap(),
        fs_err::read(temp2.path().join(wheel_filename)).unwrap()
    );
}

/// Snapshot all files from the prepare metadata hook.
#[test]
fn test_prepare_metadata() {
    let metadata_dir = TempDir::new().unwrap();
    let uv_backend = Path::new("../../scripts/packages/uv_backend");
    metadata(uv_backend, metadata_dir.path(), "1.0.0+test").unwrap();

    let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
        .into_iter()
        .map(|entry| {
            entry
                .unwrap()
                .path()
                .strip_prefix(metadata_dir.path())
                .unwrap()
                .portable_display()
                .to_string()
        })
        .filter(|path| !path.is_empty())
        .collect();
    files.sort();
    assert_snapshot!(files.join("\n"), @r"
    uv_backend-0.1.0.dist-info
    uv_backend-0.1.0.dist-info/METADATA
    uv_backend-0.1.0.dist-info/RECORD
    uv_backend-0.1.0.dist-info/WHEEL
    ");

    let metadata_file = metadata_dir
        .path()
        .join("uv_backend-0.1.0.dist-info/METADATA");
    assert_snapshot!(fs_err::read_to_string(metadata_file).unwrap(), @r###"
    Metadata-Version: 2.3
    Name: uv-backend
    Version: 0.1.0
    Summary: Add your description here
    Requires-Python: >=3.12
    Description-Content-Type: text/markdown

    # uv_backend

    A simple package to be built with the uv build backend.
    "###);

    let record_file = metadata_dir
        .path()
        .join("uv_backend-0.1.0.dist-info/RECORD");
    assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @r###"
    uv_backend-0.1.0.dist-info/WHEEL,sha256=3da1bfa0e8fd1b6cc246aa0b2b44a35815596c600cb485c39a6f8c106c3d5a8d,83
    uv_backend-0.1.0.dist-info/METADATA,sha256=e4a0d390317d7182f65ea978254c71ed283e0a4242150cf1c99a694b113ff68d,224
    uv_backend-0.1.0.dist-info/RECORD,,
    "###);

    let wheel_file = metadata_dir.path().join("uv_backend-0.1.0.dist-info/WHEEL");
    assert_snapshot!(fs_err::read_to_string(wheel_file).unwrap(), @r###"
    Wheel-Version: 1.0
    Generator: uv 1.0.0+test
    Root-Is-Purelib: true
    Tag: py3-none-any
    "###);
}
@ -10,17 +10,22 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

[dependencies]
uv-configuration = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-types = { workspace = true }
uv-fs = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-static = { workspace = true }
uv-types = { workspace = true }
uv-virtualenv = { workspace = true }
@ -57,6 +57,8 @@ static DISTUTILS_NOT_FOUND_RE: LazyLock<Regex> =
pub enum Error {
    #[error(transparent)]
    Io(#[from] io::Error),
    #[error(transparent)]
    Lowering(#[from] uv_distribution::MetadataError),
    #[error("{} does not appear to be a Python project, as neither `pyproject.toml` nor `setup.py` are present in the directory", _0.simplified_display())]
    InvalidSourceDist(PathBuf),
    #[error("Invalid `pyproject.toml`")]
@ -27,16 +27,19 @@ use tokio::process::Command;
use tokio::sync::{Mutex, Semaphore};
use tracing::{debug, info_span, instrument, Instrument};

pub use crate::error::{Error, MissingHeaderCause};
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings};
use uv_distribution_types::Resolution;
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, LowerBound, SourceStrategy};
use uv_distribution::RequiresDist;
use uv_distribution_types::{IndexLocations, Resolution};
use uv_fs::{rename_with_retry, PythonExt, Simplified};
use uv_pep440::Version;
use uv_pep508::PackageName;
use uv_pypi_types::{Requirement, VerbatimParsedUrl};
use uv_python::{Interpreter, PythonEnvironment};
use uv_static::EnvVars;
use uv_types::{BuildContext, BuildIsolation, SourceBuildTrait};

pub use crate::error::{Error, MissingHeaderCause};

/// The default backend to use when PEP 517 is used without a `build-system` section.
static DEFAULT_BACKEND: LazyLock<Pep517Backend> = LazyLock::new(|| Pep517Backend {
    backend: "setuptools.build_meta:__legacy__".to_string(),
@ -242,12 +245,15 @@ impl SourceBuild {
    pub async fn setup(
        source: &Path,
        subdirectory: Option<&Path>,
        install_path: &Path,
        fallback_package_name: Option<&PackageName>,
        fallback_package_version: Option<&Version>,
        interpreter: &Interpreter,
        build_context: &impl BuildContext,
        source_build_context: SourceBuildContext,
        version_id: Option<String>,
        locations: &IndexLocations,
        source_strategy: SourceStrategy,
        config_settings: ConfigSettings,
        build_isolation: BuildIsolation<'_>,
        build_kind: BuildKind,
@ -266,8 +272,16 @@ impl SourceBuild {
        let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();

        // Check if we have a PEP 517 build backend.
        let (pep517_backend, project) =
            Self::extract_pep517_backend(&source_tree, &default_backend).map_err(|err| *err)?;
        let (pep517_backend, project) = Self::extract_pep517_backend(
            &source_tree,
            install_path,
            fallback_package_name,
            locations,
            source_strategy,
            &default_backend,
        )
        .await
        .map_err(|err| *err)?;

        let package_name = project
            .as_ref()
@ -318,10 +332,10 @@ impl SourceBuild {

        // Figure out what the modified path should be, and remove the PATH variable from the
        // environment variables if it's there.
        let user_path = environment_variables.remove(&OsString::from("PATH"));
        let user_path = environment_variables.remove(&OsString::from(EnvVars::PATH));

        // See if there is an OS PATH variable.
        let os_path = env::var_os("PATH");
        let os_path = env::var_os(EnvVars::PATH);

        // Prepend the user supplied PATH to the existing OS PATH
        let modified_path = if let Some(user_path) = user_path {
@ -356,12 +370,15 @@ impl SourceBuild {
        create_pep517_build_environment(
            &runner,
            &source_tree,
            install_path,
            &venv,
            &pep517_backend,
            build_context,
            package_name.as_ref(),
            package_version.as_ref(),
            version_id.as_deref(),
            locations,
            source_strategy,
            build_kind,
            level,
            &config_settings,
@ -420,8 +437,12 @@ impl SourceBuild {
    }

    /// Extract the PEP 517 backend from the `pyproject.toml` or `setup.py` file.
    fn extract_pep517_backend(
    async fn extract_pep517_backend(
        source_tree: &Path,
        install_path: &Path,
        package_name: Option<&PackageName>,
        locations: &IndexLocations,
        source_strategy: SourceStrategy,
        default_backend: &Pep517Backend,
    ) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
        match fs::read_to_string(source_tree.join("pyproject.toml")) {
@ -432,7 +453,49 @@ impl SourceBuild {
                let pyproject_toml: PyProjectToml =
                    PyProjectToml::deserialize(pyproject_toml.into_deserializer())
                        .map_err(Error::InvalidPyprojectTomlSchema)?;

                let backend = if let Some(build_system) = pyproject_toml.build_system {
                    // If necessary, lower the requirements.
                    let requirements = match source_strategy {
                        SourceStrategy::Enabled => {
                            if let Some(name) = pyproject_toml
                                .project
                                .as_ref()
                                .map(|project| &project.name)
                                .or(package_name)
                            {
                                // TODO(charlie): Add a type to lower requirements without providing
                                // empty extras.
                                let requires_dist = uv_pypi_types::RequiresDist {
                                    name: name.clone(),
                                    requires_dist: build_system.requires,
                                    provides_extras: vec![],
                                };
                                let requires_dist = RequiresDist::from_project_maybe_workspace(
                                    requires_dist,
                                    install_path,
                                    locations,
                                    source_strategy,
                                    LowerBound::Allow,
                                )
                                .await
                                .map_err(Error::Lowering)?;
                                requires_dist.requires_dist
                            } else {
                                build_system
                                    .requires
                                    .into_iter()
                                    .map(Requirement::from)
                                    .collect()
                            }
                        }
                        SourceStrategy::Disabled => build_system
                            .requires
                            .into_iter()
                            .map(Requirement::from)
                            .collect(),
                    };

                    Pep517Backend {
                        // If `build-backend` is missing, inject the legacy setuptools backend, but
                        // retain the `requires`, to match `pip` and `build`. Note that while PEP 517
@ -445,11 +508,7 @@ impl SourceBuild {
                            .build_backend
                            .unwrap_or_else(|| "setuptools.build_meta:__legacy__".to_string()),
                        backend_path: build_system.backend_path,
                        requirements: build_system
                            .requires
                            .into_iter()
                            .map(Requirement::from)
                            .collect(),
                        requirements,
                    }
                } else {
                    // If a `pyproject.toml` is present, but `[build-system]` is missing, proceed with
@ -748,12 +807,15 @@ fn escape_path_for_python(path: &Path) -> String {
async fn create_pep517_build_environment(
    runner: &PythonRunner,
    source_tree: &Path,
    install_path: &Path,
    venv: &PythonEnvironment,
    pep517_backend: &Pep517Backend,
    build_context: &impl BuildContext,
    package_name: Option<&PackageName>,
    package_version: Option<&Version>,
    version_id: Option<&str>,
    locations: &IndexLocations,
    source_strategy: SourceStrategy,
    build_kind: BuildKind,
    level: BuildOutput,
    config_settings: &ConfigSettings,
@ -850,7 +912,34 @@ async fn create_pep517_build_environment(
            version_id,
        )
    })?;
    let extra_requires: Vec<_> = extra_requires.into_iter().map(Requirement::from).collect();

    // If necessary, lower the requirements.
    let extra_requires = match source_strategy {
        SourceStrategy::Enabled => {
            if let Some(package_name) = package_name {
                // TODO(charlie): Add a type to lower requirements without providing
                // empty extras.
                let requires_dist = uv_pypi_types::RequiresDist {
                    name: package_name.clone(),
                    requires_dist: extra_requires,
                    provides_extras: vec![],
                };
                let requires_dist = RequiresDist::from_project_maybe_workspace(
                    requires_dist,
                    install_path,
                    locations,
                    source_strategy,
                    LowerBound::Allow,
                )
                .await
                .map_err(Error::Lowering)?;
                requires_dist.requires_dist
            } else {
                extra_requires.into_iter().map(Requirement::from).collect()
            }
        }
        SourceStrategy::Disabled => extra_requires.into_iter().map(Requirement::from).collect(),
    };

    // Some packages (such as tqdm 4.66.1) list only extra requires that have already been part of
    // the pyproject.toml requires (in this case, `wheel`). We can skip doing the whole resolution
@ -921,13 +1010,15 @@ impl PythonRunner {
    ) -> Result<PythonRunnerOutput, Error> {
        /// Read lines from a reader and store them in a buffer.
        async fn read_from(
            mut reader: tokio::io::Lines<tokio::io::BufReader<impl tokio::io::AsyncRead + Unpin>>,
            mut reader: tokio::io::Split<tokio::io::BufReader<impl tokio::io::AsyncRead + Unpin>>,
            mut printer: Printer,
            buffer: &mut Vec<String>,
        ) -> io::Result<()> {
            loop {
                match reader.next_line().await? {
                    Some(line) => {
                match reader.next_segment().await? {
                    Some(line_buf) => {
                        let line_buf = line_buf.strip_suffix(b"\r").unwrap_or(&line_buf);
                        let line = String::from_utf8_lossy(line_buf).into();
                        let _ = write!(printer, "{line}");
                        buffer.push(line);
                    }
@ -942,10 +1033,10 @@ impl PythonRunner {
            .args(["-c", script])
            .current_dir(source_tree.simplified())
            .envs(environment_variables)
            .env("PATH", modified_path)
            .env("VIRTUAL_ENV", venv.root())
            .env("CLICOLOR_FORCE", "1")
            .env("PYTHONIOENCODING", "utf-8")
            .env(EnvVars::PATH, modified_path)
            .env(EnvVars::VIRTUAL_ENV, venv.root())
            .env(EnvVars::CLICOLOR_FORCE, "1")
            .env(EnvVars::PYTHONIOENCODING, "utf-8:backslashreplace")
            .stdout(std::process::Stdio::piped())
            .stderr(std::process::Stdio::piped())
            .spawn()
@ -956,8 +1047,8 @@ impl PythonRunner {
        let mut stderr_buf = Vec::with_capacity(1024);

        // Create separate readers for `stdout` and `stderr`.
        let stdout_reader = tokio::io::BufReader::new(child.stdout.take().unwrap()).lines();
        let stderr_reader = tokio::io::BufReader::new(child.stderr.take().unwrap()).lines();
        let stdout_reader = tokio::io::BufReader::new(child.stdout.take().unwrap()).split(b'\n');
        let stderr_reader = tokio::io::BufReader::new(child.stderr.take().unwrap()).split(b'\n');

        // Asynchronously read from the in-memory pipes.
        let printer = Printer::from(self.level);
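The motivation for switching from `next_line()` to `split(b'\n')` + `next_segment()`: `next_line()` requires valid UTF-8 and errors out on arbitrary backend output, whereas splitting raw bytes and decoding each segment lossily tolerates it. A standalone sketch of the same pattern (illustrative only, using an in-memory reader rather than a child process):

use tokio::io::AsyncBufReadExt;

// Read newline-delimited bytes and decode each line lossily, so output that
// is not valid UTF-8 cannot abort the reader.
async fn read_lossy_lines(input: &[u8]) -> std::io::Result<Vec<String>> {
    let mut segments = tokio::io::BufReader::new(input).split(b'\n');
    let mut lines = Vec::new();
    while let Some(segment) = segments.next_segment().await? {
        // Windows line endings arrive as `\r\n`; splitting only strips the `\n`.
        let segment = segment.strip_suffix(b"\r").unwrap_or(&segment);
        lines.push(String::from_utf8_lossy(segment).into_owned());
    }
    Ok(lines)
}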
@ -9,6 +9,9 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

@ -1,4 +1,4 @@
use crate::commit_info::CacheCommit;
use crate::git_info::{Commit, Tags};
use crate::timestamp::Timestamp;

use serde::Deserialize;
@ -26,7 +26,9 @@ pub struct CacheInfo {
    /// files to timestamp via the `cache-keys` field.
    timestamp: Option<Timestamp>,
    /// The commit at which the distribution was built.
    commit: Option<CacheCommit>,
    commit: Option<Commit>,
    /// The Git tags present at the time of the build.
    tags: Option<Tags>,
}

impl CacheInfo {
@ -51,6 +53,7 @@ impl CacheInfo {
    /// Compute the cache info for a given directory.
    pub fn from_directory(directory: &Path) -> Result<Self, CacheInfoError> {
        let mut commit = None;
        let mut tags = None;
        let mut timestamp = None;

        // Read the cache keys.
@ -92,6 +95,9 @@ impl CacheInfo {
                    let path = directory.join(file);
                    let metadata = match path.metadata() {
                        Ok(metadata) => metadata,
                        Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                            continue;
                        }
                        Err(err) => {
                            warn!("Failed to read metadata for file: {err}");
                            continue;
@ -106,13 +112,37 @@ impl CacheInfo {
                    }
                    timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
                }
                CacheKey::Git { git: true } => match CacheCommit::from_repository(directory) {
                CacheKey::Git {
                    git: GitPattern::Bool(true),
                } => match Commit::from_repository(directory) {
                    Ok(commit_info) => commit = Some(commit_info),
                    Err(err) => {
                        debug!("Failed to read the current commit: {err}");
                    }
                },
                CacheKey::Git { git: false } => {}
                CacheKey::Git {
                    git: GitPattern::Set(set),
                } => {
                    if set.commit.unwrap_or(false) {
                        match Commit::from_repository(directory) {
                            Ok(commit_info) => commit = Some(commit_info),
                            Err(err) => {
                                debug!("Failed to read the current commit: {err}");
                            }
                        }
                    }
                    if set.tags.unwrap_or(false) {
                        match Tags::from_repository(directory) {
                            Ok(tags_info) => tags = Some(tags_info),
                            Err(err) => {
                                debug!("Failed to read the current tags: {err}");
                            }
                        }
                    }
                }
                CacheKey::Git {
                    git: GitPattern::Bool(false),
                } => {}
            }
        }

@ -147,7 +177,11 @@ impl CacheInfo {
            }
        }

        Ok(Self { timestamp, commit })
        Ok(Self {
            timestamp,
            commit,
            tags,
        })
    }

    /// Compute the cache info for a given file, assumed to be a binary or source distribution
@ -162,14 +196,18 @@ impl CacheInfo {
    }

    pub fn is_empty(&self) -> bool {
        self.timestamp.is_none() && self.commit.is_none()
        self.timestamp.is_none() && self.commit.is_none() && self.tags.is_none()
    }
}

#[derive(Debug, serde::Deserialize)]
struct TimestampCommit {
    #[serde(default)]
    timestamp: Option<Timestamp>,
    commit: Option<CacheCommit>,
    #[serde(default)]
    commit: Option<Commit>,
    #[serde(default)]
    tags: Option<Tags>,
}

#[derive(Debug, serde::Deserialize)]
@ -189,9 +227,15 @@ impl From<CacheInfoWire> for CacheInfo {
                timestamp: Some(timestamp),
                ..Self::default()
            },
            CacheInfoWire::TimestampCommit(TimestampCommit { timestamp, commit }) => {
                Self { timestamp, commit }
            }
            CacheInfoWire::TimestampCommit(TimestampCommit {
                timestamp,
                commit,
                tags,
            }) => Self {
                timestamp,
                commit,
                tags,
            },
        }
    }
}
@ -223,8 +267,24 @@ pub enum CacheKey {
    Path(String),
    /// Ex) `{ file = "Cargo.lock" }` or `{ file = "**/*.toml" }`
    File { file: String },
    /// Ex) `{ git = true }`
    Git { git: bool },
    /// Ex) `{ git = true }` or `{ git = { commit = true, tags = false } }`
    Git { git: GitPattern },
}

#[derive(Debug, Clone, serde::Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(untagged, rename_all = "kebab-case", deny_unknown_fields)]
pub enum GitPattern {
    Bool(bool),
    Set(GitSet),
}

#[derive(Debug, Clone, serde::Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct GitSet {
    commit: Option<bool>,
    tags: Option<bool>,
}

pub enum FilePattern {
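For context, the new `GitPattern` accepts either the old boolean form or a table of flags. In a project's `pyproject.toml`, the schema above suggests a configuration along these lines (the file entry and flag values here are illustrative, not taken from this commit):

[tool.uv]
cache-keys = [
    { file = "pyproject.toml" },
    { git = { commit = true, tags = true } },
]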
@ -1,92 +0,0 @@
use std::path::{Path, PathBuf};

#[derive(Debug, thiserror::Error)]
pub(crate) enum CacheCommitError {
    #[error("The repository at {0} is missing a `.git` directory")]
    MissingGitDir(PathBuf),
    #[error("The repository at {0} is missing a `HEAD` file")]
    MissingHead(PathBuf),
    #[error("The repository at {0} has an invalid reference: `{1}`")]
    InvalidRef(PathBuf, String),
    #[error("The discovered commit has an invalid length (expected 40 characters): `{0}`")]
    WrongLength(String),
    #[error("The discovered commit has an invalid character (expected hexadecimal): `{0}`")]
    WrongDigit(String),
    #[error(transparent)]
    Io(#[from] std::io::Error),
}

/// The current commit for a repository (i.e., a 40-character hexadecimal string).
#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
pub(crate) struct CacheCommit(String);

impl CacheCommit {
    /// Return the [`CacheCommit`] for the repository at the given path.
    pub(crate) fn from_repository(path: &Path) -> Result<Self, CacheCommitError> {
        // Find the `.git` directory, searching through parent directories if necessary.
        let git_dir = path
            .ancestors()
            .map(|ancestor| ancestor.join(".git"))
            .find(|git_dir| git_dir.exists())
            .ok_or_else(|| CacheCommitError::MissingGitDir(path.to_path_buf()))?;

        let git_head_path =
            git_head(&git_dir).ok_or_else(|| CacheCommitError::MissingHead(git_dir.clone()))?;
        let git_head_contents = fs_err::read_to_string(git_head_path)?;

        // The contents are either a commit or a reference in the following formats
        // - "<commit>" when the head is detached
        // - "ref <ref>" when working on a branch
        // If a commit, checking if the HEAD file has changed is sufficient
        // If a ref, we need to add the head file for that ref to rebuild on commit
        let mut git_ref_parts = git_head_contents.split_whitespace();
        let commit_or_ref = git_ref_parts.next().ok_or_else(|| {
            CacheCommitError::InvalidRef(git_dir.clone(), git_head_contents.clone())
        })?;
        let commit = if let Some(git_ref) = git_ref_parts.next() {
            let git_ref_path = git_dir.join(git_ref);
            let commit = fs_err::read_to_string(git_ref_path)?;
            commit.trim().to_string()
        } else {
            commit_or_ref.to_string()
        };

        // The commit should be 40 hexadecimal characters.
        if commit.len() != 40 {
            return Err(CacheCommitError::WrongLength(commit));
        }
        if commit.chars().any(|c| !c.is_ascii_hexdigit()) {
            return Err(CacheCommitError::WrongDigit(commit));
        }

        Ok(Self(commit))
    }
}

/// Return the path to the `HEAD` file of a Git repository, taking worktrees into account.
fn git_head(git_dir: &Path) -> Option<PathBuf> {
    // The typical case is a standard git repository.
    let git_head_path = git_dir.join("HEAD");
    if git_head_path.exists() {
        return Some(git_head_path);
    }
    if !git_dir.is_file() {
        return None;
    }
    // If `.git/HEAD` doesn't exist and `.git` is actually a file,
    // then let's try to read it as a worktree. If it's
    // a worktree, then its contents will look like this, e.g.:
    //
    //     gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
    //
    // And the HEAD file we want to watch will be at:
    //
    //     /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
    let contents = fs_err::read_to_string(git_dir).ok()?;
    let (label, worktree_path) = contents.split_once(':')?;
    if label != "gitdir" {
        return None;
    }
    let worktree_path = worktree_path.trim();
    Some(PathBuf::from(worktree_path))
}
173
crates/uv-cache-info/src/git_info.rs
Normal file
173
crates/uv-cache-info/src/git_info.rs
Normal file
@ -0,0 +1,173 @@
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

#[derive(Debug, thiserror::Error)]
pub(crate) enum GitInfoError {
    #[error("The repository at {0} is missing a `.git` directory")]
    MissingGitDir(PathBuf),
    #[error("The repository at {0} is missing a `HEAD` file")]
    MissingHead(PathBuf),
    #[error("The repository at {0} is missing a `refs` directory")]
    MissingRefs(PathBuf),
    #[error("The repository at {0} has an invalid reference: `{1}`")]
    InvalidRef(PathBuf, String),
    #[error("The discovered commit has an invalid length (expected 40 characters): `{0}`")]
    WrongLength(String),
    #[error("The discovered commit has an invalid character (expected hexadecimal): `{0}`")]
    WrongDigit(String),
    #[error(transparent)]
    Io(#[from] std::io::Error),
}

/// The current commit for a repository (i.e., a 40-character hexadecimal string).
#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
pub(crate) struct Commit(String);

impl Commit {
    /// Return the [`Commit`] for the repository at the given path.
    pub(crate) fn from_repository(path: &Path) -> Result<Self, GitInfoError> {
        // Find the `.git` directory, searching through parent directories if necessary.
        let git_dir = path
            .ancestors()
            .map(|ancestor| ancestor.join(".git"))
            .find(|git_dir| git_dir.exists())
            .ok_or_else(|| GitInfoError::MissingGitDir(path.to_path_buf()))?;

        let git_head_path =
            git_head(&git_dir).ok_or_else(|| GitInfoError::MissingHead(git_dir.clone()))?;
        let git_head_contents = fs_err::read_to_string(git_head_path)?;

        // The contents are either a commit or a reference in the following formats
        // - "<commit>" when the head is detached
        // - "ref <ref>" when working on a branch
        // If a commit, checking if the HEAD file has changed is sufficient
        // If a ref, we need to add the head file for that ref to rebuild on commit
        let mut git_ref_parts = git_head_contents.split_whitespace();
        let commit_or_ref = git_ref_parts
            .next()
            .ok_or_else(|| GitInfoError::InvalidRef(git_dir.clone(), git_head_contents.clone()))?;
        let commit = if let Some(git_ref) = git_ref_parts.next() {
            let git_ref_path = git_dir.join(git_ref);
            let commit = fs_err::read_to_string(git_ref_path)?;
            commit.trim().to_string()
        } else {
            commit_or_ref.to_string()
        };

        // The commit should be 40 hexadecimal characters.
        if commit.len() != 40 {
            return Err(GitInfoError::WrongLength(commit));
        }
        if commit.chars().any(|c| !c.is_ascii_hexdigit()) {
            return Err(GitInfoError::WrongDigit(commit));
        }

        Ok(Self(commit))
    }
}

/// The set of tags visible in a repository.
#[derive(Default, Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
pub(crate) struct Tags(BTreeMap<String, String>);

impl Tags {
    /// Return the [`Tags`] for the repository at the given path.
    pub(crate) fn from_repository(path: &Path) -> Result<Self, GitInfoError> {
        // Find the `.git` directory, searching through parent directories if necessary.
        let git_dir = path
            .ancestors()
            .map(|ancestor| ancestor.join(".git"))
            .find(|git_dir| git_dir.exists())
            .ok_or_else(|| GitInfoError::MissingGitDir(path.to_path_buf()))?;

        let git_refs_path =
            git_refs(&git_dir).ok_or_else(|| GitInfoError::MissingRefs(git_dir.clone()))?;

        let mut tags = BTreeMap::new();

        // Map each tag to its commit.
        let read_dir = match fs_err::read_dir(git_refs_path.join("tags")) {
            Ok(read_dir) => read_dir,
            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
                return Ok(Self(tags));
            }
            Err(err) => return Err(err.into()),
        };
        for entry in read_dir {
            let entry = entry?;
            let path = entry.path();
            if let Some(tag) = path.file_name().and_then(|name| name.to_str()) {
                let commit = fs_err::read_to_string(&path)?.trim().to_string();

                // The commit should be 40 hexadecimal characters.
                if commit.len() != 40 {
                    return Err(GitInfoError::WrongLength(commit));
                }
                if commit.chars().any(|c| !c.is_ascii_hexdigit()) {
                    return Err(GitInfoError::WrongDigit(commit));
                }

                tags.insert(tag.to_string(), commit);
            }
        }

        Ok(Self(tags))
    }
}

/// Return the path to the `HEAD` file of a Git repository, taking worktrees into account.
fn git_head(git_dir: &Path) -> Option<PathBuf> {
    // The typical case is a standard git repository.
    let git_head_path = git_dir.join("HEAD");
    if git_head_path.exists() {
        return Some(git_head_path);
    }
    if !git_dir.is_file() {
        return None;
    }
    // If `.git/HEAD` doesn't exist and `.git` is actually a file,
    // then let's try to read it as a worktree. If it's
    // a worktree, then its contents will look like this, e.g.:
    //
    //     gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
    //
    // And the HEAD file we want to watch will be at:
    //
    //     /home/andrew/astral/uv/main/.git/worktrees/pr2/HEAD
    let contents = fs_err::read_to_string(git_dir).ok()?;
    let (label, worktree_path) = contents.split_once(':')?;
    if label != "gitdir" {
        return None;
    }
    let worktree_path = worktree_path.trim();
    Some(PathBuf::from(worktree_path))
}

/// Return the path to the `refs` directory of a Git repository, taking worktrees into account.
fn git_refs(git_dir: &Path) -> Option<PathBuf> {
    // The typical case is a standard git repository.
    let git_refs_path = git_dir.join("refs");
    if git_refs_path.exists() {
        return Some(git_refs_path);
    }
    if !git_dir.is_file() {
        return None;
    }
    // If `.git/refs` doesn't exist and `.git` is actually a file,
    // then let's try to read it as a worktree. If it's
    // a worktree, then its contents will look like this, e.g.:
    //
    //     gitdir: /home/andrew/astral/uv/main/.git/worktrees/pr2
    //
    // And the refs directory we want to watch will be at:
    //
    //     /home/andrew/astral/uv/main/.git/refs
    let contents = fs_err::read_to_string(git_dir).ok()?;
    let (label, worktree_path) = contents.split_once(':')?;
    if label != "gitdir" {
        return None;
    }
    let worktree_path = PathBuf::from(worktree_path.trim());
    let refs_path = worktree_path.parent()?.parent()?.join("refs");
    Some(refs_path)
}
@ -2,5 +2,5 @@ pub use crate::cache_info::*;
pub use crate::timestamp::*;

mod cache_info;
mod commit_info;
mod git_info;
mod timestamp;
@ -10,6 +10,9 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

@ -181,144 +181,4 @@ impl std::fmt::Display for RepositoryUrl {
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
        let mut hasher = CacheKeyHasher::new();
        CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
            .cache_key(&mut hasher);
        let hash_without_creds = hasher.finish();

        let mut hasher = CacheKeyHasher::new();
        CanonicalUrl::parse(
            "https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0",
        )?
        .cache_key(&mut hasher);
        let hash_with_creds = hasher.finish();
        assert_eq!(
            hash_without_creds, hash_with_creds,
            "URLs with no user credentials should hash the same as URLs with different user credentials",
        );

        let mut hasher = CacheKeyHasher::new();
        CanonicalUrl::parse(
            "https://user:bar@example.com/pypa/sample-namespace-packages.git@2.0.0",
        )?
        .cache_key(&mut hasher);
        let hash_with_creds = hasher.finish();
        assert_eq!(
            hash_without_creds, hash_with_creds,
            "URLs with different user credentials should hash the same",
        );

        let mut hasher = CacheKeyHasher::new();
        CanonicalUrl::parse("https://:bar@example.com/pypa/sample-namespace-packages.git@2.0.0")?
            .cache_key(&mut hasher);
        let hash_with_creds = hasher.finish();
        assert_eq!(
            hash_without_creds, hash_with_creds,
            "URLs with no username, though with a password, should hash the same as URLs with different user credentials",
        );

        let mut hasher = CacheKeyHasher::new();
        CanonicalUrl::parse("https://user:@example.com/pypa/sample-namespace-packages.git@2.0.0")?
            .cache_key(&mut hasher);
        let hash_with_creds = hasher.finish();
        assert_eq!(
            hash_without_creds, hash_with_creds,
            "URLs with no password, though with a username, should hash the same as URLs with different user credentials",
        );

        Ok(())
    }

    #[test]
    fn canonical_url() -> Result<(), url::ParseError> {
        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
        );

        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
        );

        // Two URLs should be _not_ considered equal if they point to different repositories.
        assert_ne!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
            CanonicalUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
        );

        // Two URLs should _not_ be considered equal if they request different subdirectories.
        assert_ne!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
        );

        // Two URLs should _not_ be considered equal if they request different commit tags.
        assert_ne!(
            CanonicalUrl::parse(
                "git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0"
            )?,
            CanonicalUrl::parse(
                "git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0"
            )?,
        );

        // Two URLs that cannot be a base should be considered equal.
        assert_eq!(
            CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
            CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
        );

        Ok(())
    }

    #[test]
    fn repository_url() -> Result<(), url::ParseError> {
        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
        );

        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            RepositoryUrl::parse(
                "git+https://github.com/pypa/sample-namespace-packages.git@2.0.0"
            )?,
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
        );

        // Two URLs should be _not_ considered equal if they point to different repositories.
        assert_ne!(
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
            RepositoryUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
        );

        // Two URLs should be considered equal if they map to the same repository, even if they
        // request different subdirectories.
        assert_eq!(
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
        );

        // Two URLs should be considered equal if they map to the same repository, even if they
        // request different commit tags.
        assert_eq!(
            RepositoryUrl::parse(
                "git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0"
            )?,
            RepositoryUrl::parse(
                "git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0"
            )?,
        );

        Ok(())
    }
}
mod tests;
125
crates/uv-cache-key/src/canonical_url/tests.rs
Normal file
125
crates/uv-cache-key/src/canonical_url/tests.rs
Normal file
|
@ -0,0 +1,125 @@
|
use super::*;

#[test]
fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
    let mut hasher = CacheKeyHasher::new();
    CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
        .cache_key(&mut hasher);
    let hash_without_creds = hasher.finish();

    let mut hasher = CacheKeyHasher::new();
    CanonicalUrl::parse("https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0")?
        .cache_key(&mut hasher);
    let hash_with_creds = hasher.finish();
    assert_eq!(
        hash_without_creds, hash_with_creds,
        "URLs with no user credentials should hash the same as URLs with different user credentials",
    );

    let mut hasher = CacheKeyHasher::new();
    CanonicalUrl::parse("https://user:bar@example.com/pypa/sample-namespace-packages.git@2.0.0")?
        .cache_key(&mut hasher);
    let hash_with_creds = hasher.finish();
    assert_eq!(
        hash_without_creds, hash_with_creds,
        "URLs with different user credentials should hash the same",
    );

    let mut hasher = CacheKeyHasher::new();
    CanonicalUrl::parse("https://:bar@example.com/pypa/sample-namespace-packages.git@2.0.0")?
        .cache_key(&mut hasher);
    let hash_with_creds = hasher.finish();
    assert_eq!(
        hash_without_creds, hash_with_creds,
        "URLs with no username, though with a password, should hash the same as URLs with different user credentials",
    );

    let mut hasher = CacheKeyHasher::new();
    CanonicalUrl::parse("https://user:@example.com/pypa/sample-namespace-packages.git@2.0.0")?
        .cache_key(&mut hasher);
    let hash_with_creds = hasher.finish();
    assert_eq!(
        hash_without_creds, hash_with_creds,
        "URLs with no password, though with a username, should hash the same as URLs with different user credentials",
    );

    Ok(())
}

#[test]
fn canonical_url() -> Result<(), url::ParseError> {
    // Two URLs should be considered equal regardless of the `.git` suffix.
    assert_eq!(
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
    );

    // Two URLs should be considered equal regardless of the `.git` suffix.
    assert_eq!(
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
    );

    // Two URLs should _not_ be considered equal if they point to different repositories.
    assert_ne!(
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
        CanonicalUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
    );

    // Two URLs should _not_ be considered equal if they request different subdirectories.
    assert_ne!(
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
    );

    // Two URLs should _not_ be considered equal if they request different commit tags.
    assert_ne!(
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0")?,
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0")?,
    );

    // Two URLs that cannot be a base should be considered equal.
    assert_eq!(
        CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
        CanonicalUrl::parse("git+https:://github.com/pypa/sample-namespace-packages.git")?,
    );

    Ok(())
}

#[test]
fn repository_url() -> Result<(), url::ParseError> {
    // Two URLs should be considered equal regardless of the `.git` suffix.
    assert_eq!(
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
    );

    // Two URLs should be considered equal regardless of the `.git` suffix.
    assert_eq!(
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
    );

    // Two URLs should _not_ be considered equal if they point to different repositories.
    assert_ne!(
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
        RepositoryUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
    );

    // Two URLs should be considered equal if they map to the same repository, even if they
    // request different subdirectories.
    assert_eq!(
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
    );

    // Two URLs should be considered equal if they map to the same repository, even if they
    // request different commit tags.
    assert_eq!(
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0")?,
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0")?,
    );

    Ok(())
}
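The tests above hinge on one distinction: `CanonicalUrl` keeps the revision and subdirectory significant, while `RepositoryUrl` collapses a URL down to the repository itself. A minimal sketch of that contrast, assuming both types are exported from the uv-cache-key crate as in the tests:

use uv_cache_key::{CanonicalUrl, RepositoryUrl};

fn main() -> Result<(), url::ParseError> {
    // Different tags: distinct canonical URLs (and thus distinct cache keys)...
    assert_ne!(
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@v1.0.0")?,
        CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@v2.0.0")?,
    );
    // ...but one and the same repository URL, which the tests treat as equal.
    assert_eq!(
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@v1.0.0")?,
        RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@v2.0.0")?,
    );
    Ok(())
}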
@ -10,6 +10,9 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

@ -20,6 +23,7 @@ uv-distribution-types = { workspace = true }
uv-fs = { workspace = true, features = ["tokio"] }
uv-normalize = { workspace = true }
uv-pypi-types = { workspace = true }
uv-static = { workspace = true }

clap = { workspace = true, features = ["derive", "env"], optional = true }
directories = { workspace = true }
@ -1,5 +1,6 @@
use std::io;
use std::path::{Path, PathBuf};
use uv_static::EnvVars;

use crate::Cache;
use clap::Parser;

@ -17,7 +18,7 @@ pub struct CacheArgs {
        long,
        short,
        alias = "no-cache-dir",
        env = "UV_NO_CACHE",
        env = EnvVars::UV_NO_CACHE,
        value_parser = clap::builder::BoolishValueParser::new(),
    )]
    pub no_cache: bool,

@ -26,7 +27,7 @@ pub struct CacheArgs {
    ///
    /// Defaults to `$HOME/Library/Caches/uv` on macOS, `$XDG_CACHE_HOME/uv` or `$HOME/.cache/uv` on
    /// Linux, and `%LOCALAPPDATA%\uv\cache` on Windows.
    #[arg(global = true, long, env = "UV_CACHE_DIR")]
    #[arg(global = true, long, env = EnvVars::UV_CACHE_DIR)]
    pub cache_dir: Option<PathBuf>,
}
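This hunk swaps string literals like "UV_NO_CACHE" for `EnvVars` constants. A minimal sketch of the assumed pattern (the real definitions live in the uv-static crate; only the two names below actually appear in this hunk):

pub struct EnvVars;

impl EnvVars {
    // Associated string constants keep each variable name defined exactly once.
    pub const UV_NO_CACHE: &'static str = "UV_NO_CACHE";
    pub const UV_CACHE_DIR: &'static str = "UV_CACHE_DIR";
}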
@ -631,7 +631,7 @@ pub enum CacheBucket {
    /// can put next to the wheels as in the `Wheels` bucket.
    ///
    /// The unzipped source distribution is stored in a directory matching the source distribution
    /// acrhive name.
    /// archive name.
    ///
    /// Source distributions are built into zipped wheel files (as PEP 517 specifies) and unzipped
    /// lazily before installing. So when resolving, we only build the wheel and store the archive
@ -10,6 +10,9 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

@ -24,6 +27,7 @@ uv-pypi-types = { workspace = true }
uv-python = { workspace = true, features = ["clap", "schemars"]}
uv-resolver = { workspace = true, features = ["clap"] }
uv-settings = { workspace = true, features = ["schemars"] }
uv-static = { workspace = true }
uv-version = { workspace = true }
uv-warnings = { workspace = true }

@ -42,4 +46,5 @@ default = []
self-update = []

[build-dependencies]
uv-static = { workspace = true }
fs-err = { workspace = true }
@ -5,10 +5,12 @@ use std::{

use fs_err as fs;

use uv_static::EnvVars;

fn main() {
    // The workspace root directory is not available without walking up the tree
    // https://github.com/rust-lang/cargo/issues/3946
    let workspace_root = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap())
    let workspace_root = Path::new(&std::env::var(EnvVars::CARGO_MANIFEST_DIR).unwrap())
        .parent()
        .expect("CARGO_MANIFEST_DIR should be nested in workspace")
        .parent()

@ -18,7 +20,7 @@ fn main() {
    commit_info(&workspace_root);

    #[allow(clippy::disallowed_methods)]
    let target = std::env::var("TARGET").unwrap();
    let target = std::env::var(EnvVars::TARGET).unwrap();
    println!("cargo:rustc-env=RUST_HOST_TARGET={target}");
}

@ -62,21 +64,27 @@ fn commit_info(workspace_root: &Path) {
    let stdout = String::from_utf8(output.stdout).unwrap();
    let mut parts = stdout.split_whitespace();
    let mut next = || parts.next().unwrap();
    println!("cargo:rustc-env=UV_COMMIT_HASH={}", next());
    println!("cargo:rustc-env=UV_COMMIT_SHORT_HASH={}", next());
    println!("cargo:rustc-env=UV_COMMIT_DATE={}", next());
    println!("cargo:rustc-env={}={}", EnvVars::UV_COMMIT_HASH, next());
    println!(
        "cargo:rustc-env={}={}",
        EnvVars::UV_COMMIT_SHORT_HASH,
        next()
    );
    println!("cargo:rustc-env={}={}", EnvVars::UV_COMMIT_DATE, next());

    // Describe can fail for some commits
    // https://git-scm.com/docs/pretty-formats#Documentation/pretty-formats.txt-emdescribeoptionsem
    if let Some(describe) = parts.next() {
        let mut describe_parts = describe.split('-');
        println!(
            "cargo:rustc-env=UV_LAST_TAG={}",
            "cargo:rustc-env={}={}",
            EnvVars::UV_LAST_TAG,
            describe_parts.next().unwrap()
        );
        // If this is the tagged commit, this component will be missing
        println!(
            "cargo:rustc-env=UV_LAST_TAG_DISTANCE={}",
            "cargo:rustc-env={}={}",
            EnvVars::UV_LAST_TAG_DISTANCE,
            describe_parts.next().unwrap_or("0")
        );
    }
File diff suppressed because it is too large
@ -1,7 +1,8 @@
use uv_cache::Refresh;
use uv_configuration::ConfigSettings;
use uv_distribution_types::{PipExtraIndex, PipFindLinks, PipIndex};
use uv_resolver::PrereleaseMode;
use uv_settings::{PipOptions, ResolverInstallerOptions, ResolverOptions};
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};

use crate::{
    BuildOptionsArgs, IndexArgs, InstallerArgs, Maybe, RefreshArgs, ResolverArgs,

@ -186,6 +187,8 @@ impl From<ResolverInstallerArgs> for PipOptions {
impl From<IndexArgs> for PipOptions {
    fn from(args: IndexArgs) -> Self {
        let IndexArgs {
            default_index,
            index,
            index_url,
            extra_index_url,
            no_index,

@ -193,11 +196,18 @@ impl From<IndexArgs> for PipOptions {
        } = args;

        Self {
            index_url: index_url.and_then(Maybe::into_option),
            extra_index_url: extra_index_url.map(|extra_index_url| {
                extra_index_url
            index: default_index
                .and_then(Maybe::into_option)
                .map(|default_index| vec![default_index])
                .combine(
                    index.map(|index| index.into_iter().filter_map(Maybe::into_option).collect()),
                ),
            index_url: index_url.and_then(Maybe::into_option).map(PipIndex::from),
            extra_index_url: extra_index_url.map(|extra_index_urls| {
                extra_index_urls
                    .into_iter()
                    .filter_map(Maybe::into_option)
                    .map(PipExtraIndex::from)
                    .collect()
            }),
            no_index: if no_index { Some(true) } else { None },

@ -205,6 +215,7 @@ impl From<IndexArgs> for PipOptions {
                find_links
                    .into_iter()
                    .filter_map(Maybe::into_option)
                    .map(PipFindLinks::from)
                    .collect()
            }),
            ..PipOptions::default()

@ -247,6 +258,15 @@ pub fn resolver_options(
    } = build_args;

    ResolverOptions {
        index: index_args
            .default_index
            .and_then(Maybe::into_option)
            .map(|default_index| vec![default_index])
            .combine(
                index_args
                    .index
                    .map(|index| index.into_iter().filter_map(Maybe::into_option).collect()),
            ),
        index_url: index_args.index_url.and_then(Maybe::into_option),
        extra_index_url: index_args.extra_index_url.map(|extra_index_url| {
            extra_index_url

@ -335,7 +355,16 @@ pub fn resolver_installer_options(
        no_binary_package,
    } = build_args;

    let default_index = index_args
        .default_index
        .and_then(Maybe::into_option)
        .map(|default_index| vec![default_index]);
    let index = index_args
        .index
        .map(|index| index.into_iter().filter_map(Maybe::into_option).collect());

    ResolverInstallerOptions {
        index: default_index.combine(index),
        index_url: index_args.index_url.and_then(Maybe::into_option),
        extra_index_url: index_args.extra_index_url.map(|extra_index_url| {
            extra_index_url
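The new index handling wraps --default-index in a one-element Vec and then calls .combine(...) with the --index values. The uv-settings Combine trait itself is not shown in this diff, so the following is only one plausible shape for it, illustrating how a present primary value and a present secondary value might merge:

trait Combine {
    fn combine(self, other: Self) -> Self;
}

impl<T> Combine for Option<Vec<T>> {
    fn combine(self, other: Option<Vec<T>>) -> Option<Vec<T>> {
        match (self, other) {
            // Both present: the default index stays first, extra indexes follow.
            (Some(mut first), Some(rest)) => {
                first.extend(rest);
                Some(first)
            }
            // Otherwise, whichever side is present wins.
            (first, rest) => first.or(rest),
        }
    }
}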
@ -77,73 +77,4 @@ pub fn version() -> VersionInfo {
}

#[cfg(test)]
mod tests {
    use insta::{assert_json_snapshot, assert_snapshot};

    use super::{CommitInfo, VersionInfo};

    #[test]
    fn version_formatting() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: None,
        };
        assert_snapshot!(version, @"0.0.0");
    }

    #[test]
    fn version_formatting_with_commit_info() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: Some(CommitInfo {
                short_commit_hash: "53b0f5d92".to_string(),
                commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
                last_tag: Some("v0.0.1".to_string()),
                commit_date: "2023-10-19".to_string(),
                commits_since_last_tag: 0,
            }),
        };
        assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
    }

    #[test]
    fn version_formatting_with_commits_since_last_tag() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: Some(CommitInfo {
                short_commit_hash: "53b0f5d92".to_string(),
                commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
                last_tag: Some("v0.0.1".to_string()),
                commit_date: "2023-10-19".to_string(),
                commits_since_last_tag: 24,
            }),
        };
        assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
    }

    #[test]
    fn version_serializable() {
        let version = VersionInfo {
            version: "0.0.0".to_string(),
            commit_info: Some(CommitInfo {
                short_commit_hash: "53b0f5d92".to_string(),
                commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
                last_tag: Some("v0.0.1".to_string()),
                commit_date: "2023-10-19".to_string(),
                commits_since_last_tag: 0,
            }),
        };
        assert_json_snapshot!(version, @r###"
        {
          "version": "0.0.0",
          "commit_info": {
            "short_commit_hash": "53b0f5d92",
            "commit_hash": "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7",
            "commit_date": "2023-10-19",
            "last_tag": "v0.0.1",
            "commits_since_last_tag": 0
          }
        }
        "###);
    }
}
mod tests;
68 crates/uv-cli/src/version/tests.rs Normal file
@ -0,0 +1,68 @@
use insta::{assert_json_snapshot, assert_snapshot};

use super::{CommitInfo, VersionInfo};

#[test]
fn version_formatting() {
    let version = VersionInfo {
        version: "0.0.0".to_string(),
        commit_info: None,
    };
    assert_snapshot!(version, @"0.0.0");
}

#[test]
fn version_formatting_with_commit_info() {
    let version = VersionInfo {
        version: "0.0.0".to_string(),
        commit_info: Some(CommitInfo {
            short_commit_hash: "53b0f5d92".to_string(),
            commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
            last_tag: Some("v0.0.1".to_string()),
            commit_date: "2023-10-19".to_string(),
            commits_since_last_tag: 0,
        }),
    };
    assert_snapshot!(version, @"0.0.0 (53b0f5d92 2023-10-19)");
}

#[test]
fn version_formatting_with_commits_since_last_tag() {
    let version = VersionInfo {
        version: "0.0.0".to_string(),
        commit_info: Some(CommitInfo {
            short_commit_hash: "53b0f5d92".to_string(),
            commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
            last_tag: Some("v0.0.1".to_string()),
            commit_date: "2023-10-19".to_string(),
            commits_since_last_tag: 24,
        }),
    };
    assert_snapshot!(version, @"0.0.0+24 (53b0f5d92 2023-10-19)");
}

#[test]
fn version_serializable() {
    let version = VersionInfo {
        version: "0.0.0".to_string(),
        commit_info: Some(CommitInfo {
            short_commit_hash: "53b0f5d92".to_string(),
            commit_hash: "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7".to_string(),
            last_tag: Some("v0.0.1".to_string()),
            commit_date: "2023-10-19".to_string(),
            commits_since_last_tag: 0,
        }),
    };
    assert_json_snapshot!(version, @r#"
    {
      "version": "0.0.0",
      "commit_info": {
        "short_commit_hash": "53b0f5d92",
        "commit_hash": "53b0f5d924110e5b26fbf09f6fd3a03d67b475b7",
        "commit_date": "2023-10-19",
        "last_tag": "v0.0.1",
        "commits_since_last_tag": 0
      }
    }
    "#);
}
@ -3,6 +3,9 @@ name = "uv-client"
version = "0.0.1"
edition = "2021"

[lib]
doctest = false

[lints]
workspace = true

@ -20,6 +23,7 @@ uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-platform-tags = { workspace = true }
uv-pypi-types = { workspace = true }
uv-static = { workspace = true }
uv-version = { workspace = true }
uv-warnings = { workspace = true }
@ -17,6 +17,7 @@ use uv_configuration::{KeyringProviderType, TrustedHost};
use uv_fs::Simplified;
use uv_pep508::MarkerEnvironment;
use uv_platform_tags::Platform;
use uv_static::EnvVars;
use uv_version::version;
use uv_warnings::warn_user_once;

@ -156,7 +157,7 @@ impl<'a> BaseClientBuilder<'a> {
        }

        // Check for the presence of an `SSL_CERT_FILE`.
        let ssl_cert_file_exists = env::var_os("SSL_CERT_FILE").is_some_and(|path| {
        let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
            let path_exists = Path::new(&path).exists();
            if !path_exists {
                warn_user_once!(

@ -169,9 +170,9 @@ impl<'a> BaseClientBuilder<'a> {

        // Timeout options, matching https://doc.rust-lang.org/nightly/cargo/reference/config.html#httptimeout
        // `UV_REQUEST_TIMEOUT` is provided for backwards compatibility with v0.1.6
        let timeout = env::var("UV_HTTP_TIMEOUT")
            .or_else(|_| env::var("UV_REQUEST_TIMEOUT"))
            .or_else(|_| env::var("HTTP_TIMEOUT"))
        let timeout = env::var(EnvVars::UV_HTTP_TIMEOUT)
            .or_else(|_| env::var(EnvVars::UV_REQUEST_TIMEOUT))
            .or_else(|_| env::var(EnvVars::HTTP_TIMEOUT))
            .and_then(|value| {
                value.parse::<u64>()
                    .map(Duration::from_secs)

@ -260,7 +261,7 @@ impl<'a> BaseClientBuilder<'a> {
        };

        // Configure mTLS.
        let client_builder = if let Some(ssl_client_cert) = env::var_os("SSL_CLIENT_CERT") {
        let client_builder = if let Some(ssl_client_cert) = env::var_os(EnvVars::SSL_CLIENT_CERT) {
            match read_identity(&ssl_client_cert) {
                Ok(identity) => client_builder.identity(identity),
                Err(err) => {
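The timeout lookup above falls through three environment variables in priority order. A self-contained sketch of that chain, using the same variable names and the same whole-seconds parsing (error handling is simplified relative to the diff):

use std::env;
use std::time::Duration;

fn resolve_timeout() -> Option<Duration> {
    // Check each variable in priority order and take the first one that is set.
    ["UV_HTTP_TIMEOUT", "UV_REQUEST_TIMEOUT", "HTTP_TIMEOUT"]
        .into_iter()
        .find_map(|name| env::var(name).ok())
        // Interpret the value as a whole number of seconds.
        .and_then(|value| value.parse::<u64>().ok())
        .map(Duration::from_secs)
}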
@ -56,6 +56,7 @@ impl Error {
        match &*self.kind {
            // The server doesn't support range requests (as reported by the `HEAD` check).
            ErrorKind::AsyncHttpRangeReader(
                _,
                AsyncHttpRangeReaderError::HttpRangeRequestUnsupported,
            ) => {
                return true;

@ -63,6 +64,7 @@ impl Error {

            // The server doesn't support range requests (it doesn't return the necessary headers).
            ErrorKind::AsyncHttpRangeReader(
                _,
                AsyncHttpRangeReaderError::ContentLengthMissing
                | AsyncHttpRangeReaderError::ContentRangeMissing,
            ) => {

@ -187,8 +189,8 @@ pub enum ErrorKind {
    #[error("Received some unexpected HTML from {url}")]
    BadHtml { source: html::Error, url: Url },

    #[error(transparent)]
    AsyncHttpRangeReader(#[from] AsyncHttpRangeReaderError),
    #[error("Failed to read zip with range requests: `{0}`")]
    AsyncHttpRangeReader(Url, #[source] AsyncHttpRangeReaderError),

    #[error("{0} is not a valid wheel filename")]
    WheelFilename(#[source] WheelFilenameError),
@ -5,13 +5,14 @@ use reqwest::Response;
use tracing::{debug, info_span, warn, Instrument};
use url::Url;

use crate::cached_client::{CacheControl, CachedClientError};
use crate::html::SimpleHtml;
use crate::{Connectivity, Error, ErrorKind, OwnedArchive, RegistryClient};
use uv_cache::{Cache, CacheBucket};
use uv_cache_key::cache_digest;
use uv_distribution_filename::DistFilename;
use uv_distribution_types::{File, FileLocation, FlatIndexLocation, IndexUrl, UrlString};
use uv_distribution_types::{File, FileLocation, IndexUrl, UrlString};

use crate::cached_client::{CacheControl, CachedClientError};
use crate::html::SimpleHtml;
use crate::{Connectivity, Error, ErrorKind, OwnedArchive, RegistryClient};

#[derive(Debug, thiserror::Error)]
pub enum FlatIndexError {

@ -94,19 +95,19 @@ impl<'a> FlatIndexClient<'a> {
    #[allow(clippy::result_large_err)]
    pub async fn fetch(
        &self,
        indexes: impl Iterator<Item = &FlatIndexLocation>,
        indexes: impl Iterator<Item = &IndexUrl>,
    ) -> Result<FlatIndexEntries, FlatIndexError> {
        let mut fetches = futures::stream::iter(indexes)
            .map(|index| async move {
                let entries = match index {
                    FlatIndexLocation::Path(url) => {
                    IndexUrl::Path(url) => {
                        let path = url
                            .to_file_path()
                            .map_err(|()| FlatIndexError::NonFileUrl(url.to_url()))?;
                        Self::read_from_directory(&path, index)
                            .map_err(|err| FlatIndexError::FindLinksDirectory(path.clone(), err))?
                    }
                    FlatIndexLocation::Url(url) => self
                    IndexUrl::Pypi(url) | IndexUrl::Url(url) => self
                        .read_from_url(url, index)
                        .await
                        .map_err(|err| FlatIndexError::FindLinksUrl(url.to_url(), err))?,

@ -136,7 +137,7 @@ impl<'a> FlatIndexClient<'a> {
    async fn read_from_url(
        &self,
        url: &Url,
        flat_index: &FlatIndexLocation,
        flat_index: &IndexUrl,
    ) -> Result<FlatIndexEntries, Error> {
        let cache_entry = self.cache.entry(
            CacheBucket::FlatIndex,

@ -210,7 +211,7 @@ impl<'a> FlatIndexClient<'a> {
                Some((
                    DistFilename::try_from_normalized_filename(&file.filename)?,
                    file,
                    IndexUrl::from(flat_index.clone()),
                    flat_index.clone(),
                ))
            })
            .collect();

@ -226,7 +227,7 @@ impl<'a> FlatIndexClient<'a> {
    /// Read a flat remote index from a `--find-links` directory.
    fn read_from_directory(
        path: &Path,
        flat_index: &FlatIndexLocation,
        flat_index: &IndexUrl,
    ) -> Result<FlatIndexEntries, FindLinksDirectoryError> {
        let mut dists = Vec::new();
        for entry in fs_err::read_dir(path)? {

@ -279,7 +280,7 @@ impl<'a> FlatIndexClient<'a> {
            );
            continue;
        };
        dists.push((filename, file, IndexUrl::from(flat_index.clone())));
        dists.push((filename, file, flat_index.clone()));
    }
    Ok(FlatIndexEntries::from_entries(dists))
}
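After this change, --find-links locations are plain IndexUrl values, and fetching dispatches on the variant. A minimal sketch of that dispatch, assuming only the three variant names visible in the diff (the field types and everything else here are illustrative):

use url::Url;

enum IndexUrl {
    Pypi(Url),
    Url(Url),
    Path(Url),
}

fn describe(index: &IndexUrl) -> &'static str {
    match index {
        // `file://` indexes are read from the local filesystem.
        IndexUrl::Path(_) => "read from a local directory",
        // Registry and arbitrary HTTP(S) indexes are fetched over the network.
        IndexUrl::Pypi(_) | IndexUrl::Url(_) => "fetched over the network",
    }
}

fn main() {
    let index = IndexUrl::Path(Url::parse("file:///opt/wheels").unwrap());
    println!("{}", describe(&index));
}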
File diff suppressed because it is too large
995 crates/uv-client/src/html/tests.rs Normal file
@ -0,0 +1,995 @@
use super::*;

#[test]
fn parse_sha256() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_md5() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#md5=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: Some(
                        "6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                    ),
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl#md5=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_base() {
    let text = r#"
<!DOCTYPE html>
<html>
<head>
<base href="https://index.python.org/">
</head>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "index.python.org",
                    ),
                ),
                port: None,
                path: "/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_escaped_fragment() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2+233fca715f49-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2+233fca715f49-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2+233fca715f49-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2+233fca715f49-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_encoded_fragment() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256%3D4095ada29e51070f7d199a0a5bdf5c8d8e238e03f0bf4dcc02571e78c9ae800d">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "4095ada29e51070f7d199a0a5bdf5c8d8e238e03f0bf4dcc02571e78c9ae800d",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256%3D4095ada29e51070f7d199a0a5bdf5c8d8e238e03f0bf4dcc02571e78c9ae800d",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_quoted_filepath() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="cpu/torchtext-0.17.0%2Bcpu-cp39-cp39-win_amd64.whl">cpu/torchtext-0.17.0%2Bcpu-cp39-cp39-win_amd64.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "torchtext-0.17.0+cpu-cp39-cp39-win_amd64.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "cpu/torchtext-0.17.0%2Bcpu-cp39-cp39-win_amd64.whl",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_missing_hash() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_missing_href() {
    let text = r"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a>Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    ";
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap_err();
    insta::assert_snapshot!(result, @"Missing href attribute on anchor link");
}

#[test]
fn parse_empty_href() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap_err();
    insta::assert_snapshot!(result, @"Missing href attribute on anchor link");
}

#[test]
fn parse_empty_fragment() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl#",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_query_string() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl?project=legacy">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl?project=legacy",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_missing_hash_value() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap_err();
    insta::assert_snapshot!(result, @"Unexpected fragment (expected `#sha256=...` or similar) on URL: sha256");
}

#[test]
fn parse_unknown_hash() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
<!--TIMESTAMP 1703347410-->
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap_err();
    insta::assert_snapshot!(result, @"Unsupported hash algorithm (expected one of: `md5`, `sha256`, `sha384`, or `sha512`) on: `blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`");
}

#[test]
fn parse_flat_index_html() {
    let text = r#"
<!DOCTYPE html>
<html>
<head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"></head>
<body>
<a href="https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl">cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl</a><br>
<a href="https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl">cuda100/jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl</a><br>
</body>
</html>
    "#;
    let base =
        Url::parse("https://storage.googleapis.com/jax-releases/jax_cuda_releases.html").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "storage.googleapis.com",
                    ),
                ),
                port: None,
                path: "/jax-releases/jax_cuda_releases.html",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
                yanked: None,
            },
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl",
                yanked: None,
            },
        ],
    }
    "###);
}

/// Test for AWS Code Artifact
///
/// See: <https://github.com/astral-sh/uv/issues/1388#issuecomment-1947659088>
#[test]
fn parse_code_artifact_index_html() {
    let text = r#"
<!DOCTYPE html>
<html>
<head>
<title>Links for flask</title>
</head>
<body>
<h1>Links for flask</h1>
<a href="0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237" data-gpg-sig="false" >Flask-0.1.tar.gz</a>
<br/>
<a href="0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373" data-gpg-sig="false" >Flask-0.10.1.tar.gz</a>
<br/>
<a href="3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403" data-requires-python=">=3.8" data-gpg-sig="false" >flask-3.0.1.tar.gz</a>
<br/>
</body>
</html>
    "#;
    let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
        .unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "account.d.codeartifact.us-west-2.amazonaws.com",
                    ),
                ),
                port: None,
                path: "/pypi/shared-packages-pypi/simple/flask/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Flask-0.1.tar.gz",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
                yanked: None,
            },
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Flask-0.10.1.tar.gz",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
                yanked: None,
            },
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "flask-3.0.1.tar.gz",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: Some(
                    Ok(
                        VersionSpecifiers(
                            [
                                VersionSpecifier {
                                    operator: GreaterThanEqual,
                                    version: "3.8",
                                },
                            ],
                        ),
                    ),
                ),
                size: None,
                upload_time: None,
                url: "3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
                yanked: None,
            },
        ],
    }
    "###);
}

#[test]
fn parse_file_requires_python_trailing_comma() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" data-requires-python=">=3.8,">Jinja2-3.1.2-py3-none-any.whl</a><br/>
</body>
</html>
    "#;
    let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "download.pytorch.org",
                    ),
                ),
                port: None,
                path: "/whl/jinja2/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: None,
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: Some(
                        "6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                    ),
                    sha384: None,
                    sha512: None,
                },
                requires_python: Some(
                    Ok(
                        VersionSpecifiers(
                            [
                                VersionSpecifier {
                                    operator: GreaterThanEqual,
                                    version: "3.8",
                                },
                            ],
                        ),
                    ),
                ),
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl#sha256=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61",
                yanked: None,
            },
        ],
    }
    "###);
}

/// Respect PEP 714 (see: <https://peps.python.org/pep-0714/>).
#[test]
fn parse_core_metadata() {
    let text = r#"
<!DOCTYPE html>
<html>
<body>
<h1>Links for jinja2</h1>
<a href="/whl/Jinja2-3.1.2-py3-none-any.whl" data-dist-info-metadata="true">Jinja2-3.1.2-py3-none-any.whl</a><br/>
<a href="/whl/Jinja2-3.1.3-py3-none-any.whl" data-core-metadata="true">Jinja2-3.1.3-py3-none-any.whl</a><br/>
<a href="/whl/Jinja2-3.1.4-py3-none-any.whl" data-dist-info-metadata="false">Jinja2-3.1.4-py3-none-any.whl</a><br/>
<a href="/whl/Jinja2-3.1.5-py3-none-any.whl" data-core-metadata="false">Jinja2-3.1.5-py3-none-any.whl</a><br/>
<a href="/whl/Jinja2-3.1.6-py3-none-any.whl" data-core-metadata="true" data-dist-info-metadata="false">Jinja2-3.1.6-py3-none-any.whl</a><br/>
</body>
</html>
    "#;
    let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
        .unwrap();
    let result = SimpleHtml::parse(text, &base).unwrap();
    insta::assert_debug_snapshot!(result, @r###"
    SimpleHtml {
        base: BaseUrl(
            Url {
                scheme: "https",
                cannot_be_a_base: false,
                username: "",
                password: None,
                host: Some(
                    Domain(
                        "account.d.codeartifact.us-west-2.amazonaws.com",
                    ),
                ),
                port: None,
                path: "/pypi/shared-packages-pypi/simple/flask/",
                query: None,
                fragment: None,
            },
        ),
        files: [
            File {
                core_metadata: Some(
                    Bool(
                        true,
                    ),
                ),
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.2-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.2-py3-none-any.whl",
                yanked: None,
            },
            File {
                core_metadata: Some(
                    Bool(
                        true,
                    ),
                ),
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.3-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.3-py3-none-any.whl",
                yanked: None,
            },
            File {
                core_metadata: Some(
                    Bool(
                        false,
                    ),
                ),
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.4-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.4-py3-none-any.whl",
                yanked: None,
            },
            File {
                core_metadata: Some(
                    Bool(
                        false,
                    ),
                ),
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.5-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.5-py3-none-any.whl",
                yanked: None,
            },
            File {
                core_metadata: Some(
                    Bool(
                        true,
                    ),
                ),
                dist_info_metadata: None,
                data_dist_info_metadata: None,
                filename: "Jinja2-3.1.6-py3-none-any.whl",
                hashes: Hashes {
                    md5: None,
                    sha256: None,
                    sha384: None,
                    sha512: None,
                },
                requires_python: None,
                size: None,
                upload_time: None,
                url: "/whl/Jinja2-3.1.6-py3-none-any.whl",
                yanked: None,
            },
        ],
    }
    "###);
}
@ -453,326 +453,4 @@ impl CacheControlDirective {
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn cache_control_token() {
        let cc: CacheControl = CacheControlParser::new(["no-cache"]).collect();
        assert!(cc.no_cache);
        assert!(!cc.must_revalidate);
    }

    #[test]
    fn cache_control_max_age() {
        let cc: CacheControl = CacheControlParser::new(["max-age=60"]).collect();
        assert_eq!(Some(60), cc.max_age_seconds);
        assert!(!cc.must_revalidate);
    }

    // [RFC 9111 S5.2.1.1] says that client MUST NOT quote max-age, but we
    // support parsing it that way anyway.
    //
    // [RFC 9111 S5.2.1.1]: https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
    #[test]
    fn cache_control_max_age_quoted() {
        let cc: CacheControl = CacheControlParser::new([r#"max-age="60""#]).collect();
        assert_eq!(Some(60), cc.max_age_seconds);
        assert!(!cc.must_revalidate);
    }

    #[test]
    fn cache_control_max_age_invalid() {
        let cc: CacheControl = CacheControlParser::new(["max-age=6a0"]).collect();
        assert_eq!(None, cc.max_age_seconds);
        assert!(cc.must_revalidate);
    }

    #[test]
    fn cache_control_immutable() {
        let cc: CacheControl = CacheControlParser::new(["max-age=31536000, immutable"]).collect();
        assert_eq!(Some(31_536_000), cc.max_age_seconds);
        assert!(cc.immutable);
        assert!(!cc.must_revalidate);
    }

    #[test]
    fn cache_control_unrecognized() {
        let cc: CacheControl = CacheControlParser::new(["lion,max-age=60,zebra"]).collect();
        assert_eq!(Some(60), cc.max_age_seconds);
    }

    #[test]
    fn cache_control_invalid_squashes_remainder() {
        let cc: CacheControl = CacheControlParser::new(["no-cache,\x00,max-age=60"]).collect();
        // The invalid data doesn't impact things before it.
        assert!(cc.no_cache);
        // The invalid data precludes parsing anything after.
        assert_eq!(None, cc.max_age_seconds);
        // The invalid contents should force revalidation.
        assert!(cc.must_revalidate);
    }

    #[test]
    fn cache_control_invalid_squashes_remainder_but_not_other_header_values() {
        let cc: CacheControl =
            CacheControlParser::new(["no-cache,\x00,max-age=60", "max-stale=30"]).collect();
        // The invalid data doesn't impact things before it.
        assert!(cc.no_cache);
        // The invalid data precludes parsing anything after
        // in the same header value, but not in other
        // header values.
        assert_eq!(Some(30), cc.max_stale_seconds);
        // The invalid contents should force revalidation.
        assert!(cc.must_revalidate);
    }

    #[test]
    fn cache_control_parse_token() {
        let directives = CacheControlParser::new(["no-cache"]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            }]
        );
    }

    #[test]
    fn cache_control_parse_token_to_token_value() {
        let directives = CacheControlParser::new(["max-age=60"]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            }]
        );
    }

    #[test]
    fn cache_control_parse_token_to_quoted_string() {
        let directives =
            CacheControlParser::new([r#"private="cookie,x-something-else""#]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![CacheControlDirective {
                name: "private".to_string(),
                value: b"cookie,x-something-else".to_vec(),
            }]
        );
    }

    #[test]
    fn cache_control_parse_token_to_quoted_string_with_escape() {
        let directives =
            CacheControlParser::new([r#"private="something\"crazy""#]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![CacheControlDirective {
                name: "private".to_string(),
                value: br#"something"crazy"#.to_vec(),
            }]
        );
    }

    #[test]
    fn cache_control_parse_multiple_directives() {
        let header = r#"max-age=60, no-cache, private="cookie", no-transform"#;
        let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "private".to_string(),
                    value: b"cookie".to_vec(),
                },
                CacheControlDirective {
                    name: "no-transform".to_string(),
                    value: vec![]
                },
            ]
        );
    }

    #[test]
    fn cache_control_parse_multiple_directives_across_multiple_header_values() {
        let headers = [
            r"max-age=60, no-cache",
            r#"private="cookie""#,
            r"no-transform",
        ];
        let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "private".to_string(),
                    value: b"cookie".to_vec(),
                },
                CacheControlDirective {
                    name: "no-transform".to_string(),
                    value: vec![]
                },
            ]
        );
    }

    #[test]
    fn cache_control_parse_one_header_invalid() {
        let headers = [
            r"max-age=60, no-cache",
            r#", private="cookie""#,
            r"no-transform",
        ];
        let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "must-revalidate".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "no-transform".to_string(),
                    value: vec![]
                },
            ]
        );
    }

    #[test]
    fn cache_control_parse_invalid_directive_drops_remainder() {
        let header = r#"max-age=60, no-cache, ="cookie", no-transform"#;
        let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "must-revalidate".to_string(),
                    value: vec![]
                },
            ]
        );
    }

    #[test]
    fn cache_control_parse_name_normalized() {
        let header = r"MAX-AGE=60";
        let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },]
        );
    }

    // When a duplicate directive is found, we keep the first one
    // and add in a `must-revalidate` directive to indicate that
    // things are stale and the client should do a re-check.
    #[test]
    fn cache_control_parse_duplicate_directives() {
        let header = r"max-age=60, no-cache, max-age=30";
        let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "must-revalidate".to_string(),
                    value: vec![]
                },
            ]
        );
    }

    #[test]
    fn cache_control_parse_duplicate_directives_across_headers() {
        let headers = [r"max-age=60, no-cache", r"max-age=30"];
        let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "must-revalidate".to_string(),
                    value: vec![]
                },
            ]
        );
    }

    // Tests that we don't emit must-revalidate multiple times
    // even when something is duplicated multiple times.
    #[test]
    fn cache_control_parse_duplicate_redux() {
        let header = r"max-age=60, no-cache, no-cache, max-age=30";
        let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
        assert_eq!(
            directives,
            vec![
                CacheControlDirective {
                    name: "max-age".to_string(),
                    value: b"60".to_vec(),
                },
                CacheControlDirective {
                    name: "no-cache".to_string(),
                    value: vec![]
                },
                CacheControlDirective {
                    name: "must-revalidate".to_string(),
                    value: vec![]
                },
            ]
        );
    }
}
mod tests;
|
320
crates/uv-client/src/httpcache/control/tests.rs
Normal file

@@ -0,0 +1,320 @@
use super::*;

#[test]
fn cache_control_token() {
    let cc: CacheControl = CacheControlParser::new(["no-cache"]).collect();
    assert!(cc.no_cache);
    assert!(!cc.must_revalidate);
}

#[test]
fn cache_control_max_age() {
    let cc: CacheControl = CacheControlParser::new(["max-age=60"]).collect();
    assert_eq!(Some(60), cc.max_age_seconds);
    assert!(!cc.must_revalidate);
}

// [RFC 9111 S5.2.1.1] says that client MUST NOT quote max-age, but we
// support parsing it that way anyway.
//
// [RFC 9111 S5.2.1.1]: https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.1
#[test]
fn cache_control_max_age_quoted() {
    let cc: CacheControl = CacheControlParser::new([r#"max-age="60""#]).collect();
    assert_eq!(Some(60), cc.max_age_seconds);
    assert!(!cc.must_revalidate);
}

#[test]
fn cache_control_max_age_invalid() {
    let cc: CacheControl = CacheControlParser::new(["max-age=6a0"]).collect();
    assert_eq!(None, cc.max_age_seconds);
    assert!(cc.must_revalidate);
}

#[test]
fn cache_control_immutable() {
    let cc: CacheControl = CacheControlParser::new(["max-age=31536000, immutable"]).collect();
    assert_eq!(Some(31_536_000), cc.max_age_seconds);
    assert!(cc.immutable);
    assert!(!cc.must_revalidate);
}

#[test]
fn cache_control_unrecognized() {
    let cc: CacheControl = CacheControlParser::new(["lion,max-age=60,zebra"]).collect();
    assert_eq!(Some(60), cc.max_age_seconds);
}

#[test]
fn cache_control_invalid_squashes_remainder() {
    let cc: CacheControl = CacheControlParser::new(["no-cache,\x00,max-age=60"]).collect();
    // The invalid data doesn't impact things before it.
    assert!(cc.no_cache);
    // The invalid data precludes parsing anything after.
    assert_eq!(None, cc.max_age_seconds);
    // The invalid contents should force revalidation.
    assert!(cc.must_revalidate);
}

#[test]
fn cache_control_invalid_squashes_remainder_but_not_other_header_values() {
    let cc: CacheControl =
        CacheControlParser::new(["no-cache,\x00,max-age=60", "max-stale=30"]).collect();
    // The invalid data doesn't impact things before it.
    assert!(cc.no_cache);
    // The invalid data precludes parsing anything after
    // in the same header value, but not in other
    // header values.
    assert_eq!(Some(30), cc.max_stale_seconds);
    // The invalid contents should force revalidation.
    assert!(cc.must_revalidate);
}

#[test]
fn cache_control_parse_token() {
    let directives = CacheControlParser::new(["no-cache"]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![CacheControlDirective {
            name: "no-cache".to_string(),
            value: vec![]
        }]
    );
}

#[test]
fn cache_control_parse_token_to_token_value() {
    let directives = CacheControlParser::new(["max-age=60"]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![CacheControlDirective {
            name: "max-age".to_string(),
            value: b"60".to_vec(),
        }]
    );
}

#[test]
fn cache_control_parse_token_to_quoted_string() {
    let directives =
        CacheControlParser::new([r#"private="cookie,x-something-else""#]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![CacheControlDirective {
            name: "private".to_string(),
            value: b"cookie,x-something-else".to_vec(),
        }]
    );
}

#[test]
fn cache_control_parse_token_to_quoted_string_with_escape() {
    let directives =
        CacheControlParser::new([r#"private="something\"crazy""#]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![CacheControlDirective {
            name: "private".to_string(),
            value: br#"something"crazy"#.to_vec(),
        }]
    );
}

#[test]
fn cache_control_parse_multiple_directives() {
    let header = r#"max-age=60, no-cache, private="cookie", no-transform"#;
    let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "private".to_string(),
                value: b"cookie".to_vec(),
            },
            CacheControlDirective {
                name: "no-transform".to_string(),
                value: vec![]
            },
        ]
    );
}

#[test]
fn cache_control_parse_multiple_directives_across_multiple_header_values() {
    let headers = [
        r"max-age=60, no-cache",
        r#"private="cookie""#,
        r"no-transform",
    ];
    let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "private".to_string(),
                value: b"cookie".to_vec(),
            },
            CacheControlDirective {
                name: "no-transform".to_string(),
                value: vec![]
            },
        ]
    );
}

#[test]
fn cache_control_parse_one_header_invalid() {
    let headers = [
        r"max-age=60, no-cache",
        r#", private="cookie""#,
        r"no-transform",
    ];
    let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "must-revalidate".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "no-transform".to_string(),
                value: vec![]
            },
        ]
    );
}

#[test]
fn cache_control_parse_invalid_directive_drops_remainder() {
    let header = r#"max-age=60, no-cache, ="cookie", no-transform"#;
    let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "must-revalidate".to_string(),
                value: vec![]
            },
        ]
    );
}

#[test]
fn cache_control_parse_name_normalized() {
    let header = r"MAX-AGE=60";
    let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![CacheControlDirective {
            name: "max-age".to_string(),
            value: b"60".to_vec(),
        },]
    );
}

// When a duplicate directive is found, we keep the first one
// and add in a `must-revalidate` directive to indicate that
// things are stale and the client should do a re-check.
#[test]
fn cache_control_parse_duplicate_directives() {
    let header = r"max-age=60, no-cache, max-age=30";
    let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "must-revalidate".to_string(),
                value: vec![]
            },
        ]
    );
}

#[test]
fn cache_control_parse_duplicate_directives_across_headers() {
    let headers = [r"max-age=60, no-cache", r"max-age=30"];
    let directives = CacheControlParser::new(headers).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "must-revalidate".to_string(),
                value: vec![]
            },
        ]
    );
}

// Tests that we don't emit must-revalidate multiple times
// even when something is duplicated multiple times.
#[test]
fn cache_control_parse_duplicate_redux() {
    let header = r"max-age=60, no-cache, no-cache, max-age=30";
    let directives = CacheControlParser::new([header]).collect::<Vec<_>>();
    assert_eq!(
        directives,
        vec![
            CacheControlDirective {
                name: "max-age".to_string(),
                value: b"60".to_vec(),
            },
            CacheControlDirective {
                name: "no-cache".to_string(),
                value: vec![]
            },
            CacheControlDirective {
                name: "must-revalidate".to_string(),
                value: vec![]
            },
        ]
    );
}
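Aside: the duplicate-handling rule exercised by the tests above also shows up when the directives are collected into the `CacheControl` summary. A minimal sketch, assuming the same `CacheControlParser` and `CacheControl` types from this module:

// Sketch (not part of the diff): with duplicate directives, the first
// value wins and revalidation is forced once collected into `CacheControl`.
let cc: CacheControl = CacheControlParser::new(["max-age=60, max-age=30"]).collect();
assert_eq!(Some(60), cc.max_age_seconds); // first occurrence wins
assert!(cc.must_revalidate); // the duplicate forces a re-check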
@@ -1,6 +1,7 @@
use async_http_range_reader::AsyncHttpRangeReader;
use futures::{FutureExt, TryStreamExt};
use http::HeaderMap;
use itertools::Either;
use reqwest::{Client, Response, StatusCode};
use reqwest_middleware::ClientWithMiddleware;
use std::collections::BTreeMap;

@@ -16,7 +17,7 @@ use uv_configuration::KeyringProviderType;
use uv_configuration::{IndexStrategy, TrustedHost};
use uv_distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
use uv_distribution_types::{
    BuiltDist, File, FileLocation, IndexCapabilities, IndexUrl, IndexUrls, Name,
    BuiltDist, File, FileLocation, Index, IndexCapabilities, IndexUrl, IndexUrls, Name,
};
use uv_metadata::{read_metadata_async_seek, read_metadata_async_stream};
use uv_normalize::PackageName;

@@ -201,11 +202,19 @@ impl RegistryClient {
    /// and [PEP 691 – JSON-based Simple API for Python Package Indexes](https://peps.python.org/pep-0691/),
    /// which the pypi json api approximately implements.
    #[instrument("simple_api", skip_all, fields(package = %package_name))]
    pub async fn simple(
        &self,
    pub async fn simple<'index>(
        &'index self,
        package_name: &PackageName,
    ) -> Result<Vec<(IndexUrl, OwnedArchive<SimpleMetadata>)>, Error> {
        let mut it = self.index_urls.indexes().peekable();
        index: Option<&'index IndexUrl>,
        capabilities: &IndexCapabilities,
    ) -> Result<Vec<(&'index IndexUrl, OwnedArchive<SimpleMetadata>)>, Error> {
        let indexes = if let Some(index) = index {
            Either::Left(std::iter::once(index))
        } else {
            Either::Right(self.index_urls.indexes().map(Index::url))
        };

        let mut it = indexes.peekable();
        if it.peek().is_none() {
            return Err(ErrorKind::NoIndex(package_name.to_string()).into());
        }
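The `Either` branches above exist because the two arms produce different iterator types; wrapping both in `itertools::Either` yields a single type that still implements `Iterator`. A standalone sketch of the same shape, with a hypothetical string list standing in for the index URLs:

use itertools::Either;

// Hypothetical helper mirroring the pattern used in `simple` above.
fn indexes<'a>(one: Option<&'a str>, all: &'a [String]) -> impl Iterator<Item = &'a str> {
    if let Some(one) = one {
        Either::Left(std::iter::once(one))
    } else {
        Either::Right(all.iter().map(String::as_str))
    }
}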
@@ -214,7 +223,7 @@ impl RegistryClient {
        for index in it {
            match self.simple_single_index(package_name, index).await {
                Ok(metadata) => {
                    results.push((index.clone(), metadata));
                    results.push((index, metadata));

                    // If we're only using the first match, we can stop here.
                    if self.index_strategy == IndexStrategy::FirstIndex {

@@ -222,22 +231,23 @@ impl RegistryClient {
                    }
                }
                Err(err) => match err.into_kind() {
                    // The package is unavailable due to a lack of connectivity.
                    ErrorKind::Offline(_) => continue,

                    // The package could not be found in the remote index.
                    ErrorKind::WrappedReqwestError(err) => {
                        if err.status() == Some(StatusCode::NOT_FOUND)
                            || err.status() == Some(StatusCode::UNAUTHORIZED)
                            || err.status() == Some(StatusCode::FORBIDDEN)
                        {
                            continue;
                    ErrorKind::WrappedReqwestError(err) => match err.status() {
                        Some(StatusCode::NOT_FOUND) => {}
                        Some(StatusCode::UNAUTHORIZED) => {
                            capabilities.set_unauthorized(index.clone());
                        }
                        return Err(ErrorKind::from(err).into());
                    }
                        Some(StatusCode::FORBIDDEN) => {
                            capabilities.set_forbidden(index.clone());
                        }
                        _ => return Err(ErrorKind::from(err).into()),
                    },

                    // The package is unavailable due to a lack of connectivity.
                    ErrorKind::Offline(_) => {}

                    // The package could not be found in the local index.
                    ErrorKind::FileNotFound(_) => continue,
                    ErrorKind::FileNotFound(_) => {}

                    other => return Err(other.into()),
                },

@@ -625,7 +635,7 @@ impl RegistryClient {
                headers,
            )
            .await
            .map_err(ErrorKind::AsyncHttpRangeReader)?;
            .map_err(|err| ErrorKind::AsyncHttpRangeReader(url.clone(), err))?;
            trace!("Getting metadata for {filename} by range request");
            let text = wheel_metadata_from_remote_zip(filename, url, &mut reader).await?;
            let metadata =

@@ -663,7 +673,7 @@ impl RegistryClient {

                // Mark the index as not supporting range requests.
                if let Some(index) = index {
                    capabilities.set_supports_range_requests(index.clone(), false);
                    capabilities.set_no_range_requests(index.clone());
                }
            } else {
                return Err(err);

@@ -901,107 +911,4 @@ impl Connectivity {
}

#[cfg(test)]
mod tests {
    use std::str::FromStr;

    use url::Url;

    use uv_normalize::PackageName;
    use uv_pypi_types::{JoinRelativeError, SimpleJson};

    use crate::{html::SimpleHtml, SimpleMetadata, SimpleMetadatum};

    #[test]
    fn ignore_failing_files() {
        // 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
        let response = r#"
        {
            "files": [
                {
                    "core-metadata": false,
                    "data-dist-info-metadata": false,
                    "filename": "pyflyby-1.7.7.tar.gz",
                    "hashes": {
                        "sha256": "0c4d953f405a7be1300b440dbdbc6917011a07d8401345a97e72cd410d5fb291"
                    },
                    "requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*,, !=3.5.*, !=3.6.*, <4",
                    "size": 427200,
                    "upload-time": "2022-05-19T09:14:36.591835Z",
                    "url": "https://files.pythonhosted.org/packages/61/93/9fec62902d0b4fc2521333eba047bff4adbba41f1723a6382367f84ee522/pyflyby-1.7.7.tar.gz",
                    "yanked": false
                },
                {
                    "core-metadata": false,
                    "data-dist-info-metadata": false,
                    "filename": "pyflyby-1.7.8.tar.gz",
                    "hashes": {
                        "sha256": "1ee37474f6da8f98653dbcc208793f50b7ace1d9066f49e2707750a5ba5d53c6"
                    },
                    "requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, <4",
                    "size": 424460,
                    "upload-time": "2022-08-04T10:42:02.190074Z",
                    "url": "https://files.pythonhosted.org/packages/ad/39/17180d9806a1c50197bc63b25d0f1266f745fc3b23f11439fccb3d6baa50/pyflyby-1.7.8.tar.gz",
                    "yanked": false
                }
            ]
        }
        "#;
        let data: SimpleJson = serde_json::from_str(response).unwrap();
        let base = Url::parse("https://pypi.org/simple/pyflyby/").unwrap();
        let simple_metadata = SimpleMetadata::from_files(
            data.files,
            &PackageName::from_str("pyflyby").unwrap(),
            &base,
        );
        let versions: Vec<String> = simple_metadata
            .iter()
            .map(|SimpleMetadatum { version, .. }| version.to_string())
            .collect();
        assert_eq!(versions, ["1.7.8".to_string()]);
    }

    /// Test for AWS Code Artifact registry
    ///
    /// See: <https://github.com/astral-sh/uv/issues/1388>
    #[test]
    fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <head>
            <title>Links for flask</title>
        </head>
        <body>
            <h1>Links for flask</h1>
            <a href="0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237" data-gpg-sig="false" >Flask-0.1.tar.gz</a>
            <br/>
            <a href="0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373" data-gpg-sig="false" >Flask-0.10.1.tar.gz</a>
            <br/>
            <a href="3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403" data-requires-python=">=3.8" data-gpg-sig="false" >flask-3.0.1.tar.gz</a>
            <br/>
        </body>
        </html>
        "#;

        // Note the lack of a trailing `/` here is important for coverage of url-join behavior
        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
            .unwrap();
        let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();

        // Test parsing of the file urls
        let urls = files
            .iter()
            .map(|file| uv_pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
            .collect::<Result<Vec<_>, JoinRelativeError>>()?;
        let urls = urls.iter().map(reqwest::Url::as_str).collect::<Vec<_>>();
        insta::assert_debug_snapshot!(urls, @r###"
        [
            "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
            "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
            "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
        ]
        "###);

        Ok(())
    }
}
mod tests;
102
crates/uv-client/src/registry_client/tests.rs
Normal file

@@ -0,0 +1,102 @@
use std::str::FromStr;

use url::Url;

use uv_normalize::PackageName;
use uv_pypi_types::{JoinRelativeError, SimpleJson};

use crate::{html::SimpleHtml, SimpleMetadata, SimpleMetadatum};

#[test]
fn ignore_failing_files() {
    // 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
    let response = r#"
    {
        "files": [
            {
                "core-metadata": false,
                "data-dist-info-metadata": false,
                "filename": "pyflyby-1.7.7.tar.gz",
                "hashes": {
                    "sha256": "0c4d953f405a7be1300b440dbdbc6917011a07d8401345a97e72cd410d5fb291"
                },
                "requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*,, !=3.5.*, !=3.6.*, <4",
                "size": 427200,
                "upload-time": "2022-05-19T09:14:36.591835Z",
                "url": "https://files.pythonhosted.org/packages/61/93/9fec62902d0b4fc2521333eba047bff4adbba41f1723a6382367f84ee522/pyflyby-1.7.7.tar.gz",
                "yanked": false
            },
            {
                "core-metadata": false,
                "data-dist-info-metadata": false,
                "filename": "pyflyby-1.7.8.tar.gz",
                "hashes": {
                    "sha256": "1ee37474f6da8f98653dbcc208793f50b7ace1d9066f49e2707750a5ba5d53c6"
                },
                "requires-python": ">=2.5, !=3.0.*, !=3.1.*, !=3.2.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*, <4",
                "size": 424460,
                "upload-time": "2022-08-04T10:42:02.190074Z",
                "url": "https://files.pythonhosted.org/packages/ad/39/17180d9806a1c50197bc63b25d0f1266f745fc3b23f11439fccb3d6baa50/pyflyby-1.7.8.tar.gz",
                "yanked": false
            }
        ]
    }
    "#;
    let data: SimpleJson = serde_json::from_str(response).unwrap();
    let base = Url::parse("https://pypi.org/simple/pyflyby/").unwrap();
    let simple_metadata = SimpleMetadata::from_files(
        data.files,
        &PackageName::from_str("pyflyby").unwrap(),
        &base,
    );
    let versions: Vec<String> = simple_metadata
        .iter()
        .map(|SimpleMetadatum { version, .. }| version.to_string())
        .collect();
    assert_eq!(versions, ["1.7.8".to_string()]);
}

/// Test for AWS Code Artifact registry
///
/// See: <https://github.com/astral-sh/uv/issues/1388>
#[test]
fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
    let text = r#"
    <!DOCTYPE html>
    <html>
    <head>
        <title>Links for flask</title>
    </head>
    <body>
        <h1>Links for flask</h1>
        <a href="0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237" data-gpg-sig="false" >Flask-0.1.tar.gz</a>
        <br/>
        <a href="0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373" data-gpg-sig="false" >Flask-0.10.1.tar.gz</a>
        <br/>
        <a href="3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403" data-requires-python=">=3.8" data-gpg-sig="false" >flask-3.0.1.tar.gz</a>
        <br/>
    </body>
    </html>
    "#;

    // Note the lack of a trailing `/` here is important for coverage of url-join behavior
    let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
        .unwrap();
    let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();

    // Test parsing of the file urls
    let urls = files
        .iter()
        .map(|file| uv_pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
        .collect::<Result<Vec<_>, JoinRelativeError>>()?;
    let urls = urls.iter().map(reqwest::Url::as_str).collect::<Vec<_>>();
    insta::assert_debug_snapshot!(urls, @r###"
    [
        "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.1/Flask-0.1.tar.gz#sha256=9da884457e910bf0847d396cb4b778ad9f3c3d17db1c5997cb861937bd284237",
        "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/0.10.1/Flask-0.10.1.tar.gz#sha256=4c83829ff83d408b5e1d4995472265411d2c414112298f2eb4b359d9e4563373",
        "https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/3.0.1/flask-3.0.1.tar.gz#sha256=6489f51bb3666def6f314e15f19d50a1869a19ae0e8c9a3641ffe66c77d42403",
    ]
    "###);

    Ok(())
}
2
crates/uv-client/tests/it/main.rs
Normal file

@@ -0,0 +1,2 @@
mod remote_metadata;
mod user_agent_version;

@@ -9,6 +9,9 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

@@ -21,6 +24,7 @@ uv-normalize = { workspace = true }
uv-pep508 = { workspace = true, features = ["schemars"] }
uv-platform-tags = { workspace = true }
uv-pypi-types = { workspace = true }
uv-static = { workspace = true }

clap = { workspace = true, features = ["derive"], optional = true }
either = { workspace = true }
8
crates/uv-configuration/src/bounds.rs
Normal file

@@ -0,0 +1,8 @@
#[derive(Debug, Default, Copy, Clone)]
pub enum LowerBound {
    /// Allow missing lower bounds.
    #[default]
    Allow,
    /// Warn about missing lower bounds.
    Warn,
}
@@ -354,66 +354,4 @@ pub enum IndexStrategy {
}

#[cfg(test)]
mod tests {
    use std::str::FromStr;

    use anyhow::Error;

    use super::*;

    #[test]
    fn no_build_from_args() -> Result<(), Error> {
        assert_eq!(
            NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], false),
            NoBuild::All,
        );
        assert_eq!(
            NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], true),
            NoBuild::All,
        );
        assert_eq!(
            NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], true),
            NoBuild::All,
        );
        assert_eq!(
            NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], false),
            NoBuild::None,
        );
        assert_eq!(
            NoBuild::from_pip_args(
                vec![
                    PackageNameSpecifier::from_str("foo")?,
                    PackageNameSpecifier::from_str("bar")?
                ],
                false
            ),
            NoBuild::Packages(vec![
                PackageName::from_str("foo")?,
                PackageName::from_str("bar")?
            ]),
        );
        assert_eq!(
            NoBuild::from_pip_args(
                vec![
                    PackageNameSpecifier::from_str("test")?,
                    PackageNameSpecifier::All
                ],
                false
            ),
            NoBuild::All,
        );
        assert_eq!(
            NoBuild::from_pip_args(
                vec![
                    PackageNameSpecifier::from_str("foo")?,
                    PackageNameSpecifier::from_str(":none:")?,
                    PackageNameSpecifier::from_str("bar")?
                ],
                false
            ),
            NoBuild::Packages(vec![PackageName::from_str("bar")?]),
        );

        Ok(())
    }
}
mod tests;
61
crates/uv-configuration/src/build_options/tests.rs
Normal file

@@ -0,0 +1,61 @@
use std::str::FromStr;

use anyhow::Error;

use super::*;

#[test]
fn no_build_from_args() -> Result<(), Error> {
    assert_eq!(
        NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], false),
        NoBuild::All,
    );
    assert_eq!(
        NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":all:")?], true),
        NoBuild::All,
    );
    assert_eq!(
        NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], true),
        NoBuild::All,
    );
    assert_eq!(
        NoBuild::from_pip_args(vec![PackageNameSpecifier::from_str(":none:")?], false),
        NoBuild::None,
    );
    assert_eq!(
        NoBuild::from_pip_args(
            vec![
                PackageNameSpecifier::from_str("foo")?,
                PackageNameSpecifier::from_str("bar")?
            ],
            false
        ),
        NoBuild::Packages(vec![
            PackageName::from_str("foo")?,
            PackageName::from_str("bar")?
        ]),
    );
    assert_eq!(
        NoBuild::from_pip_args(
            vec![
                PackageNameSpecifier::from_str("test")?,
                PackageNameSpecifier::All
            ],
            false
        ),
        NoBuild::All,
    );
    assert_eq!(
        NoBuild::from_pip_args(
            vec![
                PackageNameSpecifier::from_str("foo")?,
                PackageNameSpecifier::from_str(":none:")?,
                PackageNameSpecifier::from_str("bar")?
            ],
            false
        ),
        NoBuild::Packages(vec![PackageName::from_str("bar")?]),
    );

    Ok(())
}
@@ -213,82 +213,4 @@ impl<'de> serde::Deserialize<'de> for ConfigSettings {
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn collect_config_settings() {
        let settings: ConfigSettings = vec![
            ConfigSettingEntry {
                key: "key".to_string(),
                value: "value".to_string(),
            },
            ConfigSettingEntry {
                key: "key".to_string(),
                value: "value2".to_string(),
            },
            ConfigSettingEntry {
                key: "list".to_string(),
                value: "value3".to_string(),
            },
            ConfigSettingEntry {
                key: "list".to_string(),
                value: "value4".to_string(),
            },
        ]
        .into_iter()
        .collect();
        assert_eq!(
            settings.0.get("key"),
            Some(&ConfigSettingValue::List(vec![
                "value".to_string(),
                "value2".to_string()
            ]))
        );
        assert_eq!(
            settings.0.get("list"),
            Some(&ConfigSettingValue::List(vec![
                "value3".to_string(),
                "value4".to_string()
            ]))
        );
    }

    #[test]
    fn escape_for_python() {
        let mut settings = ConfigSettings::default();
        settings.0.insert(
            "key".to_string(),
            ConfigSettingValue::String("value".to_string()),
        );
        settings.0.insert(
            "list".to_string(),
            ConfigSettingValue::List(vec!["value1".to_string(), "value2".to_string()]),
        );
        assert_eq!(
            settings.escape_for_python(),
            r#"{"key":"value","list":["value1","value2"]}"#
        );

        let mut settings = ConfigSettings::default();
        settings.0.insert(
            "key".to_string(),
            ConfigSettingValue::String("Hello, \"world!\"".to_string()),
        );
        settings.0.insert(
            "list".to_string(),
            ConfigSettingValue::List(vec!["'value1'".to_string()]),
        );
        assert_eq!(
            settings.escape_for_python(),
            r#"{"key":"Hello, \"world!\"","list":["'value1'"]}"#
        );

        let mut settings = ConfigSettings::default();
        settings.0.insert(
            "key".to_string(),
            ConfigSettingValue::String("val\\1 {}value".to_string()),
        );
        assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#);
    }
}
mod tests;
77
crates/uv-configuration/src/config_settings/tests.rs
Normal file

@@ -0,0 +1,77 @@
use super::*;

#[test]
fn collect_config_settings() {
    let settings: ConfigSettings = vec![
        ConfigSettingEntry {
            key: "key".to_string(),
            value: "value".to_string(),
        },
        ConfigSettingEntry {
            key: "key".to_string(),
            value: "value2".to_string(),
        },
        ConfigSettingEntry {
            key: "list".to_string(),
            value: "value3".to_string(),
        },
        ConfigSettingEntry {
            key: "list".to_string(),
            value: "value4".to_string(),
        },
    ]
    .into_iter()
    .collect();
    assert_eq!(
        settings.0.get("key"),
        Some(&ConfigSettingValue::List(vec![
            "value".to_string(),
            "value2".to_string()
        ]))
    );
    assert_eq!(
        settings.0.get("list"),
        Some(&ConfigSettingValue::List(vec![
            "value3".to_string(),
            "value4".to_string()
        ]))
    );
}

#[test]
fn escape_for_python() {
    let mut settings = ConfigSettings::default();
    settings.0.insert(
        "key".to_string(),
        ConfigSettingValue::String("value".to_string()),
    );
    settings.0.insert(
        "list".to_string(),
        ConfigSettingValue::List(vec!["value1".to_string(), "value2".to_string()]),
    );
    assert_eq!(
        settings.escape_for_python(),
        r#"{"key":"value","list":["value1","value2"]}"#
    );

    let mut settings = ConfigSettings::default();
    settings.0.insert(
        "key".to_string(),
        ConfigSettingValue::String("Hello, \"world!\"".to_string()),
    );
    settings.0.insert(
        "list".to_string(),
        ConfigSettingValue::List(vec!["'value1'".to_string()]),
    );
    assert_eq!(
        settings.escape_for_python(),
        r#"{"key":"Hello, \"world!\"","list":["'value1'"]}"#
    );

    let mut settings = ConfigSettings::default();
    settings.0.insert(
        "key".to_string(),
        ConfigSettingValue::String("val\\1 {}value".to_string()),
    );
    assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#);
}
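As the tests above show, repeated entries for one key collapse into `ConfigSettingValue::List`, and `escape_for_python` renders the map as a JSON object string. A usage sketch under those same assumptions:

// Sketch: repeated entries for a key become a JSON list when escaped.
let settings: ConfigSettings = vec![
    ConfigSettingEntry { key: "key".to_string(), value: "v1".to_string() },
    ConfigSettingEntry { key: "key".to_string(), value: "v2".to_string() },
]
.into_iter()
.collect();
assert_eq!(settings.escape_for_python(), r#"{"key":["v1","v2"]}"#);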
@@ -1,4 +1,5 @@
pub use authentication::*;
pub use bounds::*;
pub use build_options::*;
pub use concurrency::*;
pub use config_settings::*;

@@ -13,6 +14,7 @@ pub use name_specifiers::*;
pub use overrides::*;
pub use package_options::*;
pub use preview::*;
pub use project_build_backend::*;
pub use sources::*;
pub use target_triple::*;
pub use trusted_host::*;

@@ -20,6 +22,7 @@ pub use trusted_publishing::*;
pub use vcs::*;

mod authentication;
mod bounds;
mod build_options;
mod concurrency;
mod config_settings;

@@ -34,6 +37,7 @@ mod name_specifiers;
mod overrides;
mod package_options;
mod preview;
mod project_build_backend;
mod sources;
mod target_triple;
mod trusted_host;
20
crates/uv-configuration/src/project_build_backend.rs
Normal file

@@ -0,0 +1,20 @@
/// Available project build backends for use in `pyproject.toml`.
#[derive(Clone, Copy, Debug, PartialEq, Default, serde::Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
#[cfg_attr(feature = "clap", derive(clap::ValueEnum))]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub enum ProjectBuildBackend {
    #[default]
    /// Use [hatchling](https://pypi.org/project/hatchling) as the project build backend.
    Hatch,
    /// Use [flit-core](https://pypi.org/project/flit-core) as the project build backend.
    Flit,
    /// Use [pdm-backend](https://pypi.org/project/pdm-backend) as the project build backend.
    PDM,
    /// Use [setuptools](https://pypi.org/project/setuptools) as the project build backend.
    Setuptools,
    /// Use [maturin](https://pypi.org/project/maturin) as the project build backend.
    Maturin,
    /// Use [scikit-build-core](https://pypi.org/project/scikit-build-core) as the project build backend.
    Scikit,
}
@@ -2,6 +2,7 @@ use tracing::debug;

use uv_pep508::MarkerEnvironment;
use uv_platform_tags::{Arch, Os, Platform};
use uv_static::EnvVars;

/// The supported target triples. Each triple consists of an architecture, vendor, and operating
/// system.

@@ -321,7 +322,7 @@ impl TargetTriple {

    /// Return the macOS deployment target as parsed from the environment.
    fn macos_deployment_target() -> Option<(u16, u16)> {
        let version = std::env::var("MACOSX_DEPLOYMENT_TARGET").ok()?;
        let version = std::env::var(EnvVars::MACOSX_DEPLOYMENT_TARGET).ok()?;
        let mut parts = version.split('.');

        // Parse the major version (e.g., `12` in `12.0`).
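The deployment-target parse above splits the `MACOSX_DEPLOYMENT_TARGET` string on `.` and reads the major and minor components. A minimal standalone sketch of that parse, with the environment lookup replaced by a literal and a hypothetical helper name:

// Hypothetical free-standing version of the version split used above.
fn parse_deployment_target(version: &str) -> Option<(u16, u16)> {
    let mut parts = version.split('.');
    let major = parts.next()?.parse::<u16>().ok()?; // e.g. `12` in `12.0`
    let minor = parts.next()?.parse::<u16>().ok()?; // e.g. `0` in `12.0`
    Some((major, minor))
}

assert_eq!(parse_deployment_target("12.0"), Some((12, 0)));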
@@ -2,34 +2,39 @@ use serde::{Deserialize, Deserializer};
use std::str::FromStr;
use url::Url;

/// A trusted host, which could be a host or a host-port pair.
/// A host specification (wildcard, or host, with optional scheme and/or port) for which
/// certificates are not verified when making HTTPS requests.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TrustedHost {
    scheme: Option<String>,
    host: String,
    port: Option<u16>,
pub enum TrustedHost {
    Wildcard,
    Host {
        scheme: Option<String>,
        host: String,
        port: Option<u16>,
    },
}

impl TrustedHost {
    /// Returns `true` if the [`Url`] matches this trusted host.
    pub fn matches(&self, url: &Url) -> bool {
        if self
            .scheme
            .as_ref()
            .is_some_and(|scheme| scheme != url.scheme())
        {
            return false;
        }
        match self {
            TrustedHost::Wildcard => true,
            TrustedHost::Host { scheme, host, port } => {
                if scheme.as_ref().is_some_and(|scheme| scheme != url.scheme()) {
                    return false;
                }

        if self.port.is_some_and(|port| url.port() != Some(port)) {
            return false;
        }
                if port.is_some_and(|port| url.port() != Some(port)) {
                    return false;
                }

        if Some(self.host.as_ref()) != url.host_str() {
            return false;
        }
                if Some(host.as_str()) != url.host_str() {
                    return false;
                }

        true
                true
            }
        }
    }
}

@@ -48,7 +53,7 @@ impl<'de> Deserialize<'de> for TrustedHost {
        serde_untagged::UntaggedEnumVisitor::new()
            .string(|string| TrustedHost::from_str(string).map_err(serde::de::Error::custom))
            .map(|map| {
                map.deserialize::<Inner>().map(|inner| TrustedHost {
                map.deserialize::<Inner>().map(|inner| TrustedHost::Host {
                    scheme: inner.scheme,
                    host: inner.host,
                    port: inner.port,

@@ -80,6 +85,10 @@ impl std::str::FromStr for TrustedHost {
    type Err = TrustedHostError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s == "*" {
            return Ok(Self::Wildcard);
        }

        // Detect scheme.
        let (scheme, s) = if let Some(s) = s.strip_prefix("https://") {
            (Some("https".to_string()), s)

@@ -105,20 +114,27 @@ impl std::str::FromStr for TrustedHost {
            .transpose()
            .map_err(|_| TrustedHostError::InvalidPort(s.to_string()))?;

        Ok(Self { scheme, host, port })
        Ok(Self::Host { scheme, host, port })
    }
}

impl std::fmt::Display for TrustedHost {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        if let Some(scheme) = &self.scheme {
            write!(f, "{}://{}", scheme, self.host)?;
        } else {
            write!(f, "{}", self.host)?;
        }
        match self {
            TrustedHost::Wildcard => {
                write!(f, "*")?;
            }
            TrustedHost::Host { scheme, host, port } => {
                if let Some(scheme) = &scheme {
                    write!(f, "{scheme}://{host}")?;
                } else {
                    write!(f, "{host}")?;
                }

        if let Some(port) = self.port {
            write!(f, ":{port}")?;
        }
                if let Some(port) = port {
                    write!(f, ":{port}")?;
                }
            }
        }

        Ok(())

@@ -145,45 +161,4 @@ impl schemars::JsonSchema for TrustedHost {
}

#[cfg(test)]
mod tests {
    #[test]
    fn parse() {
        assert_eq!(
            "example.com".parse::<super::TrustedHost>().unwrap(),
            super::TrustedHost {
                scheme: None,
                host: "example.com".to_string(),
                port: None
            }
        );

        assert_eq!(
            "example.com:8080".parse::<super::TrustedHost>().unwrap(),
            super::TrustedHost {
                scheme: None,
                host: "example.com".to_string(),
                port: Some(8080)
            }
        );

        assert_eq!(
            "https://example.com".parse::<super::TrustedHost>().unwrap(),
            super::TrustedHost {
                scheme: Some("https".to_string()),
                host: "example.com".to_string(),
                port: None
            }
        );

        assert_eq!(
            "https://example.com/hello/world"
                .parse::<super::TrustedHost>()
                .unwrap(),
            super::TrustedHost {
                scheme: Some("https".to_string()),
                host: "example.com".to_string(),
                port: None
            }
        );
    }
}
mod tests;
45
crates/uv-configuration/src/trusted_host/tests.rs
Normal file

@@ -0,0 +1,45 @@
#[test]
fn parse() {
    assert_eq!(
        "*".parse::<super::TrustedHost>().unwrap(),
        super::TrustedHost::Wildcard
    );

    assert_eq!(
        "example.com".parse::<super::TrustedHost>().unwrap(),
        super::TrustedHost::Host {
            scheme: None,
            host: "example.com".to_string(),
            port: None
        }
    );

    assert_eq!(
        "example.com:8080".parse::<super::TrustedHost>().unwrap(),
        super::TrustedHost::Host {
            scheme: None,
            host: "example.com".to_string(),
            port: Some(8080)
        }
    );

    assert_eq!(
        "https://example.com".parse::<super::TrustedHost>().unwrap(),
        super::TrustedHost::Host {
            scheme: Some("https".to_string()),
            host: "example.com".to_string(),
            port: None
        }
    );

    assert_eq!(
        "https://example.com/hello/world"
            .parse::<super::TrustedHost>()
            .unwrap(),
        super::TrustedHost::Host {
            scheme: Some("https".to_string()),
            host: "example.com".to_string(),
            port: None
        }
    );
}
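Given the enum above, matching is all-or-nothing for the wildcard and field-by-field for a host entry. A small usage sketch, assuming the `url` crate and the `TrustedHost` type from this file:

use url::Url;

// Sketch: a parsed host entry checks scheme, host, and port; the wildcard
// variant trusts every URL.
let host: TrustedHost = "example.com:8080".parse().unwrap();
let url = Url::parse("https://example.com:8080/simple/").unwrap();
assert!(host.matches(&url));
assert!(TrustedHost::Wildcard.matches(&url));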
@ -4,6 +4,9 @@ version = "0.0.1"
|
|||
edition = "2021"
|
||||
description = "Utilities for interacting with the terminal"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
|
|
@@ -1,4 +1,5 @@
use console::{style, Key, Term};
use console::{measure_text_width, style, Key, Term};
use std::{cmp::Ordering, iter};

/// Prompt the user for confirmation in the given [`Term`].
///

@@ -72,3 +73,190 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<boo

    Ok(response)
}

/// Prompt the user for a password in the given [`Term`].
///
/// This is a slimmed-down version of `dialoguer::Password`.
pub fn password(prompt: &str, term: &Term) -> std::io::Result<String> {
    term.write_str(prompt)?;
    term.show_cursor()?;
    term.flush()?;

    let input = term.read_secure_line()?;

    term.clear_line()?;

    Ok(input)
}

/// Prompt the user for input text in the given [`Term`].
///
/// This is a slimmed-down version of `dialoguer::Input`.
#[allow(
    // Suppress Clippy lints triggered by `dialoguer::Input`.
    clippy::cast_possible_truncation,
    clippy::cast_possible_wrap,
    clippy::cast_sign_loss
)]
pub fn input(prompt: &str, term: &Term) -> std::io::Result<String> {
    term.write_str(prompt)?;
    term.show_cursor()?;
    term.flush()?;

    let prompt_len = measure_text_width(prompt);

    let mut chars: Vec<char> = Vec::new();
    let mut position = 0;
    loop {
        match term.read_key()? {
            Key::Backspace if position > 0 => {
                position -= 1;
                chars.remove(position);
                let line_size = term.size().1 as usize;
                // Case where we want to delete the last char of a line, so the cursor is at the beginning of the next line
                if (position + prompt_len) % (line_size - 1) == 0 {
                    term.clear_line()?;
                    term.move_cursor_up(1)?;
                    term.move_cursor_right(line_size + 1)?;
                } else {
                    term.clear_chars(1)?;
                }

                let tail: String = chars[position..].iter().collect();

                if !tail.is_empty() {
                    term.write_str(&tail)?;

                    let total = position + prompt_len + tail.chars().count();
                    let total_line = total / line_size;
                    let line_cursor = (position + prompt_len) / line_size;
                    term.move_cursor_up(total_line - line_cursor)?;

                    term.move_cursor_left(line_size)?;
                    term.move_cursor_right((position + prompt_len) % line_size)?;
                }

                term.flush()?;
            }
            Key::Char(chr) if !chr.is_ascii_control() => {
                chars.insert(position, chr);
                position += 1;
                let tail: String = iter::once(&chr).chain(chars[position..].iter()).collect();
                term.write_str(&tail)?;
                term.move_cursor_left(tail.chars().count() - 1)?;
                term.flush()?;
            }
            Key::ArrowLeft if position > 0 => {
                if (position + prompt_len) % term.size().1 as usize == 0 {
                    term.move_cursor_up(1)?;
                    term.move_cursor_right(term.size().1 as usize)?;
                } else {
                    term.move_cursor_left(1)?;
                }
                position -= 1;
                term.flush()?;
            }
            Key::ArrowRight if position < chars.len() => {
                if (position + prompt_len) % (term.size().1 as usize - 1) == 0 {
                    term.move_cursor_down(1)?;
                    term.move_cursor_left(term.size().1 as usize)?;
                } else {
                    term.move_cursor_right(1)?;
                }
                position += 1;
                term.flush()?;
            }
            Key::UnknownEscSeq(seq) if seq == vec!['b'] => {
                let line_size = term.size().1 as usize;
                let nb_space = chars[..position]
                    .iter()
                    .rev()
                    .take_while(|c| c.is_whitespace())
                    .count();
                let find_last_space = chars[..position - nb_space]
                    .iter()
                    .rposition(|c| c.is_whitespace());

                // If we find a space we set the cursor to the next char, else we set it to the beginning of the input
                if let Some(mut last_space) = find_last_space {
                    if last_space < position {
                        last_space += 1;
                        let new_line = (prompt_len + last_space) / line_size;
                        let old_line = (prompt_len + position) / line_size;
                        let diff_line = old_line - new_line;
                        if diff_line != 0 {
                            term.move_cursor_up(old_line - new_line)?;
                        }

                        let new_pos_x = (prompt_len + last_space) % line_size;
                        let old_pos_x = (prompt_len + position) % line_size;
                        let diff_pos_x = new_pos_x as i64 - old_pos_x as i64;
                        if diff_pos_x < 0 {
                            term.move_cursor_left(-diff_pos_x as usize)?;
                        } else {
                            term.move_cursor_right((diff_pos_x) as usize)?;
                        }
                        position = last_space;
                    }
                } else {
                    term.move_cursor_left(position)?;
                    position = 0;
                }

                term.flush()?;
            }
            Key::UnknownEscSeq(seq) if seq == vec!['f'] => {
                let line_size = term.size().1 as usize;
                let find_next_space = chars[position..].iter().position(|c| c.is_whitespace());

                // If we find a space we set the cursor to the next char, else we set it to the end of the input
                if let Some(mut next_space) = find_next_space {
                    let nb_space = chars[position + next_space..]
                        .iter()
                        .take_while(|c| c.is_whitespace())
                        .count();
                    next_space += nb_space;
                    let new_line = (prompt_len + position + next_space) / line_size;
                    let old_line = (prompt_len + position) / line_size;
                    term.move_cursor_down(new_line - old_line)?;

                    let new_pos_x = (prompt_len + position + next_space) % line_size;
                    let old_pos_x = (prompt_len + position) % line_size;
                    let diff_pos_x = new_pos_x as i64 - old_pos_x as i64;
                    if diff_pos_x < 0 {
                        term.move_cursor_left(-diff_pos_x as usize)?;
                    } else {
                        term.move_cursor_right((diff_pos_x) as usize)?;
                    }
                    position += next_space;
                } else {
                    let new_line = (prompt_len + chars.len()) / line_size;
                    let old_line = (prompt_len + position) / line_size;
                    term.move_cursor_down(new_line - old_line)?;

                    let new_pos_x = (prompt_len + chars.len()) % line_size;
                    let old_pos_x = (prompt_len + position) % line_size;
                    let diff_pos_x = new_pos_x as i64 - old_pos_x as i64;
                    match diff_pos_x.cmp(&0) {
                        Ordering::Less => {
                            term.move_cursor_left((-diff_pos_x - 1) as usize)?;
                        }
                        Ordering::Equal => {}
                        Ordering::Greater => {
                            term.move_cursor_right((diff_pos_x) as usize)?;
                        }
                    }
                    position = chars.len();
                }

                term.flush()?;
            }
            Key::Enter => break,
            _ => (),
        }
    }
    let input = chars.iter().collect::<String>();
    term.write_line("")?;

    Ok(input)
}
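The cursor bookkeeping in `input` above repeatedly converts a linear offset (prompt width plus position) into a terminal row and column using the line width. A compact sketch of that arithmetic, with a hypothetical helper name and a fixed width for illustration:

// Hypothetical helper mirroring the row/column arithmetic used above.
fn cursor_pos(prompt_len: usize, position: usize, line_size: usize) -> (usize, usize) {
    let row = (prompt_len + position) / line_size; // lines wrapped so far
    let col = (prompt_len + position) % line_size; // offset within the line
    (row, col)
}

assert_eq!(cursor_pos(4, 10, 8), (1, 6));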
@@ -28,6 +28,7 @@ uv-pep508 = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-settings = { workspace = true, features = ["schemars"] }
uv-static = { workspace = true }
uv-workspace = { workspace = true, features = ["schemars"] }

# Any dependencies that are exclusively used in `uv-dev` should be listed as non-workspace
@@ -324,22 +324,4 @@ fn emit_possible_options(opt: &clap::Arg, output: &mut String) {
}

#[cfg(test)]
mod tests {
    use std::env;

    use anyhow::Result;

    use crate::generate_all::Mode;

    use super::{main, Args};

    #[test]
    fn test_generate_cli_reference() -> Result<()> {
        let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
            Mode::Write
        } else {
            Mode::Check
        };
        main(&Args { mode })
    }
}
mod tests;
19
crates/uv-dev/src/generate_cli_reference/tests.rs
Normal file

@@ -0,0 +1,19 @@
use std::env;

use anyhow::Result;

use uv_static::EnvVars;

use crate::generate_all::Mode;

use super::{main, Args};

#[test]
fn test_generate_cli_reference() -> Result<()> {
    let mode = if env::var(EnvVars::UV_UPDATE_SCHEMA).as_deref() == Ok("1") {
        Mode::Write
    } else {
        Mode::Check
    };
    main(&Args { mode })
}
@@ -81,22 +81,4 @@ pub(crate) fn main(args: &Args) -> Result<()> {
}

#[cfg(test)]
mod tests {
    use std::env;

    use anyhow::Result;

    use crate::generate_all::Mode;

    use super::{main, Args};

    #[test]
    fn test_generate_json_schema() -> Result<()> {
        let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
            Mode::Write
        } else {
            Mode::Check
        };
        main(&Args { mode })
    }
}
mod tests;
19
crates/uv-dev/src/generate_json_schema/tests.rs
Normal file

@@ -0,0 +1,19 @@
use std::env;

use anyhow::Result;

use uv_static::EnvVars;

use crate::generate_all::Mode;

use super::{main, Args};

#[test]
fn test_generate_json_schema() -> Result<()> {
    let mode = if env::var(EnvVars::UV_UPDATE_SCHEMA).as_deref() == Ok("1") {
        Mode::Write
    } else {
        Mode::Check
    };
    main(&Args { mode })
}
@@ -350,22 +350,4 @@ impl Visit for CollectOptionsVisitor {
}

#[cfg(test)]
mod tests {
    use std::env;

    use anyhow::Result;

    use crate::generate_all::Mode;

    use super::{main, Args};

    #[test]
    fn test_generate_options_reference() -> Result<()> {
        let mode = if env::var("UV_UPDATE_SCHEMA").as_deref() == Ok("1") {
            Mode::Write
        } else {
            Mode::Check
        };
        main(&Args { mode })
    }
}
mod tests;
19
crates/uv-dev/src/generate_options_reference/tests.rs
Normal file

@@ -0,0 +1,19 @@
use std::env;

use anyhow::Result;

use uv_static::EnvVars;

use crate::generate_all::Mode;

use super::{main, Args};

#[test]
fn test_generate_options_reference() -> Result<()> {
    let mode = if env::var(EnvVars::UV_UPDATE_SCHEMA).as_deref() == Ok("1") {
        Mode::Write
    } else {
        Mode::Check
    };
    main(&Args { mode })
}
@@ -25,6 +25,7 @@ use crate::generate_options_reference::Args as GenerateOptionsReferenceArgs;
#[cfg(feature = "render")]
use crate::render_benchmarks::RenderBenchmarksArgs;
use crate::wheel_metadata::WheelMetadataArgs;
use uv_static::EnvVars;

mod clear_compile;
mod compile;

@@ -77,7 +78,7 @@ async fn run() -> Result<()> {

#[tokio::main(flavor = "current_thread")]
async fn main() -> ExitCode {
    let (duration_layer, _guard) = if let Ok(location) = env::var("TRACING_DURATIONS_FILE") {
    let (duration_layer, _guard) = if let Ok(location) = env::var(EnvVars::TRACING_DURATIONS_FILE) {
        let location = PathBuf::from(location);
        if let Some(parent) = location.parent() {
            fs_err::tokio::create_dir_all(&parent)
@@ -10,6 +10,9 @@ repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true
@ -15,7 +15,8 @@ use uv_build_frontend::{SourceBuild, SourceBuildContext};
|
|||
use uv_cache::Cache;
|
||||
use uv_client::RegistryClient;
|
||||
use uv_configuration::{
|
||||
BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, Reinstall, SourceStrategy,
|
||||
BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, LowerBound, Reinstall,
|
||||
SourceStrategy,
|
||||
};
|
||||
use uv_configuration::{BuildOutput, Concurrency};
|
||||
use uv_distribution::DistributionDatabase;
|
||||
|
@ -28,8 +29,8 @@ use uv_installer::{Installer, Plan, Planner, Preparer, SitePackages};
|
|||
use uv_pypi_types::Requirement;
|
||||
use uv_python::{Interpreter, PythonEnvironment};
|
||||
use uv_resolver::{
|
||||
ExcludeNewer, FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver,
|
||||
ResolverMarkers,
|
||||
ExcludeNewer, FlatIndex, Flexibility, InMemoryIndex, Manifest, OptionsBuilder,
|
||||
PythonRequirement, Resolver, ResolverMarkers,
|
||||
};
|
||||
use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy, InFlight};
|
||||
|
||||
|
@@ -56,6 +57,7 @@ pub struct BuildDispatch<'a> {
     exclude_newer: Option<ExcludeNewer>,
     source_build_context: SourceBuildContext,
     build_extra_env_vars: FxHashMap<OsString, OsString>,
+    bounds: LowerBound,
     sources: SourceStrategy,
     concurrency: Concurrency,
 }
@@ -80,6 +82,7 @@ impl<'a> BuildDispatch<'a> {
         build_options: &'a BuildOptions,
         hasher: &'a HashStrategy,
         exclude_newer: Option<ExcludeNewer>,
+        bounds: LowerBound,
         sources: SourceStrategy,
         concurrency: Concurrency,
     ) -> Self {
@@ -104,6 +107,7 @@ impl<'a> BuildDispatch<'a> {
             exclude_newer,
             source_build_context: SourceBuildContext::default(),
             build_extra_env_vars: FxHashMap::default(),
+            bounds,
             sources,
             concurrency,
         }
@@ -152,11 +156,15 @@ impl<'a> BuildContext for BuildDispatch<'a> {
         self.config_settings
     }
 
+    fn bounds(&self) -> LowerBound {
+        self.bounds
+    }
+
     fn sources(&self) -> SourceStrategy {
         self.sources
     }
 
-    fn index_locations(&self) -> &IndexLocations {
+    fn locations(&self) -> &IndexLocations {
         self.index_locations
     }
 
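The new `bounds()` accessor follows the same shape as the existing `sources()`: the dispatch struct stores a small `Copy` configuration enum and the `BuildContext` trait exposes it by value. A compressed, self-contained sketch of that pattern (the `LowerBound` variant names and the trait shape here are illustrative, not uv's actual definitions):

    // Illustrative stand-ins for `LowerBound` and the `BuildContext` accessor.
    #[derive(Clone, Copy, Debug)]
    enum LowerBound {
        Allow,
        Warn,
    }

    trait BuildContext {
        // Returned by value: the enum is `Copy`, so no borrow is needed.
        fn bounds(&self) -> LowerBound;
    }

    struct BuildDispatch {
        bounds: LowerBound,
    }

    impl BuildContext for BuildDispatch {
        fn bounds(&self) -> LowerBound {
            self.bounds
        }
    }

    fn main() {
        let dispatch = BuildDispatch { bounds: LowerBound::Warn };
        println!("{:?}", dispatch.bounds());
    }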
@@ -170,6 +178,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
         OptionsBuilder::new()
             .exclude_newer(self.exclude_newer)
             .index_strategy(self.index_strategy)
+            .flexibility(Flexibility::Fixed)
             .build(),
         &python_requirement,
         ResolverMarkers::specific_environment(markers),
@@ -228,7 +237,7 @@ impl<'a> BuildContext for BuildDispatch<'a> {
         } = Planner::new(resolution).build(
             site_packages,
             &Reinstall::default(),
-            &BuildOptions::default(),
+            self.build_options,
             self.hasher,
             self.index_locations,
             self.config_settings,
@@ -309,8 +318,10 @@ impl<'a> BuildContext for BuildDispatch<'a> {
         &'data self,
         source: &'data Path,
         subdirectory: Option<&'data Path>,
+        install_path: &'data Path,
         version_id: Option<String>,
         dist: Option<&'data SourceDist>,
+        sources: SourceStrategy,
         build_kind: BuildKind,
         build_output: BuildOutput,
     ) -> Result<SourceBuild> {
@@ -342,12 +353,15 @@ impl<'a> BuildContext for BuildDispatch<'a> {
         let builder = SourceBuild::setup(
             source,
             subdirectory,
+            install_path,
             dist_name,
             dist_version,
             self.interpreter,
             self,
             self.source_build_context.clone(),
             version_id,
             self.index_locations,
+            sources,
             self.config_settings.clone(),
             self.build_isolation,
             build_kind,
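The `.flexibility(Flexibility::Fixed)` call added above pins the resolver while building sources. The call shape is a plain consuming builder; here is a toy version under assumed names (only `OptionsBuilder`, `Flexibility::Fixed`, and the method chain come from the diff — fields and defaults are guesses, not uv's implementation):

    // Toy builder mirroring the call shape in the diff.
    #[derive(Clone, Copy, Debug, Default)]
    enum Flexibility {
        #[default]
        Configurable,
        Fixed,
    }

    #[derive(Debug, Default)]
    struct Options {
        flexibility: Flexibility,
    }

    #[derive(Default)]
    struct OptionsBuilder {
        flexibility: Flexibility,
    }

    impl OptionsBuilder {
        fn new() -> Self {
            Self::default()
        }

        // Consuming setter, so calls chain: new().flexibility(..).build()
        fn flexibility(mut self, flexibility: Flexibility) -> Self {
            self.flexibility = flexibility;
            self
        }

        fn build(self) -> Options {
            Options { flexibility: self.flexibility }
        }
    }

    fn main() {
        let options = OptionsBuilder::new().flexibility(Flexibility::Fixed).build();
        println!("{options:?}");
    }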
@@ -9,6 +9,9 @@ repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
 
+[lib]
+doctest = false
+
 [lints]
 workspace = true
 
@@ -80,38 +80,4 @@ impl FromStr for EggInfoFilename {
 }
 
 #[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn egg_info_filename() {
-        let filename = "zstandard-0.22.0-py3.12-darwin.egg-info";
-        let parsed = EggInfoFilename::from_str(filename).unwrap();
-        assert_eq!(parsed.name.as_ref(), "zstandard");
-        assert_eq!(
-            parsed.version.map(|v| v.to_string()),
-            Some("0.22.0".to_string())
-        );
-
-        let filename = "zstandard-0.22.0-py3.12.egg-info";
-        let parsed = EggInfoFilename::from_str(filename).unwrap();
-        assert_eq!(parsed.name.as_ref(), "zstandard");
-        assert_eq!(
-            parsed.version.map(|v| v.to_string()),
-            Some("0.22.0".to_string())
-        );
-
-        let filename = "zstandard-0.22.0.egg-info";
-        let parsed = EggInfoFilename::from_str(filename).unwrap();
-        assert_eq!(parsed.name.as_ref(), "zstandard");
-        assert_eq!(
-            parsed.version.map(|v| v.to_string()),
-            Some("0.22.0".to_string())
-        );
-
-        let filename = "zstandard.egg-info";
-        let parsed = EggInfoFilename::from_str(filename).unwrap();
-        assert_eq!(parsed.name.as_ref(), "zstandard");
-        assert!(parsed.version.is_none());
-    }
-}
+mod tests;
33 crates/uv-distribution-filename/src/egg/tests.rs Normal file
@@ -0,0 +1,33 @@
+use super::*;
+
+#[test]
+fn egg_info_filename() {
+    let filename = "zstandard-0.22.0-py3.12-darwin.egg-info";
+    let parsed = EggInfoFilename::from_str(filename).unwrap();
+    assert_eq!(parsed.name.as_ref(), "zstandard");
+    assert_eq!(
+        parsed.version.map(|v| v.to_string()),
+        Some("0.22.0".to_string())
+    );
+
+    let filename = "zstandard-0.22.0-py3.12.egg-info";
+    let parsed = EggInfoFilename::from_str(filename).unwrap();
+    assert_eq!(parsed.name.as_ref(), "zstandard");
+    assert_eq!(
+        parsed.version.map(|v| v.to_string()),
+        Some("0.22.0".to_string())
+    );
+
+    let filename = "zstandard-0.22.0.egg-info";
+    let parsed = EggInfoFilename::from_str(filename).unwrap();
+    assert_eq!(parsed.name.as_ref(), "zstandard");
+    assert_eq!(
+        parsed.version.map(|v| v.to_string()),
+        Some("0.22.0".to_string())
+    );
+
+    let filename = "zstandard.egg-info";
+    let parsed = EggInfoFilename::from_str(filename).unwrap();
+    assert_eq!(parsed.name.as_ref(), "zstandard");
+    assert!(parsed.version.is_none());
+}
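The four cases above all reduce to one split rule: the stem before `.egg-info` is `name[-version[-tags]]`. A std-only sketch of that rule (illustrative only — uv's real parser also validates and normalizes both fields):

    // Split "name[-version[-tags]].egg-info" into (name, optional version).
    // Illustrative: no validation or name normalization is done here.
    fn parse_egg_info(filename: &str) -> Option<(&str, Option<&str>)> {
        let stem = filename.strip_suffix(".egg-info")?;
        let mut parts = stem.splitn(3, '-');
        let name = parts.next()?;
        let version = parts.next(); // trailing "-py3.12-darwin" tags are ignored
        Some((name, version))
    }

    fn main() {
        assert_eq!(
            parse_egg_info("zstandard-0.22.0-py3.12-darwin.egg-info"),
            Some(("zstandard", Some("0.22.0")))
        );
        assert_eq!(parse_egg_info("zstandard.egg-info"), Some(("zstandard", None)));
    }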
@@ -170,58 +170,4 @@ enum SourceDistFilenameErrorKind {
 }
 
 #[cfg(test)]
-mod tests {
-    use std::str::FromStr;
-
-    use uv_normalize::PackageName;
-
-    use crate::{SourceDistExtension, SourceDistFilename};
-
-    /// Only test already normalized names since the parsing is lossy
-    ///
-    /// <https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-file-name>
-    /// <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode>
-    #[test]
-    fn roundtrip() {
-        for normalized in [
-            "foo_lib-1.2.3.zip",
-            "foo_lib-1.2.3a3.zip",
-            "foo_lib-1.2.3.tar.gz",
-            "foo_lib-1.2.3.tar.bz2",
-            "foo_lib-1.2.3.tar.zst",
-        ] {
-            let ext = SourceDistExtension::from_path(normalized).unwrap();
-            assert_eq!(
-                SourceDistFilename::parse(
-                    normalized,
-                    ext,
-                    &PackageName::from_str("foo_lib").unwrap()
-                )
-                .unwrap()
-                .to_string(),
-                normalized
-            );
-        }
-    }
-
-    #[test]
-    fn errors() {
-        for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip"] {
-            let ext = SourceDistExtension::from_path(invalid).unwrap();
-            assert!(
-                SourceDistFilename::parse(invalid, ext, &PackageName::from_str("a").unwrap())
-                    .is_err()
-            );
-        }
-    }
-
-    #[test]
-    fn name_too_long() {
-        assert!(SourceDistFilename::parse(
-            "foo.zip",
-            SourceDistExtension::Zip,
-            &PackageName::from_str("foo-lib").unwrap()
-        )
-        .is_err());
-    }
-}
+mod tests;
48 crates/uv-distribution-filename/src/source_dist/tests.rs Normal file
@@ -0,0 +1,48 @@
+use std::str::FromStr;
+
+use uv_normalize::PackageName;
+
+use crate::{SourceDistExtension, SourceDistFilename};
+
+/// Only test already normalized names since the parsing is lossy
+///
+/// <https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-file-name>
+/// <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode>
+#[test]
+fn roundtrip() {
+    for normalized in [
+        "foo_lib-1.2.3.zip",
+        "foo_lib-1.2.3a3.zip",
+        "foo_lib-1.2.3.tar.gz",
+        "foo_lib-1.2.3.tar.bz2",
+        "foo_lib-1.2.3.tar.zst",
+    ] {
+        let ext = SourceDistExtension::from_path(normalized).unwrap();
+        assert_eq!(
+            SourceDistFilename::parse(normalized, ext, &PackageName::from_str("foo_lib").unwrap())
+                .unwrap()
+                .to_string(),
+            normalized
+        );
+    }
+}
+
+#[test]
+fn errors() {
+    for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip"] {
+        let ext = SourceDistExtension::from_path(invalid).unwrap();
+        assert!(
+            SourceDistFilename::parse(invalid, ext, &PackageName::from_str("a").unwrap()).is_err()
+        );
+    }
+}
+
+#[test]
+fn name_too_long() {
+    assert!(SourceDistFilename::parse(
+        "foo.zip",
+        SourceDistExtension::Zip,
+        &PackageName::from_str("foo-lib").unwrap()
+    )
+    .is_err());
+}
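The doc comment above hints at why only normalized names are tested: the grammar is `{name}-{version}.{ext}`, and a hyphen in the wrong place makes the split ambiguous. A small illustrative sketch of that ambiguity (not uv's implementation, which works from the expected package name and then parses the remainder as a version):

    // Split a source-dist stem on its final hyphen. Illustrative only.
    fn split_name_version(stem: &str) -> Option<(&str, &str)> {
        stem.rsplit_once('-')
    }

    fn main() {
        assert_eq!(split_name_version("foo_lib-1.2.3"), Some(("foo_lib", "1.2.3")));
        // The stray hyphen shifts the split, so the name no longer matches "a":
        assert_eq!(split_name_version("a-1.2.3-gamma.3"), Some(("a-1.2.3", "gamma.3")));
    }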
@@ -234,101 +234,4 @@ pub enum WheelFilenameError {
 }
 
 #[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn err_not_whl_extension() {
-        let err = WheelFilename::from_str("foo.rs").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo.rs" is invalid: Must end with .whl"###);
-    }
-
-    #[test]
-    fn err_1_part_empty() {
-        let err = WheelFilename::from_str(".whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename ".whl" is invalid: Must have a version"###);
-    }
-
-    #[test]
-    fn err_1_part_no_version() {
-        let err = WheelFilename::from_str("foo.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo.whl" is invalid: Must have a version"###);
-    }
-
-    #[test]
-    fn err_2_part_no_pythontag() {
-        let err = WheelFilename::from_str("foo-version.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo-version.whl" is invalid: Must have a Python tag"###);
-    }
-
-    #[test]
-    fn err_3_part_no_abitag() {
-        let err = WheelFilename::from_str("foo-version-python.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python.whl" is invalid: Must have an ABI tag"###);
-    }
-
-    #[test]
-    fn err_4_part_no_platformtag() {
-        let err = WheelFilename::from_str("foo-version-python-abi.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python-abi.whl" is invalid: Must have a platform tag"###);
-    }
-
-    #[test]
-    fn err_too_many_parts() {
-        let err =
-            WheelFilename::from_str("foo-1.2.3-build-python-abi-platform-oops.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-build-python-abi-platform-oops.whl" is invalid: Must have 5 or 6 components, but has more"###);
-    }
-
-    #[test]
-    fn err_invalid_package_name() {
-        let err = WheelFilename::from_str("f!oo-1.2.3-python-abi-platform.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "f!oo-1.2.3-python-abi-platform.whl" has an invalid package name"###);
-    }
-
-    #[test]
-    fn err_invalid_version() {
-        let err = WheelFilename::from_str("foo-x.y.z-python-abi-platform.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo-x.y.z-python-abi-platform.whl" has an invalid version: expected version to start with a number, but no leading ASCII digits were found"###);
-    }
-
-    #[test]
-    fn err_invalid_build_tag() {
-        let err = WheelFilename::from_str("foo-1.2.3-tag-python-abi-platform.whl").unwrap_err();
-        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-tag-python-abi-platform.whl" has an invalid build tag: must start with a digit"###);
-    }
-
-    #[test]
-    fn ok_single_tags() {
-        insta::assert_debug_snapshot!(WheelFilename::from_str("foo-1.2.3-foo-bar-baz.whl"));
-    }
-
-    #[test]
-    fn ok_multiple_tags() {
-        insta::assert_debug_snapshot!(WheelFilename::from_str(
-            "foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl"
-        ));
-    }
-
-    #[test]
-    fn ok_build_tag() {
-        insta::assert_debug_snapshot!(WheelFilename::from_str(
-            "foo-1.2.3-202206090410-python-abi-platform.whl"
-        ));
-    }
-
-    #[test]
-    fn from_and_to_string() {
-        let wheel_names = &[
-            "django_allauth-0.51.0-py3-none-any.whl",
-            "osm2geojson-0.2.4-py3-none-any.whl",
-            "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-        ];
-        for wheel_name in wheel_names {
-            assert_eq!(
-                WheelFilename::from_str(wheel_name).unwrap().to_string(),
-                *wheel_name
-            );
-        }
-    }
-}
+mod tests;
@@ -0,0 +1,27 @@
+---
+source: crates/uv-distribution-filename/src/wheel/tests.rs
+expression: "WheelFilename::from_str(\"foo-1.2.3-202206090410-python-abi-platform.whl\")"
+---
+Ok(
+    WheelFilename {
+        name: PackageName(
+            "foo",
+        ),
+        version: "1.2.3",
+        build_tag: Some(
+            BuildTag(
+                202206090410,
+                None,
+            ),
+        ),
+        python_tag: [
+            "python",
+        ],
+        abi_tag: [
+            "abi",
+        ],
+        platform_tag: [
+            "platform",
+        ],
+    },
+)
@@ -0,0 +1,29 @@
+---
+source: crates/uv-distribution-filename/src/wheel/tests.rs
+expression: "WheelFilename::from_str(\"foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl\")"
+---
+Ok(
+    WheelFilename {
+        name: PackageName(
+            "foo",
+        ),
+        version: "1.2.3",
+        build_tag: None,
+        python_tag: [
+            "ab",
+            "cd",
+            "ef",
+        ],
+        abi_tag: [
+            "gh",
+        ],
+        platform_tag: [
+            "ij",
+            "kl",
+            "mn",
+            "op",
+            "qr",
+            "st",
+        ],
+    },
+)
@@ -0,0 +1,22 @@
+---
+source: crates/uv-distribution-filename/src/wheel/tests.rs
+expression: "WheelFilename::from_str(\"foo-1.2.3-foo-bar-baz.whl\")"
+---
+Ok(
+    WheelFilename {
+        name: PackageName(
+            "foo",
+        ),
+        version: "1.2.3",
+        build_tag: None,
+        python_tag: [
+            "foo",
+        ],
+        abi_tag: [
+            "bar",
+        ],
+        platform_tag: [
+            "baz",
+        ],
+    },
+)
95 crates/uv-distribution-filename/src/wheel/tests.rs Normal file
@@ -0,0 +1,95 @@
+use super::*;
+
+#[test]
+fn err_not_whl_extension() {
+    let err = WheelFilename::from_str("foo.rs").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo.rs" is invalid: Must end with .whl"###);
+}
+
+#[test]
+fn err_1_part_empty() {
+    let err = WheelFilename::from_str(".whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename ".whl" is invalid: Must have a version"###);
+}
+
+#[test]
+fn err_1_part_no_version() {
+    let err = WheelFilename::from_str("foo.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo.whl" is invalid: Must have a version"###);
+}
+
+#[test]
+fn err_2_part_no_pythontag() {
+    let err = WheelFilename::from_str("foo-version.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo-version.whl" is invalid: Must have a Python tag"###);
+}
+
+#[test]
+fn err_3_part_no_abitag() {
+    let err = WheelFilename::from_str("foo-version-python.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python.whl" is invalid: Must have an ABI tag"###);
+}
+
+#[test]
+fn err_4_part_no_platformtag() {
+    let err = WheelFilename::from_str("foo-version-python-abi.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo-version-python-abi.whl" is invalid: Must have a platform tag"###);
+}
+
+#[test]
+fn err_too_many_parts() {
+    let err = WheelFilename::from_str("foo-1.2.3-build-python-abi-platform-oops.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-build-python-abi-platform-oops.whl" is invalid: Must have 5 or 6 components, but has more"###);
+}
+
+#[test]
+fn err_invalid_package_name() {
+    let err = WheelFilename::from_str("f!oo-1.2.3-python-abi-platform.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "f!oo-1.2.3-python-abi-platform.whl" has an invalid package name"###);
+}
+
+#[test]
+fn err_invalid_version() {
+    let err = WheelFilename::from_str("foo-x.y.z-python-abi-platform.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo-x.y.z-python-abi-platform.whl" has an invalid version: expected version to start with a number, but no leading ASCII digits were found"###);
+}
+
+#[test]
+fn err_invalid_build_tag() {
+    let err = WheelFilename::from_str("foo-1.2.3-tag-python-abi-platform.whl").unwrap_err();
+    insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-tag-python-abi-platform.whl" has an invalid build tag: must start with a digit"###);
+}
+
+#[test]
+fn ok_single_tags() {
+    insta::assert_debug_snapshot!(WheelFilename::from_str("foo-1.2.3-foo-bar-baz.whl"));
+}
+
+#[test]
+fn ok_multiple_tags() {
+    insta::assert_debug_snapshot!(WheelFilename::from_str(
+        "foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl"
+    ));
+}
+
+#[test]
+fn ok_build_tag() {
+    insta::assert_debug_snapshot!(WheelFilename::from_str(
+        "foo-1.2.3-202206090410-python-abi-platform.whl"
+    ));
+}
+
+#[test]
+fn from_and_to_string() {
+    let wheel_names = &[
+        "django_allauth-0.51.0-py3-none-any.whl",
+        "osm2geojson-0.2.4-py3-none-any.whl",
+        "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+    ];
+    for wheel_name in wheel_names {
+        assert_eq!(
+            WheelFilename::from_str(wheel_name).unwrap().to_string(),
+            *wheel_name
+        );
+    }
+}
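All of the error cases above fall out of one rule: a wheel stem has exactly 5 or 6 dash-separated fields, `name-version[-build]-python-abi-platform`. A compact illustrative sketch of that split (not uv's implementation, which also validates each field):

    // Split a wheel filename into its dash-separated fields and enforce the
    // 5-or-6 component rule. Illustrative only; field validation is omitted.
    fn split_wheel(filename: &str) -> Result<Vec<&str>, String> {
        let stem = filename
            .strip_suffix(".whl")
            .ok_or_else(|| "Must end with .whl".to_string())?;
        let fields: Vec<&str> = stem.split('-').collect();
        match fields.len() {
            5 | 6 => Ok(fields),
            n if n < 5 => Err(format!("expected at least 5 components, found {n}")),
            _ => Err("Must have 5 or 6 components, but has more".to_string()),
        }
    }

    fn main() {
        assert!(split_wheel("foo-1.2.3-py3-none-any.whl").is_ok()); // 5 fields
        assert!(split_wheel("foo-1.2.3-202206090410-python-abi-platform.whl").is_ok()); // 6 fields
        assert!(split_wheel("foo-1.2.3-build-python-abi-platform-oops.whl").is_err()); // 7 fields
    }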
@@ -9,10 +9,14 @@ repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
 
+[lib]
+doctest = false
+
 [lints]
 workspace = true
 
 [dependencies]
 uv-auth = { workspace = true }
 uv-cache-info = { workspace = true }
 uv-cache-key = { workspace = true }
 uv-distribution-filename = { workspace = true }
@@ -25,6 +29,7 @@ uv-platform-tags = { workspace = true }
 uv-pypi-types = { workspace = true }
 
 anyhow = { workspace = true }
 bitflags = { workspace = true }
 fs-err = { workspace = true }
 itertools = { workspace = true }
+jiff = { workspace = true }
Some files were not shown because too many files have changed in this diff.