diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 00000000..403e19d1 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,51 @@ +name: CodSpeed Benchmarks + +on: + push: + branches: + - "main" + pull_request: + types: + - opened + - synchronize + - reopened + - ready_for_review + - labeled + # `workflow_dispatch` allows CodSpeed to trigger backtest + # performance analysis in order to generate initial data. + workflow_dispatch: + +permissions: {} + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + benchmarks: + name: Run benchmarks + # PRs only get benchmarked if they have the `run-benchmarks` label. + if: | + contains(github.event.pull_request.labels.*.name, 'run-benchmarks') + || github.event_name == 'push' + || github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 + with: + cache-all-crates: true + + - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6 + + - name: Build zizmor (release) + run: cargo build --release + + - name: Run the benchmarks + uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1 + with: + mode: walltime + run: make bench diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eb997be9..5a5b056e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,6 +5,11 @@ on: branches: - main pull_request: + types: + - opened + - synchronize + - reopened + - ready_for_review permissions: {} @@ -17,14 +22,14 @@ jobs: name: Lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 
with: persist-credentials: false - name: Format run: cargo fmt --check - - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 - name: Lint run: cargo clippy -- --deny warnings @@ -33,13 +38,13 @@ jobs: name: Test runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 - - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6 - name: Test dependencies run: | @@ -62,11 +67,11 @@ jobs: name: Test site build runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6 - name: Test site run: make site diff --git a/.github/workflows/codegen.yml b/.github/workflows/codegen.yml index 4c09cc12..00dcb468 100644 --- a/.github/workflows/codegen.yml +++ b/.github/workflows/codegen.yml @@ -22,7 +22,7 @@ jobs: pull-requests: write # for opening PRs steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false @@ -31,7 +31,7 @@ jobs: make refresh-schemas - name: create PR - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 + uses: 
peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0 with: draft: true commit-message: "[BOT] update JSON schemas from SchemaStore" @@ -59,18 +59,18 @@ jobs: pull-requests: write # for opening PRs steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6 - name: try to refresh context capabilities run: | make webhooks-to-contexts - name: create PR - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0 with: draft: true commit-message: "[BOT] update context capabilities" @@ -97,18 +97,18 @@ jobs: pull-requests: write # for opening PRs steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6 - name: try to refresh CodeQL injection sinks run: | make codeql-injection-sinks - name: create PR - uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 + uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0 with: draft: true commit-message: "[BOT] update CodeQL injection sinks" diff --git a/.github/workflows/release-binaries.yml b/.github/workflows/release-binaries.yml index a7bf492e..c76760dd 100644 --- a/.github/workflows/release-binaries.yml +++ b/.github/workflows/release-binaries.yml @@ -27,7 +27,7 @@ jobs: target: x86_64-pc-windows-msvc steps: - - uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false @@ -60,7 +60,7 @@ jobs: shell: bash - name: Upload artifact - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: artifacts-${{ matrix.target }} path: ${{ steps.archive-release.outputs.filename }} @@ -78,7 +78,7 @@ jobs: steps: - name: Download artifacts - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 with: pattern: artifacts-* path: distrib/ diff --git a/.github/workflows/release-docker.yml b/.github/workflows/release-docker.yml index 45851771..132ad257 100644 --- a/.github/workflows/release-docker.yml +++ b/.github/workflows/release-docker.yml @@ -43,7 +43,7 @@ jobs: packages: write steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false @@ -54,7 +54,7 @@ jobs: - name: Extract Docker metadata id: docker-metadata - uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0 + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0 with: images: "${{ env.ZIZMOR_IMAGE }}" @@ -86,7 +86,7 @@ jobs: shell: bash - name: Upload digest - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: digests-${{ matrix.image.platform-pair }} path: ${{ runner.temp }}/digests/* @@ -107,7 +107,7 @@ jobs: steps: - name: Download digests - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 + uses: 
actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 with: path: ${{ runner.temp }}/digests pattern: digests-* @@ -128,7 +128,7 @@ jobs: - name: Extract Docker metadata id: docker-metadata - uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0 + uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0 env: DOCKER_METADATA_ANNOTATIONS_LEVELS: index with: diff --git a/.github/workflows/release-pypi.yml b/.github/workflows/release-pypi.yml index 3079a1a7..41b4dcaf 100644 --- a/.github/workflows/release-pypi.yml +++ b/.github/workflows/release-pypi.yml @@ -37,7 +37,7 @@ jobs: # target: ppc64le # manylinux: "2_28" steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Build wheels @@ -47,7 +47,7 @@ jobs: args: --release --out dist --manifest-path crates/zizmor/Cargo.toml manylinux: ${{ matrix.platform.manylinux }} - name: Upload wheels - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: wheels-linux-${{ matrix.platform.target }} path: dist @@ -67,7 +67,7 @@ jobs: - runner: ubuntu-24.04 target: armv7 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Build wheels @@ -77,7 +77,7 @@ jobs: args: --release --out dist --manifest-path crates/zizmor/Cargo.toml manylinux: musllinux_1_2 - name: Upload wheels - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: wheels-musllinux-${{ matrix.platform.target }} path: dist @@ -93,7 +93,7 @@ jobs: - runner: windows-latest target: 
x86 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Build wheels @@ -102,7 +102,7 @@ jobs: target: ${{ matrix.platform.target }} args: --release --out dist --manifest-path crates/zizmor/Cargo.toml - name: Upload wheels - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: wheels-windows-${{ matrix.platform.target }} path: dist @@ -118,7 +118,7 @@ jobs: - runner: macos-15 target: aarch64 steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Build wheels @@ -127,7 +127,7 @@ jobs: target: ${{ matrix.platform.target }} args: --release --out dist --manifest-path crates/zizmor/Cargo.toml - name: Upload wheels - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: wheels-macos-${{ matrix.platform.target }} path: dist @@ -136,7 +136,7 @@ jobs: name: Build source distribution runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Build sdist @@ -145,7 +145,7 @@ jobs: command: sdist args: --out dist --manifest-path crates/zizmor/Cargo.toml - name: Upload sdist - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 with: name: wheels-sdist path: dist @@ -161,7 +161,7 @@ jobs: permissions: id-token: write # Trusted Publishing + PEP 740 attestations steps: - - uses: 
actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 + - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 - name: Attest uses: astral-sh/attest-action@2c727738cea36d6c97dd85eb133ea0e0e8fe754b # v0.0.4 with: diff --git a/.github/workflows/release-support-crate.yml b/.github/workflows/release-support-crate.yml index 5c632fbb..51bb1fc5 100644 --- a/.github/workflows/release-support-crate.yml +++ b/.github/workflows/release-support-crate.yml @@ -53,7 +53,7 @@ jobs: permissions: id-token: write # for trusted publishing to crates.io steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false diff --git a/.github/workflows/release-zizmor-crate.yml b/.github/workflows/release-zizmor-crate.yml index ab1e88d9..6b5e45ba 100644 --- a/.github/workflows/release-zizmor-crate.yml +++ b/.github/workflows/release-zizmor-crate.yml @@ -19,7 +19,7 @@ jobs: permissions: id-token: write # for trusted publishing to crates.io steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false diff --git a/.github/workflows/site.yml b/.github/workflows/site.yml index 00ed3b5a..bd0734a6 100644 --- a/.github/workflows/site.yml +++ b/.github/workflows/site.yml @@ -26,12 +26,12 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6 - name: build site run: make site diff --git a/.github/workflows/test-output.yml 
b/.github/workflows/test-output.yml index 9ef0e343..ec9fdec6 100644 --- a/.github/workflows/test-output.yml +++ b/.github/workflows/test-output.yml @@ -19,18 +19,18 @@ jobs: pull-requests: write # for 'Leave comment' step steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 - name: Run zizmor run: | cargo run -- --format sarif . > results.sarif - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2 + uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8 with: sarif_file: results.sarif category: zizmor-test-sarif-presentation @@ -52,11 +52,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 - name: Run zizmor run: | @@ -67,3 +67,27 @@ jobs: --no-exit-codes \ --format github \ crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml + + test-plain-presentation: + name: Test plain text presentation + runs-on: ubuntu-latest + if: contains(github.event.pull_request.labels.*.name, 'test-plain-presentation') + permissions: {} + + steps: + - name: Checkout repository + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 + with: + persist-credentials: false + + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 + + - name: Run zizmor + run: | + # Normally we'd want 
a workflow to fail if the audit fails, + # but we're only testing presentation here. + cargo run \ + -- \ + --no-exit-codes \ + --format plain \ + crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml diff --git a/.github/workflows/wolfi-update-check.yml b/.github/workflows/wolfi-update-check.yml index f438ca08..b83931af 100644 --- a/.github/workflows/wolfi-update-check.yml +++ b/.github/workflows/wolfi-update-check.yml @@ -19,7 +19,7 @@ jobs: issues: write # to create an issue if a new version is found steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false sparse-checkout: support/ diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml index 359dca6d..92a0f936 100644 --- a/.github/workflows/zizmor.yml +++ b/.github/workflows/zizmor.yml @@ -5,6 +5,11 @@ on: branches: ["main"] pull_request: branches: ["**"] + types: + - opened + - synchronize + - reopened + - ready_for_review permissions: {} @@ -16,12 +21,12 @@ jobs: security-events: write steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Run zizmor 🌈 - uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0 + uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0 with: # intentionally not scanning the entire repository, # since it contains integration tests. 
diff --git a/.gitignore b/.gitignore index dd445d43..2a0a134d 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,6 @@ # pending snapshots .*.pending-snap + +# benchmarks +.codspeed/ diff --git a/Cargo.lock b/Cargo.lock index c544f2e0..a7638a54 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -58,9 +58,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "annotate-snippets" -version = "0.12.9" +version = "0.12.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44baf24dd94e781f74dfe67ffee75a09a57971ddf0f615a178b4f6d404b48ff" +checksum = "15580ece6ea97cbf832d60ba19c021113469480852c6a2a6beb0db28f097bf1f" dependencies = [ "anstyle", "unicode-width 0.2.2", @@ -228,12 +228,6 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.10.0" @@ -313,18 +307,18 @@ dependencies = [ [[package]] name = "camino" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" +checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" dependencies = [ "serde_core", ] [[package]] name = "cc" -version = "1.2.41" +version = "1.2.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7" +checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215" dependencies = [ "find-msvc-tools", "shlex", @@ -344,9 +338,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "clap" -version = 
"4.5.51" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", "clap_derive", @@ -364,9 +358,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.51" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstream", "anstyle", @@ -376,9 +370,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.60" +version = "4.5.61" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e602857739c5a4291dfa33b5a298aeac9006185229a700e5810a3ef7272d971" +checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992" dependencies = [ "clap", ] @@ -564,6 +558,12 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + [[package]] name = "deranged" version = "0.5.4" @@ -705,9 +705,9 @@ dependencies = [ [[package]] name = "find-msvc-tools" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" [[package]] name = "flate2" @@ -721,11 +721,12 @@ dependencies = [ [[package]] name = "fluent-uri" -version = "0.1.4" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d" +checksum = 
"1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5" dependencies = [ - "bitflags 1.3.2", + "borrow-or-share", + "ref-cast", ] [[package]] @@ -923,9 +924,11 @@ dependencies = [ [[package]] name = "github-actions-models" -version = "0.39.0" +version = "0.42.0" dependencies = [ "indexmap", + "insta", + "self_cell", "serde", "serde_yaml", "tracing", @@ -952,9 +955,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" dependencies = [ "allocator-api2", "equivalent", @@ -975,12 +978,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "http" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", - "fnv", "itoa", ] @@ -1289,12 +1291,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", - "hashbrown 0.16.0", + "hashbrown 0.16.1", "serde", "serde_core", ] @@ -1315,9 +1317,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.43.2" +version = "1.44.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0" +checksum = "b5c943d4415edd8153251b6f197de5eb1640e56d84e8d9159bea190421c73698" 
dependencies = [ "console 0.15.11", "once_cell", @@ -1382,13 +1384,13 @@ dependencies = [ [[package]] name = "jsonschema" -version = "0.35.0" +version = "0.37.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0303b14f91cbac17c64aaf2ef60ab71fe5f34c3867cedcbca72c9dd15f5040fe" +checksum = "73c9ffb2b5c56d58030e1b532d8e8389da94590515f118cf35b5cb68e4764a7e" dependencies = [ "ahash", - "base64 0.22.1", "bytecount", + "data-encoding", "email_address", "fancy-regex", "fraction", @@ -1426,7 +1428,7 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.10.0", + "bitflags", "libc", "redox_syscall", ] @@ -1475,16 +1477,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" [[package]] -name = "lsp-types" -version = "0.97.0" +name = "ls-types" +version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071" +checksum = "7a7deb98ef9daaa7500324351a5bab7c80c644cfb86b4be0c4433b582af93510" dependencies = [ - "bitflags 1.3.2", - "fluent-uri 0.1.4", + "bitflags", + "fluent-uri 0.3.2", + "percent-encoding", "serde", "serde_json", - "serde_repr", ] [[package]] @@ -1780,9 +1782,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" +checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" dependencies = [ "memchr", "ucd-trie", @@ -1790,9 +1792,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.3" +version = "2.8.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de" +checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f" dependencies = [ "pest", "pest_generator", @@ -1800,9 +1802,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843" +checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625" dependencies = [ "pest", "pest_meta", @@ -1813,9 +1815,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.8.3" +version = "2.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a" +checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82" dependencies = [ "pest", "sha2", @@ -2046,7 +2048,7 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.10.0", + "bitflags", ] [[package]] @@ -2071,14 +2073,14 @@ dependencies = [ [[package]] name = "referencing" -version = "0.35.0" +version = "0.37.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22d0d0665043906aacf1d83bea9d61e5134f8f437815b84320e7facf8ff4e9c2" +checksum = "4283168a506f0dcbdce31c9f9cce3129c924da4c6bca46e46707fcb746d2d70c" dependencies = [ "ahash", "fluent-uri 0.4.1", "getrandom 0.3.4", - "hashbrown 0.16.0", + "hashbrown 0.16.1", "parking_lot", "percent-encoding", "serde_json", @@ -2127,9 +2129,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" -version = "0.12.24" +version = "0.12.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" +checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f" dependencies = [ "base64 0.22.1", "bytes", @@ -2222,7 +2224,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.10.0", + "bitflags", "errno", "libc", "linux-raw-sys", @@ -2345,7 +2347,7 @@ version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c" dependencies = [ - "bitflags 2.10.0", + "bitflags", "core-foundation", "core-foundation-sys", "libc", @@ -2488,17 +2490,6 @@ dependencies = [ "syn 2.0.108", ] -[[package]] -name = "serde_repr" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.108", -] - [[package]] name = "serde_spanned" version = "1.0.3" @@ -3002,11 +2993,11 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.10.0", + "bitflags", "bytes", "futures-util", "http", @@ -3026,17 +3017,16 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-lsp-server" -version = "0.22.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88f3f8ec0dcfdda4d908bad2882fe0f89cf2b606e78d16491323e918dfa95765" +checksum = "2f0e711655c89181a6bc6a2cc348131fcd9680085f5b06b6af13427a393a6e72" dependencies = [ "bytes", "dashmap", "futures", 
"httparse", - "lsp-types", + "ls-types", "memchr", - "percent-encoding", "serde", "serde_json", "tokio", @@ -3053,9 +3043,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -3064,9 +3054,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", @@ -3075,9 +3065,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c" dependencies = [ "once_cell", "valuable", @@ -3085,9 +3075,9 @@ dependencies = [ [[package]] name = "tracing-indicatif" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04d4e11e0e27acef25a47f27e9435355fecdc488867fa2bc90e75b0700d2823d" +checksum = "e1ef6990e0438749f0080573248e96631171a0b5ddfddde119aa5ba8c3a9c47e" dependencies = [ "indicatif", "tracing", @@ -3108,9 +3098,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.20" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" +checksum = 
"2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" dependencies = [ "matchers", "nu-ansi-term", @@ -3126,9 +3116,9 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.25.10" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87" +checksum = "974d205cc395652cfa8b37daa053fe56eebd429acf8dc055503fee648dae981e" dependencies = [ "cc", "regex", @@ -3140,9 +3130,9 @@ dependencies = [ [[package]] name = "tree-sitter-bash" -version = "0.25.0" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6" +checksum = "9e5ec769279cc91b561d3df0d8a5deb26b0ad40d183127f409494d6d8fc53062" dependencies = [ "cc", "tree-sitter-language", @@ -3150,7 +3140,7 @@ dependencies = [ [[package]] name = "tree-sitter-iter" -version = "0.0.2" +version = "0.0.3" dependencies = [ "tree-sitter", "tree-sitter-yaml", @@ -3164,9 +3154,9 @@ checksum = "c4013970217383f67b18aef68f6fb2e8d409bc5755227092d32efb0422ba24b8" [[package]] name = "tree-sitter-powershell" -version = "0.25.9" +version = "0.25.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae0e37101b110badaf99aa40460915a8797ceba15fc0ed22773280377a8dffb6" +checksum = "415ec6251d133d26b4f62c60721149fe36c315334f47812450187d6ea59cffdf" dependencies = [ "cc", "tree-sitter-language", @@ -3814,7 +3804,7 @@ checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" [[package]] name = "yamlpatch" -version = "0.7.0" +version = "0.8.0" dependencies = [ "indexmap", "insta", @@ -3830,7 +3820,7 @@ dependencies = [ [[package]] name = "yamlpath" -version = "0.29.0" +version = "0.31.0" dependencies = [ "line-index", "self_cell", @@ -3954,7 +3944,7 @@ dependencies = [ [[package]] name = "zizmor" -version = "1.18.0-rc3" +version = "1.19.0" dependencies = [ "annotate-snippets", 
"anstream", diff --git a/Cargo.toml b/Cargo.toml index ea8fef7d..863aca2b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,38 +21,38 @@ rust-version = "1.88.0" [workspace.dependencies] anyhow = "1.0.100" github-actions-expressions = { path = "crates/github-actions-expressions", version = "0.0.11" } -github-actions-models = { path = "crates/github-actions-models", version = "0.39.0" } +github-actions-models = { path = "crates/github-actions-models", version = "0.42.0" } itertools = "0.14.0" -pest = "2.8.3" -pest_derive = "2.8.3" +pest = "2.8.4" +pest_derive = "2.8.4" pretty_assertions = "1.4.1" -annotate-snippets = "0.12.9" +annotate-snippets = "0.12.10" anstream = "0.6.21" assert_cmd = "2.1.1" async-trait = "0.1.89" -camino = "1.2.1" -clap = "4.5.51" +camino = "1.2.2" +clap = "4.5.53" clap-verbosity-flag = { version = "3.0.4", default-features = false } -clap_complete = "4.5.60" +clap_complete = "4.5.61" clap_complete_nushell = "4.5.10" csv = "1.3.1" etcetera = "0.11.0" flate2 = "1.1.5" fst = "0.4.7" futures = "0.3" -http = "1.3.1" +http = "1.4.0" http-cache-reqwest = { version = "1.0.0-alpha.2", features = ["manager-moka"] } human-panic = "2.0.4" ignore = "0.4.25" -indexmap = { version = "2.11.4", features = ["serde"] } +indexmap = { version = "2.12.1", features = ["serde"] } indicatif = "0.18" -insta = "1.43.2" -jsonschema = "0.35.0" +insta = "1.44.3" +jsonschema = "0.37.4" line-index = "0.1.2" memchr = "2.7.6" owo-colors = "4.2.3" regex = "1.12.1" -reqwest = { version = "0.12.23", default-features = false } +reqwest = { version = "0.12.25", default-features = false } reqwest-middleware = "0.4.2" self_cell = "1" serde = { version = "1.0.228", features = ["derive"] } @@ -65,16 +65,18 @@ tar = "0.4.44" terminal-link = "0.1.0" thiserror = "2.0.17" tokio = { version = "1.47.1", features = ["rt-multi-thread", "io-std"] } -tower-lsp-server = "0.22" -tracing = "0.1.41" -tracing-indicatif = "0.3.13" +tower-lsp-server = "0.23" +tracing = "0.1.43" +tracing-indicatif = 
"0.3.14" tracing-subscriber = "0.3.20" -tree-sitter = "0.25.10" -tree-sitter-bash = "0.25.0" -tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.2" } -tree-sitter-powershell = "0.25.9" -yamlpath = { path = "crates/yamlpath", version = "0.29.0" } -yamlpatch = { path = "crates/yamlpatch", version = "0.7.0" } +tree-sitter = "0.26.3" +tree-sitter-bash = "0.25.1" +tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.3" } +# Exact version since the upstream performed a breaking change outside of semver. +# See: https://github.com/zizmorcore/zizmor/pull/1427 +tree-sitter-powershell = "=0.25.10" +yamlpath = { path = "crates/yamlpath", version = "0.31.0" } +yamlpatch = { path = "crates/yamlpatch", version = "0.8.0" } tree-sitter-yaml = "0.7.2" tikv-jemallocator = "0.6" diff --git a/Makefile b/Makefile index ccf4353b..a271bc8b 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ all: .PHONY: site site: - uv run --only-group docs zensical build + uv run --only-group docs zensical build --clean .PHONY: site-live site-live: @@ -42,6 +42,10 @@ codeql-injection-sinks: crates/zizmor/data/codeql-injection-sinks.json crates/zizmor/data/codeql-injection-sinks.json: support/codeql-injection-sinks.py $< > $@ +.PHONY: archived-repos +archived-repos: + support/archived-repos.py + .PHONY: pinact pinact: pinact run --update --verify @@ -49,4 +53,4 @@ pinact: .PHONY: bench bench: - uv run bench/benchmark.py --offline + uv run --only-group=bench pytest bench/ --codspeed diff --git a/bench/.gitignore b/bench/.gitignore deleted file mode 100644 index fbca2253..00000000 --- a/bench/.gitignore +++ /dev/null @@ -1 +0,0 @@ -results/ diff --git a/bench/__init__.py b/bench/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/bench/benchmark.py b/bench/benchmark.py deleted file mode 100644 index 2d45224f..00000000 --- a/bench/benchmark.py +++ /dev/null @@ -1,242 +0,0 @@ -# /// script -# requires-python = ">=3.12" -# /// - -import argparse -import 
hashlib -import json -import os -import shlex -import shutil -import subprocess -import sys -import tempfile -from contextlib import contextmanager -from pathlib import Path -from typing import Iterator, NoReturn, TypedDict - -_DEPS = ["hyperfine", "curl", "unzip"] - -_HERE = Path(__file__).parent -_PROJECT_ROOT = _HERE.parent -_ZIZMOR = _PROJECT_ROOT / "target" / "release" / "zizmor" - -assert (_PROJECT_ROOT / "Cargo.toml").is_file(), "Missing project root?" - -_BENCHMARKS = _HERE / "benchmarks.json" -_RESULTS = _HERE / "results" - -assert _BENCHMARKS.is_file(), f"Benchmarks file not found: {_BENCHMARKS}" -_RESULTS.mkdir(exist_ok=True) - -_CACHE_DIR = Path(tempfile.gettempdir()) / "zizmor-benchmark-cache" -_CACHE_DIR.mkdir(exist_ok=True) - -_GH_TOKEN = os.getenv("GH_TOKEN") - - -class Log: - def __init__(self, scope: str | None) -> None: - self.scopes = [scope] if scope else [] - - def info(self, message: str) -> None: - scopes = " ".join(f"[{s}]" for s in self.scopes) - print(f"[+] {scopes} {message}", file=sys.stderr) - - def warn(self, message: str) -> None: - scopes = " ".join(f"[{s}]" for s in self.scopes) - print(f"[!] 
{scopes} {message}", file=sys.stderr) - - def error(self, message: str) -> NoReturn: - self.warn(message) - sys.exit(1) - - @contextmanager - def scope(self, new_scope: str) -> Iterator[None]: - """Create a new logging scope.""" - self.scopes.append(new_scope) - try: - yield None - finally: - self.scopes.pop() - - -LOG = Log("benchmarks") - - -def _curl(url: str, expected_sha256: str) -> Path: - """Download a URL and cache it using content addressing with SHA256.""" - cached_file = _CACHE_DIR / expected_sha256 - if cached_file.exists(): - LOG.info("Using cached file") - return cached_file - - result = subprocess.run( - ["curl", "-fsSL", url], - capture_output=True, - check=True, - ) - - content = result.stdout - content_hash = hashlib.sha256(content).hexdigest() - - if content_hash != expected_sha256: - LOG.error(f"Hash mismatch: {expected_sha256} != {content_hash}") - - cached_file.write_bytes(content) - - return cached_file - - -def _unzip(archive_path: Path, extract_name: str) -> Path: - """Extract an archive to a directory in the cache.""" - extract_dir = _CACHE_DIR / extract_name - - if extract_dir.exists(): - LOG.info("Using cached extraction") - return extract_dir - - extract_dir.mkdir(exist_ok=True) - - subprocess.run( - ["unzip", "-q", str(archive_path), "-d", str(extract_dir)], - check=True, - ) - - LOG.info(f"Extracted {archive_path.name} to {extract_dir}") - return extract_dir - - -class Benchmark(TypedDict): - name: str - source_type: str - source: str - source_sha256: str - stencil: str - online: bool | None - - -Plan = list[str] - - -class Bench: - def __init__(self, benchmark: Benchmark) -> None: - self.benchmark = benchmark - - def plan(self) -> Plan: - match self.benchmark["source_type"]: - case "archive-url": - url = self.benchmark["source"] - sha256 = self.benchmark["source_sha256"] - archive = _curl(url, sha256) - inputs = [str(_unzip(archive, self.benchmark["name"]))] - case _: - LOG.error(f"Unknown source type: 
{self.benchmark['source_type']}") - - if self.benchmark.get("online", False): - if not _GH_TOKEN: - LOG.error("Benchmark requires online access but GH_TOKEN is not set") - - stencil = self.benchmark["stencil"] - command = stencil.replace("$ZIZMOR", str(_ZIZMOR)).replace( - "$INPUTS", " ".join(inputs) - ) - return shlex.split(command) - - def run(self, plan: Plan, *, dry_run: bool) -> None: - command = shlex.join(plan) - - result_file = _RESULTS / f"{self.benchmark['name']}.json" - if result_file.exists() and not dry_run: - LOG.warn("clobbering existing result file") - - hyperfine_command = [ - "hyperfine", - "--warmup", - "3", - # NOTE: not needed because we use --no-exit-codes in the stencil - # "--ignore-failure", - "--export-json", - str(result_file), - command, - ] - - if dry_run: - LOG.warn(f"would have run: {shlex.join(hyperfine_command)}") - return - - try: - subprocess.run( - hyperfine_command, - check=True, - ) - except subprocess.CalledProcessError: - LOG.error("run failed, see above for details") - - # Stupid hack: fixup each result file's results[0].command - # to be a more useful benchmark identifier, since bencher - # apparently keys on these. - result_json = json.loads(result_file.read_bytes()) - result_json["results"][0]["command"] = f"zizmor::{self.benchmark['name']}" - result_file.write_text(json.dumps(result_json)) - - LOG.info(f"run written to {result_file}") - - -def main() -> None: - parser = argparse.ArgumentParser() - parser.add_argument( - "--dry-run", action="store_true", help="Show plans without running them" - ) - parser.add_argument( - "--offline", action="store_true", help="Run only offline benchmarks" - ) - - args = parser.parse_args() - - missing = [] - for dep in _DEPS: - if not shutil.which(dep): - missing.append(dep) - - if missing: - LOG.error( - f"Missing dependencies: {', '.join(missing)}. " - "Please install them before running benchmarks." 
- ) - - LOG.info("ensuring we have a benchable zizmor build") - subprocess.run( - ["cargo", "build", "--release", "-p", "zizmor"], - check=True, - cwd=_PROJECT_ROOT, - ) - - if not _ZIZMOR.is_file(): - LOG.error("zizmor build presumably failed, see above for details") - - LOG.info(f"using cache dir: {_CACHE_DIR}") - - benchmarks: list[Benchmark] = json.loads(_BENCHMARKS.read_text(encoding="utf-8")) - LOG.info(f"found {len(benchmarks)} benchmarks in {_BENCHMARKS.name}") - - if args.offline: - benchmarks = [b for b in benchmarks if not b.get("online", False)] - LOG.info(f"filtered to {len(benchmarks)} offline benchmarks") - - benches = [Bench(benchmark) for benchmark in benchmarks] - plans = [] - with LOG.scope("plan"): - for bench in benches: - with LOG.scope(bench.benchmark["name"]): - LOG.info("beginning plan") - plans.append(bench.plan()) - - with LOG.scope("run"): - for bench, plan in zip(benches, plans): - with LOG.scope(bench.benchmark["name"]): - bench.run(plan, dry_run=args.dry_run) - - -if __name__ == "__main__": - main() diff --git a/bench/benchmarks.json b/bench/benchmarks.json deleted file mode 100644 index 5d253341..00000000 --- a/bench/benchmarks.json +++ /dev/null @@ -1,24 +0,0 @@ -[ - { - "name": "grafana-9f212d11d0ac", - "source_type": "archive-url", - "source": "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip", - "source_sha256": "c6d42b52c8d912db2698d8b06f227de46f0c2d04cc757841792ed6567f0c56c7", - "stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS" - }, - { - "name": "cpython-48f88310044c", - "source_type": "archive-url", - "source": "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip", - "source_sha256": "a52a67f1dd9cfa67c7d1305d5b9639629abe247b2c32f01b77f790ddf8b49503", - "stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS" - }, - { - "name": "gha-hazmat-da3c3cd-online", - "source_type": "archive-url", - 
"source": "https://github.com/woodruffw/gha-hazmat/archive/da3c3cd.zip", - "source_sha256": "f0aa224c5203218ad26e9f104d8dc3eaf8b322c97056add04d79f4a0d53c8f1f", - "stencil": "$ZIZMOR --format=plain --no-exit-codes --no-config $INPUTS", - "online": true - } -] diff --git a/bench/common.py b/bench/common.py new file mode 100644 index 00000000..c9b98a59 --- /dev/null +++ b/bench/common.py @@ -0,0 +1,12 @@ +import subprocess +from pathlib import Path + +_HERE = Path(__file__).parent +_ZIZMOR = _HERE.parent / "target" / "release" / "zizmor" + + +def zizmor(args: list[str], *, check: bool = False) -> None: + assert _ZIZMOR.is_file(), ( + f"zizmor binary not found at {_ZIZMOR}, run prepare() first" + ) + subprocess.run([str(_ZIZMOR), *args], check=check) diff --git a/bench/conftest.py b/bench/conftest.py new file mode 100644 index 00000000..e69de29b diff --git a/bench/test_bench_basic.py b/bench/test_bench_basic.py new file mode 100644 index 00000000..b9a3e17a --- /dev/null +++ b/bench/test_bench_basic.py @@ -0,0 +1,13 @@ +import pytest + +from .common import zizmor + + +@pytest.mark.benchmark +def test_zizmor_startup(): + zizmor(["--version"]) + + +@pytest.mark.benchmark +def test_zizmor_help(): + zizmor(["--help"]) diff --git a/bench/test_bench_offline.py b/bench/test_bench_offline.py new file mode 100644 index 00000000..32952c60 --- /dev/null +++ b/bench/test_bench_offline.py @@ -0,0 +1,68 @@ +import io +import zipfile +from pathlib import Path + +import pytest +import urllib3 + +from bench.common import zizmor + + +@pytest.fixture(scope="session") +def grafana(tmp_path_factory) -> Path: + archive = "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip" + raw_zip = urllib3.PoolManager().request("GET", archive).data + + path = tmp_path_factory.mktemp("grafana") + + zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path) + + return path + + +@pytest.fixture(scope="session") +def cpython(tmp_path_factory) -> Path: + archive = 
"https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip" + raw_zip = urllib3.PoolManager().request("GET", archive).data + + path = tmp_path_factory.mktemp("cpython") + + zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path) + + return path + + +@pytest.mark.benchmark +def test_zizmor_offline_grafana_9f212d11d0(grafana: Path): + """ + Runs `zizmor --offline --format=plain --no-exit-codes --no-config ` + """ + + zizmor( + [ + "--offline", + "--format=plain", + "--no-exit-codes", + "--no-config", + str(grafana), + ], + check=True, + ) + + +@pytest.mark.benchmark +def test_zizmor_offline_cpython_48f88310044c(cpython: Path): + """ + Runs `zizmor --offline --format=plain --no-exit-codes --no-config ` + """ + + zizmor( + [ + "--offline", + "--format=plain", + "--no-exit-codes", + "--no-config", + str(cpython), + ], + check=True, + ) diff --git a/bench/test_bench_online.py b/bench/test_bench_online.py new file mode 100644 index 00000000..4f1f9687 --- /dev/null +++ b/bench/test_bench_online.py @@ -0,0 +1,47 @@ +import os + +import pytest + +from bench.common import zizmor + + +@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set") +def test_zizmor_online_gha_hazmat_da3c3cd(benchmark): + """ + Runs `zizmor --format=plain --no-exit-codes --no-config woodruffw/gha-hazmat@da3c3cd` + """ + + benchmark.pedantic( + zizmor, + args=( + [ + "--format=plain", + "--no-exit-codes", + "--no-config", + "woodruffw/gha-hazmat@da3c3cd", + ], + ), + warmup_rounds=2, + iterations=10, + ) + + +@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set") +def test_zizmor_online_cpython_48f88310044c(benchmark): + """ + Runs `zizmor --format=plain --no-exit-codes --no-config python/cpython@48f88310044c` + """ + + benchmark.pedantic( + zizmor, + args=( + [ + "--format=plain", + "--no-exit-codes", + "--no-config", + "python/cpython@48f88310044c", + ], + ), + warmup_rounds=2, + iterations=10, + ) diff --git 
a/crates/github-actions-models/Cargo.toml b/crates/github-actions-models/Cargo.toml index cc3bd71b..dbe7611a 100644 --- a/crates/github-actions-models/Cargo.toml +++ b/crates/github-actions-models/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "github-actions-models" -version = "0.39.0" +version = "0.42.0" description = "Unofficial, high-quality data models for GitHub Actions workflows, actions, and related components" repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/github-actions-models" keywords = ["github", "ci"] @@ -17,8 +17,10 @@ workspace = true [dependencies] indexmap.workspace = true +self_cell.workspace = true serde.workspace = true serde_yaml.workspace = true tracing.workspace = true [dev-dependencies] +insta.workspace = true diff --git a/crates/github-actions-models/src/common.rs b/crates/github-actions-models/src/common.rs index 5b230aa9..101d9896 100644 --- a/crates/github-actions-models/src/common.rs +++ b/crates/github-actions-models/src/common.rs @@ -1,11 +1,9 @@ //! Shared models and utilities. -use std::{ - fmt::{self, Display}, - str::FromStr, -}; +use std::fmt::{self, Display}; use indexmap::IndexMap; +use self_cell::self_cell; use serde::{Deserialize, Deserializer, Serialize, de}; pub mod expr; @@ -201,57 +199,59 @@ pub enum Uses { Docker(DockerUses), } -impl FromStr for Uses { - type Err = UsesError; +impl Uses { + /// Parse a `uses:` clause into its appropriate variant. + pub fn parse(uses: impl Into) -> Result { + let uses = uses.into(); - fn from_str(uses: &str) -> Result { if uses.starts_with("./") { - LocalUses::from_str(uses).map(Self::Local) + Ok(Self::Local(LocalUses::new(uses))) } else if let Some(image) = uses.strip_prefix("docker://") { - DockerUses::from_str(image).map(Self::Docker) + DockerUses::parse(image).map(Self::Docker) } else { - RepositoryUses::from_str(uses).map(Self::Repository) + RepositoryUses::parse(uses).map(Self::Repository) + } + } + + /// Returns the original raw `uses:` clause. 
+ pub fn raw(&self) -> &str { + match self { + Uses::Local(local) => &local.path, + Uses::Repository(repo) => repo.raw(), + Uses::Docker(docker) => docker.raw(), } } } /// A `uses: ./some/path` clause. #[derive(Debug, PartialEq)] +#[non_exhaustive] pub struct LocalUses { pub path: String, } -impl FromStr for LocalUses { - type Err = UsesError; - - fn from_str(uses: &str) -> Result { - Ok(LocalUses { path: uses.into() }) +impl LocalUses { + fn new(path: String) -> Self { + LocalUses { path } } } -/// A `uses: some/repo` clause. #[derive(Debug, PartialEq)] -pub struct RepositoryUses { +struct RepositoryUsesInner<'a> { /// The repo user or org. - pub owner: String, + owner: &'a str, /// The repo name. - pub repo: String, + repo: &'a str, + /// The owner/repo slug. + slug: &'a str, /// The subpath to the action or reusable workflow, if present. - pub subpath: Option, + subpath: Option<&'a str>, /// The `@` that the `uses:` is pinned to. - pub git_ref: String, + git_ref: &'a str, } -impl FromStr for RepositoryUses { - type Err = UsesError; - - fn from_str(uses: &str) -> Result { - // NOTE: FromStr is slightly sub-optimal, since it takes a borrowed - // &str and results in bunch of allocs for a fully owned type. - // - // In theory we could do `From` instead, but - // `&mut str::split_mut` and similar don't exist yet. - +impl<'a> RepositoryUsesInner<'a> { + fn from_str(uses: &'a str) -> Result { // NOTE: Both git refs and paths can contain `@`, but in practice // GHA refuses to run a `uses:` clause with more than one `@` in it. 
let (path, git_ref) = match uses.rsplit_once('@') { @@ -259,44 +259,110 @@ impl FromStr for RepositoryUses { None => return Err(UsesError(format!("missing `@` in {uses}"))), }; - let components = path.splitn(3, '/').collect::>(); - if components.len() < 2 { - return Err(UsesError(format!("owner/repo slug is too short: {uses}"))); - } + let mut components = path.splitn(3, '/'); - Ok(RepositoryUses { - owner: components[0].into(), - repo: components[1].into(), - subpath: components.get(2).map(ToString::to_string), - git_ref: git_ref.into(), - }) + if let Some(owner) = components.next() + && let Some(repo) = components.next() + { + let subpath = components.next(); + + let slug = if subpath.is_none() { + path + } else { + &path[..owner.len() + 1 + repo.len()] + }; + + Ok(RepositoryUsesInner { + owner, + repo, + slug, + subpath, + git_ref, + }) + } else { + Err(UsesError(format!("owner/repo slug is too short: {uses}"))) + } } } -/// A `uses: docker://some-image` clause. -#[derive(Debug, PartialEq)] -pub struct DockerUses { - /// The registry this image is on, if present. - pub registry: Option, - /// The name of the Docker image. - pub image: String, - /// An optional tag for the image. - pub tag: Option, - /// An optional integrity hash for the image. - pub hash: Option, +self_cell!( + /// A `uses: some/repo` clause. + pub struct RepositoryUses { + owner: String, + + #[covariant] + dependent: RepositoryUsesInner, + } + + impl {Debug, PartialEq} +); + +impl Display for RepositoryUses { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.raw()) + } } -impl DockerUses { +impl RepositoryUses { + /// Parse a `uses: some/repo` clause. + pub fn parse(uses: impl Into) -> Result { + RepositoryUses::try_new(uses.into(), |s| { + let inner = RepositoryUsesInner::from_str(s)?; + Ok(inner) + }) + } + + /// Get the raw `uses:` string. 
+ pub fn raw(&self) -> &str { + self.borrow_owner() + } + + /// Get the owner (user or org) of this repository `uses:` clause. + pub fn owner(&self) -> &str { + self.borrow_dependent().owner + } + + /// Get the repository name of this repository `uses:` clause. + pub fn repo(&self) -> &str { + self.borrow_dependent().repo + } + + /// Get the owner/repo slug of this repository `uses:` clause. + pub fn slug(&self) -> &str { + self.borrow_dependent().slug + } + + /// Get the optional subpath of this repository `uses:` clause. + pub fn subpath(&self) -> Option<&str> { + self.borrow_dependent().subpath + } + + /// Get the git ref (branch, tag, or SHA) of this repository `uses:` clause. + pub fn git_ref(&self) -> &str { + self.borrow_dependent().git_ref + } +} + +#[derive(Debug, PartialEq)] +#[non_exhaustive] +pub struct DockerUsesInner<'a> { + /// The registry this image is on, if present. + registry: Option<&'a str>, + /// The name of the Docker image. + image: &'a str, + /// An optional tag for the image. + tag: Option<&'a str>, + /// An optional integrity hash for the image. 
+ hash: Option<&'a str>, +} + +impl<'a> DockerUsesInner<'a> { fn is_registry(registry: &str) -> bool { // https://stackoverflow.com/a/42116190 registry == "localhost" || registry.contains('.') || registry.contains(':') } -} -impl FromStr for DockerUses { - type Err = UsesError; - - fn from_str(uses: &str) -> Result { + fn from_str(uses: &'a str) -> Result { let (registry, image) = match uses.split_once('/') { Some((registry, image)) if Self::is_registry(registry) => (Some(registry), image), _ => (None, uses), @@ -314,11 +380,11 @@ impl FromStr for DockerUses { Some(&hash[1..]) }; - Ok(DockerUses { - registry: registry.map(Into::into), - image: image.into(), + Ok(DockerUsesInner { + registry, + image, tag: None, - hash: hash.map(Into::into), + hash, }) } else { let (image, tag) = match image.split_once(':') { @@ -327,16 +393,63 @@ impl FromStr for DockerUses { _ => (image, None), }; - Ok(DockerUses { - registry: registry.map(Into::into), - image: image.into(), - tag: tag.map(Into::into), + Ok(DockerUsesInner { + registry, + image, + tag, hash: None, }) } } } +self_cell!( + /// A `uses: docker://some-image` clause. + pub struct DockerUses { + owner: String, + + #[covariant] + dependent: DockerUsesInner, + } + + impl {Debug, PartialEq} +); + +impl DockerUses { + /// Parse a `uses: docker://some-image` clause. + pub fn parse(uses: impl Into) -> Result { + DockerUses::try_new(uses.into(), |s| { + let inner = DockerUsesInner::from_str(s)?; + Ok(inner) + }) + } + + /// Get the raw uses clause. This does not include the `docker://` prefix. + pub fn raw(&self) -> &str { + self.borrow_owner() + } + + /// Get the optional registry of this Docker image. + pub fn registry(&self) -> Option<&str> { + self.borrow_dependent().registry + } + + /// Get the image name of this Docker image. + pub fn image(&self) -> &str { + self.borrow_dependent().image + } + + /// Get the optional tag of this Docker image. 
+ pub fn tag(&self) -> Option<&str> { + self.borrow_dependent().tag + } + + /// Get the optional hash of this Docker image. + pub fn hash(&self) -> Option<&str> { + self.borrow_dependent().hash + } +} + /// Wraps a `de::Error::custom` call to log the same error as /// a `tracing::error!` event. /// @@ -351,13 +464,22 @@ where de::Error::custom(msg) } +/// Deserialize a `DockerUses`. +pub(crate) fn docker_uses<'de, D>(de: D) -> Result +where + D: Deserializer<'de>, +{ + let uses = ::deserialize(de)?; + DockerUses::parse(uses).map_err(custom_error::) +} + /// Deserialize an ordinary step `uses:`. pub(crate) fn step_uses<'de, D>(de: D) -> Result where D: Deserializer<'de>, { - let uses = <&str>::deserialize(de)?; - Uses::from_str(uses).map_err(custom_error::) + let uses = ::deserialize(de)?; + Uses::parse(uses).map_err(custom_error::) } /// Deserialize a reusable workflow step `uses:` @@ -396,9 +518,7 @@ mod tests { use crate::common::{BasePermission, Env, EnvValue, Permission}; - use super::{ - DockerUses, LocalUses, Permissions, RepositoryUses, Uses, UsesError, reusable_step_uses, - }; + use super::{Permissions, Uses, reusable_step_uses}; #[test] fn test_permissions() { @@ -453,230 +573,418 @@ mod tests { #[test] fn test_uses_parses() { - let vectors = [ - ( - // Valid: fully pinned. - "actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3", - Ok(Uses::Repository(RepositoryUses { - owner: "actions".to_owned(), - repo: "checkout".to_owned(), + // Fully pinned. 
+ insta::assert_debug_snapshot!( + Uses::parse("actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3", + dependent: RepositoryUsesInner { + owner: "actions", + repo: "checkout", + slug: "actions/checkout", subpath: None, - git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(), - })), - ), - ( - // Valid: fully pinned, subpath - "actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3", - Ok(Uses::Repository(RepositoryUses { - owner: "actions".to_owned(), - repo: "aws".to_owned(), - subpath: Some("ec2".to_owned()), - git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(), - })), - ), - ( - // Valid: fully pinned, complex subpath - "example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3", - Ok(Uses::Repository(RepositoryUses { - owner: "example".to_owned(), - repo: "foo".to_owned(), - subpath: Some("bar/baz/quux".to_owned()), - git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(), - })), - ), - ( - // Valid: pinned with branch/tag - "actions/checkout@v4", - Ok(Uses::Repository(RepositoryUses { - owner: "actions".to_owned(), - repo: "checkout".to_owned(), - subpath: None, - git_ref: "v4".to_owned(), - })), - ), - ( - "actions/checkout@abcd", - Ok(Uses::Repository(RepositoryUses { - owner: "actions".to_owned(), - repo: "checkout".to_owned(), - subpath: None, - git_ref: "abcd".to_owned(), - })), - ), - ( - // Invalid: unpinned - "actions/checkout", - Err(UsesError( - "missing `@` in actions/checkout".to_owned(), - )), - ), - ( - // Valid: Docker ref, implicit registry - "docker://alpine:3.8", - Ok(Uses::Docker(DockerUses { - registry: None, - image: "alpine".to_owned(), - tag: Some("3.8".to_owned()), - hash: None, - })), - ), - ( - // Valid: Docker ref, localhost - "docker://localhost/alpine:3.8", - Ok(Uses::Docker(DockerUses { - registry: Some("localhost".to_owned()), - image: "alpine".to_owned(), - tag: 
Some("3.8".to_owned()), - hash: None, - })), - ), - ( - // Valid: Docker ref, localhost w/ port - "docker://localhost:1337/alpine:3.8", - Ok(Uses::Docker(DockerUses { - registry: Some("localhost:1337".to_owned()), - image: "alpine".to_owned(), - tag: Some("3.8".to_owned()), - hash: None, - })), - ), - ( - // Valid: Docker ref, custom registry - "docker://ghcr.io/foo/alpine:3.8", - Ok(Uses::Docker(DockerUses { - registry: Some("ghcr.io".to_owned()), - image: "foo/alpine".to_owned(), - tag: Some("3.8".to_owned()), - hash: None, - })), - ), - ( - // Valid: Docker ref, missing tag - "docker://ghcr.io/foo/alpine", - Ok(Uses::Docker(DockerUses { - registry: Some("ghcr.io".to_owned()), - image: "foo/alpine".to_owned(), - tag: None, - hash: None, - })), - ), - ( - // Invalid, but allowed: Docker ref, empty tag - "docker://ghcr.io/foo/alpine:", - Ok(Uses::Docker(DockerUses { - registry: Some("ghcr.io".to_owned()), - image: "foo/alpine".to_owned(), - tag: None, - hash: None, - })), - ), - ( - // Valid: Docker ref, bare - "docker://alpine", - Ok(Uses::Docker(DockerUses { - registry: None, - image: "alpine".to_owned(), - tag: None, - hash: None, - })), - ), - ( - // Valid: Docker ref, hash - "docker://alpine@hash", - Ok(Uses::Docker(DockerUses { - registry: None, - image: "alpine".to_owned(), - tag: None, - hash: Some("hash".to_owned()), - })), - ), - ( - // Valid: Local action "ref", actually part of the path - "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89", - Ok(Uses::Local(LocalUses { - path: "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89".to_owned(), - })), - ), - ( - // Valid: Local action ref, unpinned - "./.github/actions/hello-world-action", - Ok(Uses::Local(LocalUses { - path: "./.github/actions/hello-world-action".to_owned(), - })), - ), - // Invalid: missing user/repo - ( - "checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3", - Err(UsesError( - "owner/repo slug is too short: 
checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned() - )), - ), - ]; + git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3", + }, + }, + ) + "#, + ); - for (input, expected) in vectors { - assert_eq!(input.parse(), expected); - } + // Fully pinned, subpath. + insta::assert_debug_snapshot!( + Uses::parse("actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3", + dependent: RepositoryUsesInner { + owner: "actions", + repo: "aws", + slug: "actions/aws", + subpath: Some( + "ec2", + ), + git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3", + }, + }, + ) + "# + ); + + // Fully pinned, complex subpath. + insta::assert_debug_snapshot!( + Uses::parse("example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3", + dependent: RepositoryUsesInner { + owner: "example", + repo: "foo", + slug: "example/foo", + subpath: Some( + "bar/baz/quux", + ), + git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3", + }, + }, + ) + "# + ); + + // Pinned with branch/tag. + insta::assert_debug_snapshot!( + Uses::parse("actions/checkout@v4").unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "actions/checkout@v4", + dependent: RepositoryUsesInner { + owner: "actions", + repo: "checkout", + slug: "actions/checkout", + subpath: None, + git_ref: "v4", + }, + }, + ) + "# + ); + + insta::assert_debug_snapshot!( + Uses::parse("actions/checkout@abcd").unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "actions/checkout@abcd", + dependent: RepositoryUsesInner { + owner: "actions", + repo: "checkout", + slug: "actions/checkout", + subpath: None, + git_ref: "abcd", + }, + }, + ) + "# + ); + + // Invalid: unpinned. 
+ insta::assert_debug_snapshot!( + Uses::parse("actions/checkout").unwrap_err(), + @r#" + UsesError( + "missing `@` in actions/checkout", + ) + "# + ); + + // Valid: Docker ref, implicit registry. + insta::assert_debug_snapshot!( + Uses::parse("docker://alpine:3.8").unwrap(), + @r#" + Docker( + DockerUses { + owner: "alpine:3.8", + dependent: DockerUsesInner { + registry: None, + image: "alpine", + tag: Some( + "3.8", + ), + hash: None, + }, + }, + ) + "# + ); + + // Valid: Docker ref, localhost. + insta::assert_debug_snapshot!( + Uses::parse("docker://localhost/alpine:3.8").unwrap(), + @r#" + Docker( + DockerUses { + owner: "localhost/alpine:3.8", + dependent: DockerUsesInner { + registry: Some( + "localhost", + ), + image: "alpine", + tag: Some( + "3.8", + ), + hash: None, + }, + }, + ) + "# + ); + + // Valid: Docker ref, localhost with port. + insta::assert_debug_snapshot!( + Uses::parse("docker://localhost:1337/alpine:3.8").unwrap(), + @r#" + Docker( + DockerUses { + owner: "localhost:1337/alpine:3.8", + dependent: DockerUsesInner { + registry: Some( + "localhost:1337", + ), + image: "alpine", + tag: Some( + "3.8", + ), + hash: None, + }, + }, + ) + "# + ); + + // Valid: Docker ref, custom registry. + insta::assert_debug_snapshot!( + Uses::parse("docker://ghcr.io/foo/alpine:3.8").unwrap(), + @r#" + Docker( + DockerUses { + owner: "ghcr.io/foo/alpine:3.8", + dependent: DockerUsesInner { + registry: Some( + "ghcr.io", + ), + image: "foo/alpine", + tag: Some( + "3.8", + ), + hash: None, + }, + }, + ) + "# + ); + + // Valid: Docker ref, missing tag. 
+ insta::assert_debug_snapshot!( + Uses::parse("docker://ghcr.io/foo/alpine").unwrap(), + @r#" + Docker( + DockerUses { + owner: "ghcr.io/foo/alpine", + dependent: DockerUsesInner { + registry: Some( + "ghcr.io", + ), + image: "foo/alpine", + tag: None, + hash: None, + }, + }, + ) + "# + ); + + // Invalid, but allowed: Docker ref, empty tag + insta::assert_debug_snapshot!( + Uses::parse("docker://ghcr.io/foo/alpine:").unwrap(), + @r#" + Docker( + DockerUses { + owner: "ghcr.io/foo/alpine:", + dependent: DockerUsesInner { + registry: Some( + "ghcr.io", + ), + image: "foo/alpine", + tag: None, + hash: None, + }, + }, + ) + "# + ); + + // Valid: Docker ref, bare. + insta::assert_debug_snapshot!( + Uses::parse("docker://alpine").unwrap(), + @r#" + Docker( + DockerUses { + owner: "alpine", + dependent: DockerUsesInner { + registry: None, + image: "alpine", + tag: None, + hash: None, + }, + }, + ) + "# + ); + + // Valid: Docker ref, with hash. + insta::assert_debug_snapshot!( + Uses::parse("docker://alpine@hash").unwrap(), + @r#" + Docker( + DockerUses { + owner: "alpine@hash", + dependent: DockerUsesInner { + registry: None, + image: "alpine", + tag: None, + hash: Some( + "hash", + ), + }, + }, + ) + "# + ); + + // Valid: Local action "ref", actually part of the path + insta::assert_debug_snapshot!( + Uses::parse("./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89").unwrap(), + @r#" + Local( + LocalUses { + path: "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89", + }, + ) + "# + ); + + // Valid: Local action ref, unpinned. 
+ insta::assert_debug_snapshot!( + Uses::parse("./.github/actions/hello-world-action").unwrap(), + @r#" + Local( + LocalUses { + path: "./.github/actions/hello-world-action", + }, + ) + "# + ); + + // Invalid: missing user/repo + insta::assert_debug_snapshot!( + Uses::parse("checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap_err(), + @r#" + UsesError( + "owner/repo slug is too short: checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3", + ) + "# + ); } #[test] fn test_uses_deser_reusable() { - let vectors = [ - // Valid, as expected. - ( - "octo-org/this-repo/.github/workflows/workflow-1.yml@\ - 172239021f7ba04fe7327647b213799853a9eb89", - Some(Uses::Repository(RepositoryUses { - owner: "octo-org".to_owned(), - repo: "this-repo".to_owned(), - subpath: Some(".github/workflows/workflow-1.yml".to_owned()), - git_ref: "172239021f7ba04fe7327647b213799853a9eb89".to_owned(), - })), - ), - ( - "octo-org/this-repo/.github/workflows/workflow-1.yml@notahash", - Some(Uses::Repository(RepositoryUses { - owner: "octo-org".to_owned(), - repo: "this-repo".to_owned(), - subpath: Some(".github/workflows/workflow-1.yml".to_owned()), - git_ref: "notahash".to_owned(), - })), - ), - ( - "octo-org/this-repo/.github/workflows/workflow-1.yml@abcd", - Some(Uses::Repository(RepositoryUses { - owner: "octo-org".to_owned(), - repo: "this-repo".to_owned(), - subpath: Some(".github/workflows/workflow-1.yml".to_owned()), - git_ref: "abcd".to_owned(), - })), - ), - // Invalid: remote reusable workflow without ref - ("octo-org/this-repo/.github/workflows/workflow-1.yml", None), - // Invalid: local reusable workflow with ref - ( - "./.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89", - None, - ), - // Invalid: no ref at all - ("octo-org/this-repo/.github/workflows/workflow-1.yml", None), - (".github/workflows/workflow-1.yml", None), - // Invalid: missing user/repo - ( - "workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89", - None, - ), - ]; - // Dummy type for 
testing deser of `Uses`. #[derive(Deserialize)] #[serde(transparent)] struct Dummy(#[serde(deserialize_with = "reusable_step_uses")] Uses); - for (input, expected) in vectors { - assert_eq!( - serde_yaml::from_str::(input).map(|d| d.0).ok(), - expected - ); - } + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + "octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89" + ) + .map(|d| d.0) + .unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89", + dependent: RepositoryUsesInner { + owner: "octo-org", + repo: "this-repo", + slug: "octo-org/this-repo", + subpath: Some( + ".github/workflows/workflow-1.yml", + ), + git_ref: "172239021f7ba04fe7327647b213799853a9eb89", + }, + }, + ) + "# + ); + + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + "octo-org/this-repo/.github/workflows/workflow-1.yml@notahash" + ).map(|d| d.0).unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@notahash", + dependent: RepositoryUsesInner { + owner: "octo-org", + repo: "this-repo", + slug: "octo-org/this-repo", + subpath: Some( + ".github/workflows/workflow-1.yml", + ), + git_ref: "notahash", + }, + }, + ) + "# + ); + + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + "octo-org/this-repo/.github/workflows/workflow-1.yml@abcd" + ).map(|d| d.0).unwrap(), + @r#" + Repository( + RepositoryUses { + owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@abcd", + dependent: RepositoryUsesInner { + owner: "octo-org", + repo: "this-repo", + slug: "octo-org/this-repo", + subpath: Some( + ".github/workflows/workflow-1.yml", + ), + git_ref: "abcd", + }, + }, + ) + "# + ); + + // Invalid: remote reusable workflow without ref + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + "octo-org/this-repo/.github/workflows/workflow-1.yml" + ).map(|d| d.0).unwrap_err(), + 
@r#"Error("malformed `uses` ref: missing `@` in octo-org/this-repo/.github/workflows/workflow-1.yml")"# + ); + + // Invalid: local reusable workflow with ref + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + "./.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89" + ).map(|d| d.0).unwrap_err(), + @r#"Error("local reusable workflow reference can't specify `@`")"# + ); + + // Invalid: no ref at all + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + ".github/workflows/workflow-1.yml" + ).map(|d| d.0).unwrap_err(), + @r#"Error("malformed `uses` ref: missing `@` in .github/workflows/workflow-1.yml")"# + ); + + // Invalid: missing user/repo + insta::assert_debug_snapshot!( + serde_yaml::from_str::( + "workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89" + ).map(|d| d.0).unwrap_err(), + @r#"Error("malformed `uses` ref: owner/repo slug is too short: workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89")"# + ); } } diff --git a/crates/github-actions-models/src/dependabot/v2.rs b/crates/github-actions-models/src/dependabot/v2.rs index efa0d75a..2549ca66 100644 --- a/crates/github-actions-models/src/dependabot/v2.rs +++ b/crates/github-actions-models/src/dependabot/v2.rs @@ -349,6 +349,8 @@ pub enum AllowDeny { #[derive(Deserialize, Debug, PartialEq)] #[serde(rename_all = "kebab-case")] pub enum PackageEcosystem { + /// `bazel` + Bazel, /// `bun` Bun, /// `bundler` @@ -369,6 +371,8 @@ pub enum PackageEcosystem { DotnetSdk, /// `helm` Helm, + /// `julia` + Julia, /// `elm` Elm, /// `gitsubmodule` @@ -387,6 +391,8 @@ pub enum PackageEcosystem { Npm, /// `nuget` Nuget, + /// `opentofu` + Opentofu, /// `pip` Pip, /// `pub` diff --git a/crates/github-actions-models/src/workflow/job.rs b/crates/github-actions-models/src/workflow/job.rs index abd219c5..76da0762 100644 --- a/crates/github-actions-models/src/workflow/job.rs +++ b/crates/github-actions-models/src/workflow/job.rs @@ -5,7 +5,7 @@ use serde::Deserialize; use 
serde_yaml::Value; use crate::common::expr::{BoE, LoE}; -use crate::common::{Env, If, Permissions, Uses, custom_error}; +use crate::common::{DockerUses, Env, If, Permissions, Uses, custom_error}; use super::{Concurrency, Defaults}; @@ -162,7 +162,8 @@ pub struct Matrix { pub enum Container { Name(String), Container { - image: String, + #[serde(deserialize_with = "crate::common::docker_uses")] + image: DockerUses, credentials: Option, #[serde(default)] env: LoE, diff --git a/crates/github-actions-models/tests/sample-dependabot/v2/opentofu.yml b/crates/github-actions-models/tests/sample-dependabot/v2/opentofu.yml new file mode 100644 index 00000000..bd9d308b --- /dev/null +++ b/crates/github-actions-models/tests/sample-dependabot/v2/opentofu.yml @@ -0,0 +1,23 @@ +# https://github.com/zizmorcore/zizmor/issues/1451 + +version: 2 +enable-beta-ecosystems: true + +updates: + - package-ecosystem: "opentofu" + directories: + - "/stack" + - "/modules/default-branch-protection" + schedule: + interval: "cron" + cronjob: "30 7 * * *" + timezone: "Europe/London" + target-branch: "main" + groups: + terraform: + applies-to: "version-updates" + patterns: + - "*" + update-types: + - "patch" + - "minor" diff --git a/crates/github-actions-models/tests/test_workflow.rs b/crates/github-actions-models/tests/test_workflow.rs index c9fded27..c6eaa0a3 100644 --- a/crates/github-actions-models/tests/test_workflow.rs +++ b/crates/github-actions-models/tests/test_workflow.rs @@ -1,4 +1,4 @@ -use std::{env, path::Path, str::FromStr}; +use std::{env, path::Path}; use github_actions_models::{ common::{ @@ -56,13 +56,13 @@ fn test_pip_audit_ci() { let StepBody::Uses { uses, with } = &test_job.steps[0].body else { panic!("expected uses step"); }; - assert_eq!(uses, &Uses::from_str("actions/checkout@v4.1.1").unwrap()); + assert_eq!(uses, &Uses::parse("actions/checkout@v4.1.1").unwrap()); assert!(with.is_empty()); let StepBody::Uses { uses, with } = &test_job.steps[1].body else { panic!("expected 
uses step"); }; - assert_eq!(uses, &Uses::from_str("actions/setup-python@v5").unwrap()); + assert_eq!(uses, &Uses::parse("actions/setup-python@v5").unwrap()); assert_eq!(with["python-version"].to_string(), "${{ matrix.python }}"); assert_eq!(with["cache"].to_string(), "pip"); assert_eq!(with["cache-dependency-path"].to_string(), "pyproject.toml"); diff --git a/crates/tree-sitter-iter/Cargo.toml b/crates/tree-sitter-iter/Cargo.toml index 46289139..0d44bf97 100644 --- a/crates/tree-sitter-iter/Cargo.toml +++ b/crates/tree-sitter-iter/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "tree-sitter-iter" description = "A very simple pre-order iterator for tree-sitter CSTs" -version = "0.0.2" +version = "0.0.3" authors.workspace = true homepage.workspace = true edition.workspace = true diff --git a/crates/yamlpatch/Cargo.toml b/crates/yamlpatch/Cargo.toml index 19e36bd5..497d4d6a 100644 --- a/crates/yamlpatch/Cargo.toml +++ b/crates/yamlpatch/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "yamlpatch" -version = "0.7.0" +version = "0.8.0" description = "Comment and format-preserving YAML patch operations" repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpatch" keywords = ["yaml", "patch"] diff --git a/crates/yamlpatch/README.md b/crates/yamlpatch/README.md index acb03bdb..ffd0fd94 100644 --- a/crates/yamlpatch/README.md +++ b/crates/yamlpatch/README.md @@ -41,16 +41,17 @@ human review. 
## Operations -`yamlpatch` supports several types of patch operations: +`yamlpatch` supports the following patch operations: - **Replace**: Replace a value at a specific path - **Add**: Add new key-value pairs to mappings - **Remove**: Remove keys or elements - **MergeInto**: Merge values into existing mappings +- **Append**: Append items to block sequences +- **ReplaceComment**: Replace comments associated with features - **RewriteFragment**: Rewrite portions of string values (useful for templating) -Each operation is designed to work with the existing document structure -and formatting, making minimal changes while achieving the desired result. +Each operation preserves the document's formatting and structure (as best-effort). ## License diff --git a/crates/yamlpatch/tests/unit_tests.rs b/crates/yamlpatch/tests/unit_tests.rs index 356ad31b..dc6a4be7 100644 --- a/crates/yamlpatch/tests/unit_tests.rs +++ b/crates/yamlpatch/tests/unit_tests.rs @@ -903,21 +903,21 @@ normal: let end = find_content_end(&feature, &doc); insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r" - bar: baz - abc: def # comment - "); + bar: baz + abc: def # comment + "); let feature = route_to_feature_exact(&route!("interior-spaces"), &doc) .unwrap() .unwrap(); let end = find_content_end(&feature, &doc); insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r" - - foo + - foo - - bar - # hello - - baz # hello - "); + - bar + # hello + - baz # hello + "); let feature = route_to_feature_exact(&route!("normal"), &doc) .unwrap() @@ -2775,11 +2775,12 @@ items: apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); insta::assert_snapshot!(result.source(), @r" - items: - - first - - second - - third - "); + + items: + - first + - second + - third + "); } #[test] @@ -2822,19 +2823,20 @@ databases: let result = apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); - 
insta::assert_snapshot!(result.source(), @r#" - databases: - - name: primary - host: db1.example.com - port: 5432 - max_connections: 100 - ssl: true - readonly: false - - name: analytics - host: db2.example.com - port: 5433 - readonly: true - "#); + insta::assert_snapshot!(result.source(), @r" + + databases: + - name: primary + host: db1.example.com + port: 5432 + max_connections: 100 + ssl: true + readonly: false + - name: analytics + host: db2.example.com + port: 5433 + readonly: true + "); } #[test] @@ -2870,16 +2872,17 @@ jobs: apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); insta::assert_snapshot!(result.source(), @r#" - jobs: - test: - steps: - - name: First step - run: echo "first" - - name: Second step - run: echo "second" - - name: Third step - run: echo "third" - "#); + + jobs: + test: + steps: + - name: First step + run: echo "first" + - name: Second step + run: echo "second" + - name: Third step + run: echo "third" + "#); } #[test] @@ -2924,20 +2927,21 @@ servers: assert!(result.source().contains("# Staging server")); assert!(result.source().contains("# internal only")); - insta::assert_snapshot!(result.source(), @r#" - servers: - # Production server - - name: prod - host: prod.example.com - port: 443 - # Staging server - - name: staging - host: staging.example.com # internal only - port: 8443 - - name: dev - host: localhost - port: 8080 - "#); + insta::assert_snapshot!(result.source(), @r" + + servers: + # Production server + - name: prod + host: prod.example.com + port: 443 + # Staging server + - name: staging + host: staging.example.com # internal only + port: 8443 + - name: dev + host: localhost + port: 8080 + "); } #[test] @@ -2959,11 +2963,12 @@ ports: apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); insta::assert_snapshot!(result.source(), @r" - ports: - - 8080 - - 8081 - - 8082 - "); + + ports: + - 8080 + - 8081 + - 8082 + "); } #[test] @@ -2985,11 +2990,12 @@ configs: 
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); insta::assert_snapshot!(result.source(), @r" - configs: - - name: config1 - value: 123 - - {} - "); + + configs: + - name: config1 + value: 123 + - {} + "); } #[test] @@ -3031,15 +3037,16 @@ services: let result = apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); - insta::assert_snapshot!(result.source(), @r#" - services: - - name: api - port: 8080 - - name: worker - port: 9090 - config: - replicas: 3 - "#); + insta::assert_snapshot!(result.source(), @r" + + services: + - name: api + port: 8080 + - name: worker + port: 9090 + config: + replicas: 3 + "); } #[test] @@ -3096,11 +3103,12 @@ tasks: apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); insta::assert_snapshot!(result.source(), @r" - tasks: - - task1 - - task2 - - task3 - "); + + tasks: + - task1 + - task2 + - task3 + "); } #[test] @@ -3138,20 +3146,21 @@ jobs: let result = apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); - insta::assert_snapshot!(result.source(), @r#" - name: CI - on: push - jobs: - test: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Run tests - run: npm test - - name: Upload coverage - uses: codecov/codecov-action@v3 - "#); + insta::assert_snapshot!(result.source(), @r" + + name: CI + on: push + jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Run tests + run: npm test + - name: Upload coverage + uses: codecov/codecov-action@v3 + "); } #[test] @@ -3176,9 +3185,10 @@ foo: apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap(); insta::assert_snapshot!(result.source(), @r" - foo: - - abc - - - def - - ghi - "); + + foo: + - abc + - - def + - ghi + "); } diff --git a/crates/yamlpath/Cargo.toml b/crates/yamlpath/Cargo.toml index fae7ecac..8a3cbc19 100644 --- 
a/crates/yamlpath/Cargo.toml +++ b/crates/yamlpath/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "yamlpath" -version = "0.29.0" +version = "0.31.0" description = "Format-preserving YAML feature extraction" repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpath" readme = "README.md" diff --git a/crates/zizmor/Cargo.toml b/crates/zizmor/Cargo.toml index 870c0c42..c241e617 100644 --- a/crates/zizmor/Cargo.toml +++ b/crates/zizmor/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "zizmor" description = "Static analysis for GitHub Actions" -version = "1.18.0-rc3" +version = "1.19.0" repository = "https://github.com/zizmorcore/zizmor" documentation = "https://docs.zizmor.sh" keywords = ["cli", "github-actions", "static-analysis", "security"] diff --git a/crates/zizmor/build.rs b/crates/zizmor/build.rs index 265240a1..884b652f 100644 --- a/crates/zizmor/build.rs +++ b/crates/zizmor/build.rs @@ -4,7 +4,7 @@ use std::fs::{self, File}; use std::path::Path; use std::{env, io}; -use fst::MapBuilder; +use fst::{MapBuilder, SetBuilder}; fn do_context_capabilities() { let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); @@ -55,7 +55,29 @@ fn do_codeql_injection_sinks() { fs::copy(source, target).unwrap(); } +fn do_archived_action_repos() { + let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + let source = Path::new(&manifest_dir).join("data/archived-repos.txt"); + let target = Path::new(&env::var("OUT_DIR").unwrap()).join("archived-repos.fst"); + + print!( + "cargo::rerun-if-changed={source}", + source = source.display() + ); + + let out = io::BufWriter::new(File::create(target).unwrap()); + let mut build = SetBuilder::new(out).unwrap(); + + let contents = fs::read_to_string(source).unwrap(); + for line in contents.lines() { + build.insert(line).unwrap(); + } + + build.finish().unwrap(); +} + fn main() { do_context_capabilities(); do_codeql_injection_sinks(); + do_archived_action_repos(); } diff --git a/crates/zizmor/data/archived-repos.txt 
b/crates/zizmor/data/archived-repos.txt new file mode 100644 index 00000000..2aeb1ab8 --- /dev/null +++ b/crates/zizmor/data/archived-repos.txt @@ -0,0 +1,87 @@ +8398a7/action-slack +actions-rs/audit-check +actions-rs/cargo +actions-rs/clippy-check +actions-rs/components-nightly +actions-rs/grcov +actions-rs/install +actions-rs/tarpaulin +actions-rs/toolchain +actions/create-release +actions/setup-elixir +actions/setup-haskell +actions/setup-ruby +actions/upload-release-asset +andrewmcodes-archive/rubocop-linter-action +artichoke/setup-rust +aslafy-z/conventional-pr-title-action +azure/appconfiguration-sync +azure/appservice-actions +azure/azure-resource-login-action +azure/container-actions +azure/container-scan +azure/data-factory-deploy-action +azure/data-factory-export-action +azure/data-factory-validate-action +azure/get-keyvault-secrets +azure/k8s-actions +azure/manage-azure-policy +azure/publish-security-assessments +azure/run-sqlpackage-action +azure/spring-cloud-deploy +azure/webapps-container-deploy +cedrickring/golang-action +cirrus-actions/rebase +crazy-max/ghaction-docker-buildx +decathlon/pull-request-labeler-action +delaguardo/setup-graalvm +dulvui/godot-android-export +expo/expo-preview-action +fabasoad/setup-zizmor-action +facebook/pysa-action +fregante/release-with-changelog +google/mirror-branch-action +google/skywater-pdk-actions +gradle/gradle-build-action +grafana/k6-action +helaili/github-graphql-action +helaili/jekyll-action +ilshidur/action-slack +jakejarvis/backblaze-b2-action +jakejarvis/cloudflare-purge-action +jakejarvis/firebase-deploy-action +jakejarvis/hugo-build-action +jakejarvis/lighthouse-action +jakejarvis/s3-sync-action +justinribeiro/lighthouse-action +kanadgupta/glitch-sync +kxxt/chatgpt-action +machine-learning-apps/wandb-action +mansagroup/gcs-cache-action +marvinpinto/action-automatic-releases +marvinpinto/actions +maxheld83/ghpages +micnncim/action-lgtm-reaction +mikepenz/gradle-dependency-submission 
+orf/cargo-bloat-action +paambaati/codeclimate-action +primer/figma-action +repo-sync/pull-request +repo-sync/repo-sync +sagebind/docker-swarm-deploy-action +scottbrenner/generate-changelog-action +secrethub/actions +semgrep/semgrep-action +shaunlwm/action-release-debugapk +sonarsource/sonarcloud-github-action +stefanprodan/kube-tools +swiftdocorg/github-wiki-publish-action +tachiyomiorg/issue-moderator-action +technote-space/auto-cancel-redundant-workflow +technote-space/get-diff-action +tencentcloudbase/cloudbase-action +trmcnvn/chrome-addon +whelk-io/maven-settings-xml-action +yeslayla/build-godot-action +youyo/aws-cdk-github-actions +z0al/dependent-issues diff --git a/crates/zizmor/data/context-capabilities.csv b/crates/zizmor/data/context-capabilities.csv index 76d8ff96..b1a1a079 100644 --- a/crates/zizmor/data/context-capabilities.csv +++ b/crates/zizmor/data/context-capabilities.csv @@ -272,6 +272,17 @@ github.event.changes.new_issue.draft,fixed github.event.changes.new_issue.events_url,structured github.event.changes.new_issue.html_url,structured github.event.changes.new_issue.id,fixed +github.event.changes.new_issue.issue_dependencies_summary.blocked_by,fixed +github.event.changes.new_issue.issue_dependencies_summary.blocking,fixed +github.event.changes.new_issue.issue_dependencies_summary.total_blocked_by,fixed +github.event.changes.new_issue.issue_dependencies_summary.total_blocking,fixed +github.event.changes.new_issue.issue_field_values.*.data_type,fixed +github.event.changes.new_issue.issue_field_values.*.issue_field_id,fixed +github.event.changes.new_issue.issue_field_values.*.node_id,arbitrary +github.event.changes.new_issue.issue_field_values.*.single_select_option.color,arbitrary +github.event.changes.new_issue.issue_field_values.*.single_select_option.id,fixed +github.event.changes.new_issue.issue_field_values.*.single_select_option.name,arbitrary +github.event.changes.new_issue.issue_field_values.*.value,arbitrary 
github.event.changes.new_issue.labels.*.color,arbitrary github.event.changes.new_issue.labels.*.default,fixed github.event.changes.new_issue.labels.*.description,arbitrary @@ -762,6 +773,17 @@ github.event.changes.old_issue.draft,fixed github.event.changes.old_issue.events_url,structured github.event.changes.old_issue.html_url,structured github.event.changes.old_issue.id,fixed +github.event.changes.old_issue.issue_dependencies_summary.blocked_by,fixed +github.event.changes.old_issue.issue_dependencies_summary.blocking,fixed +github.event.changes.old_issue.issue_dependencies_summary.total_blocked_by,fixed +github.event.changes.old_issue.issue_dependencies_summary.total_blocking,fixed +github.event.changes.old_issue.issue_field_values.*.data_type,fixed +github.event.changes.old_issue.issue_field_values.*.issue_field_id,fixed +github.event.changes.old_issue.issue_field_values.*.node_id,arbitrary +github.event.changes.old_issue.issue_field_values.*.single_select_option.color,arbitrary +github.event.changes.old_issue.issue_field_values.*.single_select_option.id,fixed +github.event.changes.old_issue.issue_field_values.*.single_select_option.name,arbitrary +github.event.changes.old_issue.issue_field_values.*.value,arbitrary github.event.changes.old_issue.labels.*.color,arbitrary github.event.changes.old_issue.labels.*.default,fixed github.event.changes.old_issue.labels.*.description,arbitrary @@ -1060,7 +1082,6 @@ github.event.changes.required_status_checks.from.*,arbitrary github.event.changes.required_status_checks_enforcement_level.from,fixed github.event.changes.tag_name.from,arbitrary github.event.changes.title.from,arbitrary -github.event.check_run.app,fixed github.event.check_run.app.client_id,arbitrary github.event.check_run.app.created_at,fixed github.event.check_run.app.description,arbitrary @@ -1191,6 +1212,8 @@ github.event.check_run.check_suite.repository.compare_url,arbitrary github.event.check_run.check_suite.repository.contents_url,arbitrary 
github.event.check_run.check_suite.repository.contributors_url,structured github.event.check_run.check_suite.repository.created_at,fixed +github.event.check_run.check_suite.repository.custom_properties,arbitrary +github.event.check_run.check_suite.repository.custom_properties.*,arbitrary github.event.check_run.check_suite.repository.default_branch,arbitrary github.event.check_run.check_suite.repository.delete_branch_on_merge,fixed github.event.check_run.check_suite.repository.deployments_url,structured @@ -2207,6 +2230,17 @@ github.event.issue.draft,fixed github.event.issue.events_url,arbitrary github.event.issue.html_url,arbitrary github.event.issue.id,fixed +github.event.issue.issue_dependencies_summary.blocked_by,fixed +github.event.issue.issue_dependencies_summary.blocking,fixed +github.event.issue.issue_dependencies_summary.total_blocked_by,fixed +github.event.issue.issue_dependencies_summary.total_blocking,fixed +github.event.issue.issue_field_values.*.data_type,fixed +github.event.issue.issue_field_values.*.issue_field_id,fixed +github.event.issue.issue_field_values.*.node_id,arbitrary +github.event.issue.issue_field_values.*.single_select_option.color,arbitrary +github.event.issue.issue_field_values.*.single_select_option.id,fixed +github.event.issue.issue_field_values.*.single_select_option.name,arbitrary +github.event.issue.issue_field_values.*.value,arbitrary github.event.issue.labels.*,arbitrary github.event.issue.labels.*.color,arbitrary github.event.issue.labels.*.default,fixed @@ -3047,6 +3081,7 @@ github.event.pull_request.requested_reviewers.*.url,structured github.event.pull_request.requested_reviewers.*.user_view_type,arbitrary github.event.pull_request.requested_teams.*.deleted,fixed github.event.pull_request.requested_teams.*.description,arbitrary +github.event.pull_request.requested_teams.*.enterprise_id,fixed github.event.pull_request.requested_teams.*.html_url,structured github.event.pull_request.requested_teams.*.id,fixed 
github.event.pull_request.requested_teams.*.ldap_dn,arbitrary @@ -3054,6 +3089,7 @@ github.event.pull_request.requested_teams.*.members_url,arbitrary github.event.pull_request.requested_teams.*.name,arbitrary github.event.pull_request.requested_teams.*.node_id,arbitrary github.event.pull_request.requested_teams.*.notification_setting,arbitrary +github.event.pull_request.requested_teams.*.organization_id,fixed github.event.pull_request.requested_teams.*.parent.description,arbitrary github.event.pull_request.requested_teams.*.parent.html_url,structured github.event.pull_request.requested_teams.*.parent.id,fixed @@ -3069,6 +3105,7 @@ github.event.pull_request.requested_teams.*.permission,arbitrary github.event.pull_request.requested_teams.*.privacy,arbitrary github.event.pull_request.requested_teams.*.repositories_url,structured github.event.pull_request.requested_teams.*.slug,arbitrary +github.event.pull_request.requested_teams.*.type,fixed github.event.pull_request.requested_teams.*.url,structured github.event.pull_request.review_comment_url,arbitrary github.event.pull_request.review_comments,fixed @@ -3349,6 +3386,7 @@ github.event.release.discussion_url,structured github.event.release.draft,fixed github.event.release.html_url,structured github.event.release.id,fixed +github.event.release.immutable,fixed github.event.release.name,arbitrary github.event.release.node_id,arbitrary github.event.release.prerelease,fixed @@ -3366,6 +3404,7 @@ github.event.release.reactions.url,structured github.event.release.tag_name,arbitrary github.event.release.tarball_url,structured github.event.release.target_commitish,arbitrary +github.event.release.updated_at,fixed github.event.release.upload_url,structured github.event.release.url,structured github.event.release.zipball_url,structured @@ -3691,6 +3730,7 @@ github.event.review.node_id,arbitrary github.event.review.pull_request_url,structured github.event.review.state,arbitrary github.event.review.submitted_at,fixed 
+github.event.review.updated_at,fixed github.event.review.user.avatar_url,structured github.event.review.user.deleted,fixed github.event.review.user.email,arbitrary diff --git a/crates/zizmor/src/audit/anonymous_definition.rs b/crates/zizmor/src/audit/anonymous_definition.rs index 5de1fd8f..c60c295f 100644 --- a/crates/zizmor/src/audit/anonymous_definition.rs +++ b/crates/zizmor/src/audit/anonymous_definition.rs @@ -39,6 +39,7 @@ impl Audit for AnonymousDefinition { .confidence(Confidence::High) .persona(Persona::Pedantic) .add_location(workflow.location().primary()) + .tip("use 'name: ...' to give this workflow a name") .build(workflow)?, ); } @@ -47,14 +48,13 @@ impl Audit for AnonymousDefinition { match job { Job::NormalJob(normal) => { if normal.name.is_none() { - let location = normal.location().primary(); - findings.push( Self::finding() .severity(ANONYMOUS_DEFINITION_JOB_SEVERITY) .confidence(Confidence::High) .persona(Persona::Pedantic) - .add_location(location) + .add_location(normal.location_with_grip().primary()) + .tip("use 'name: ...' 
to give this job a name") .build(workflow)?, ); } diff --git a/crates/zizmor/src/audit/archived_uses.rs b/crates/zizmor/src/audit/archived_uses.rs new file mode 100644 index 00000000..effd55aa --- /dev/null +++ b/crates/zizmor/src/audit/archived_uses.rs @@ -0,0 +1,140 @@ +use std::sync::LazyLock; + +use fst::Set; +use github_actions_models::common::{RepositoryUses, Uses}; +use subfeature::Subfeature; + +use crate::{ + audit::{Audit, AuditError, AuditLoadError, audit_meta}, + config::Config, + finding::{Confidence, Finding, FindingBuilder, Persona, Severity, location::Locatable}, + models::{ + StepCommon as _, + action::CompositeStep, + workflow::{ReusableWorkflowCallJob, Step}, + }, + state::AuditState, +}; + +static ARCHIVED_REPOS_FST: LazyLock> = LazyLock::new(|| { + fst::Set::new(include_bytes!(concat!(env!("OUT_DIR"), "/archived-repos.fst")).as_slice()) + .expect("couldn't initialize archived repos FST") +}); + +pub(crate) struct ArchivedUses; + +audit_meta!( + ArchivedUses, + "archived-uses", + "action or reusable workflow from archived repository" +); + +impl ArchivedUses { + pub(crate) fn uses_is_archived<'doc>(uses: &RepositoryUses) -> Option> { + // TODO: Annoying that we need to allocate for case normalization here; can we use an + // automaton to search the FST case-insensitively? 
+ let normalized = format!( + "{owner}/{repo}", + owner = uses.owner().to_lowercase(), + repo = uses.repo().to_lowercase() + ); + + ARCHIVED_REPOS_FST.contains(normalized.as_bytes()).then(|| { + Self::finding() + .confidence(Confidence::High) + .severity(Severity::Medium) + .persona(Persona::Regular) + }) + } +} + +#[async_trait::async_trait] +impl Audit for ArchivedUses { + fn new(_state: &AuditState) -> Result + where + Self: Sized, + { + Ok(Self) + } + + async fn audit_step<'doc>( + &self, + step: &Step<'doc>, + _config: &Config, + ) -> Result>, AuditError> { + let mut findings = vec![]; + + if let Some(Uses::Repository(uses)) = step.uses() + && let Some(finding) = Self::uses_is_archived(uses) + { + findings.push( + finding + .add_location(step.location_with_grip()) + .add_location( + step.location() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.slug())) + .annotated("repository is archived") + .primary(), + ) + .build(step)?, + ) + } + + Ok(findings) + } + + async fn audit_composite_step<'doc>( + &self, + step: &CompositeStep<'doc>, + _config: &Config, + ) -> Result>, AuditError> { + let mut findings = vec![]; + + if let Some(Uses::Repository(uses)) = step.uses() + && let Some(finding) = Self::uses_is_archived(uses) + { + findings.push( + finding + .add_location(step.location_with_grip()) + .add_location( + step.location() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.slug())) + .annotated("repository is archived") + .primary(), + ) + .build(step)?, + ) + } + + Ok(findings) + } + + async fn audit_reusable_job<'doc>( + &self, + job: &ReusableWorkflowCallJob<'doc>, + _config: &Config, + ) -> Result>, AuditError> { + let mut findings = vec![]; + + if let Uses::Repository(uses) = &job.uses + && let Some(finding) = Self::uses_is_archived(uses) + { + findings.push( + finding + .add_location(job.location_with_grip()) + .add_location( + job.location() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, 
uses.slug())) + .annotated("repository is archived") + .primary(), + ) + .build(job)?, + ) + } + + Ok(findings) + } +} diff --git a/crates/zizmor/src/audit/artipacked.rs b/crates/zizmor/src/audit/artipacked.rs index 7513400d..8650727a 100644 --- a/crates/zizmor/src/audit/artipacked.rs +++ b/crates/zizmor/src/audit/artipacked.rs @@ -42,12 +42,12 @@ impl Artipacked { uses: &github_actions_models::common::RepositoryUses, ) -> Result, ClientError> { let version = if !uses.ref_is_commit() { - uses.git_ref.clone() + uses.git_ref().to_string() } else { match self.client { Some(ref client) => { let tag = client - .longest_tag_for_commit(&uses.owner, &uses.repo, &uses.git_ref) + .longest_tag_for_commit(uses.owner(), uses.repo(), uses.git_ref()) .await?; match tag { @@ -271,8 +271,6 @@ impl Audit for Artipacked { #[cfg(test)] mod tests { - use std::str::FromStr; - use github_actions_models::common::RepositoryUses; use super::*; @@ -325,11 +323,11 @@ mod tests { #[tokio::test] async fn test_is_checkout_v6_or_higher_offline() { // Test v6 and higher versions - let v6 = RepositoryUses::from_str("actions/checkout@v6").unwrap(); - let v6_0 = RepositoryUses::from_str("actions/checkout@v6.0").unwrap(); - let v6_1_0 = RepositoryUses::from_str("actions/checkout@v6.1.0").unwrap(); - let v7 = RepositoryUses::from_str("actions/checkout@v7").unwrap(); - let v10 = RepositoryUses::from_str("actions/checkout@v10").unwrap(); + let v6 = RepositoryUses::parse("actions/checkout@v6").unwrap(); + let v6_0 = RepositoryUses::parse("actions/checkout@v6.0").unwrap(); + let v6_1_0 = RepositoryUses::parse("actions/checkout@v6.1.0").unwrap(); + let v7 = RepositoryUses::parse("actions/checkout@v7").unwrap(); + let v10 = RepositoryUses::parse("actions/checkout@v10").unwrap(); let artipacked = Artipacked { client: None }; @@ -355,9 +353,9 @@ mod tests { ); // Test versions below v6 - let v4 = RepositoryUses::from_str("actions/checkout@v4").unwrap(); - let v5 = 
RepositoryUses::from_str("actions/checkout@v5").unwrap(); - let v5_9 = RepositoryUses::from_str("actions/checkout@v5.9").unwrap(); + let v4 = RepositoryUses::parse("actions/checkout@v4").unwrap(); + let v5 = RepositoryUses::parse("actions/checkout@v5").unwrap(); + let v5_9 = RepositoryUses::parse("actions/checkout@v5.9").unwrap(); assert_eq!( artipacked.is_checkout_v6_or_higher(&v4).await.unwrap(), @@ -374,7 +372,7 @@ mod tests { // Test commit SHA (should return None when offline) let commit_sha = - RepositoryUses::from_str("actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683") + RepositoryUses::parse("actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683") .unwrap(); assert_eq!( artipacked @@ -385,7 +383,7 @@ mod tests { ); // Test invalid/unparseable refs (should return None) - let invalid = RepositoryUses::from_str("actions/checkout@main").unwrap(); + let invalid = RepositoryUses::parse("actions/checkout@main").unwrap(); assert_eq!( artipacked.is_checkout_v6_or_higher(&invalid).await.unwrap(), None @@ -410,7 +408,7 @@ mod tests { // Points to v6.0.0. let commit_sha_v6 = - RepositoryUses::from_str("actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3") + RepositoryUses::parse("actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3") .unwrap(); assert_eq!( @@ -423,7 +421,7 @@ mod tests { // Points to v5.0.1. 
let commit_sha_v5 = - RepositoryUses::from_str("actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd") + RepositoryUses::parse("actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd") .unwrap(); assert_eq!( @@ -526,6 +524,7 @@ jobs: |workflow: &Workflow, findings| { let fixed = apply_fix_for_snapshot(workflow.as_document(), findings); insta::assert_snapshot!(fixed.source(), @r" + name: Test Workflow on: push jobs: @@ -573,6 +572,7 @@ jobs: |workflow: &Workflow, findings| { let fixed = apply_fix_for_snapshot(workflow.as_document(), findings); insta::assert_snapshot!(fixed.source(), @r" + name: Test Workflow on: push jobs: diff --git a/crates/zizmor/src/audit/bot_conditions.rs b/crates/zizmor/src/audit/bot_conditions.rs index d3847a17..d4c3492e 100644 --- a/crates/zizmor/src/audit/bot_conditions.rs +++ b/crates/zizmor/src/audit/bot_conditions.rs @@ -15,7 +15,7 @@ use super::{Audit, AuditLoadError, AuditState, audit_meta}; use crate::{ audit::AuditError, finding::{Confidence, Fix, FixDisposition, Severity, location::Locatable as _}, - models::workflow::{JobExt, Workflow}, + models::workflow::{JobCommon, Workflow}, utils::{self, ExtractedExpr}, }; use subfeature::Subfeature; @@ -81,7 +81,7 @@ impl Audit for BotConditions { if let Some(If::Expr(expr)) = &job.r#if { conds.push(( expr, - job.location_with_name(), + job.location_with_grip(), job.location().with_keys(["if".into()]), )); } @@ -91,7 +91,7 @@ impl Audit for BotConditions { if let Some(If::Expr(expr)) = &step.r#if { conds.push(( expr, - step.location_with_name(), + step.location_with_grip(), step.location().with_keys(["if".into()]), )); } @@ -538,6 +538,7 @@ jobs: } insta::assert_snapshot!(document.source(), @r#" + name: Test Workflow on: pull_request_target: @@ -588,6 +589,7 @@ jobs: } } insta::assert_snapshot!(document.source(), @r#" + name: Test Workflow on: pull_request_target: @@ -641,6 +643,7 @@ jobs: // Verify it suggests comment.user.login for issue_comment events 
insta::assert_snapshot!(document.source(), @r#" + name: Test Issue Comment on: issue_comment @@ -690,6 +693,7 @@ jobs: // Verify it suggests review.user.login for pull_request_review events insta::assert_snapshot!(document.source(), @r#" + name: Test PR Review on: pull_request_review @@ -739,6 +743,7 @@ jobs: // Verify it suggests issue.user.login for issues events insta::assert_snapshot!(document.source(), @r#" + name: Test Issues on: issues @@ -788,6 +793,7 @@ jobs: // Verify it suggests release.author.login for release events insta::assert_snapshot!(document.source(), @r#" + name: Test Release on: release @@ -836,6 +842,7 @@ jobs: } insta::assert_snapshot!(document.source(), @r#" + name: Test Create on: create @@ -885,6 +892,7 @@ jobs: } insta::assert_snapshot!(document.source(), @r#" + name: Test Workflow on: pull_request_target: diff --git a/crates/zizmor/src/audit/cache_poisoning.rs b/crates/zizmor/src/audit/cache_poisoning.rs index 78dbc936..0dd95d9b 100644 --- a/crates/zizmor/src/audit/cache_poisoning.rs +++ b/crates/zizmor/src/audit/cache_poisoning.rs @@ -9,7 +9,7 @@ use crate::finding::location::{Locatable as _, Routable}; use crate::finding::{Confidence, Finding, Fix, FixDisposition, Severity}; use crate::models::StepCommon; use crate::models::coordinate::{ActionCoordinate, ControlExpr, ControlFieldType, Toggle, Usage}; -use crate::models::workflow::{JobExt as _, NormalJob, Step, Steps}; +use crate::models::workflow::{JobCommon as _, NormalJob, Step, Steps}; use crate::state::AuditState; use indexmap::IndexMap; @@ -444,7 +444,7 @@ impl CachePoisoning { finding_builder = finding_builder.fix(fix); } - finding_builder.build(step.workflow()).ok() + finding_builder.build(step).ok() } } @@ -555,6 +555,7 @@ jobs: |findings: Vec| { let fixed_content = apply_fix_for_snapshot(workflow_content, findings); insta::assert_snapshot!(fixed_content, @r" + name: Test Workflow on: release @@ -599,6 +600,7 @@ jobs: |findings: Vec| { let fixed_content = 
apply_fix_for_snapshot(workflow_content, findings); insta::assert_snapshot!(fixed_content, @r" + name: Test Workflow on: release diff --git a/crates/zizmor/src/audit/dependabot_cooldown.rs b/crates/zizmor/src/audit/dependabot_cooldown.rs index 4bd782ba..e8b2f8df 100644 --- a/crates/zizmor/src/audit/dependabot_cooldown.rs +++ b/crates/zizmor/src/audit/dependabot_cooldown.rs @@ -135,7 +135,7 @@ impl Audit for DependabotCooldown { Self::finding() .add_location( update - .location_with_name() + .location_with_grip() .primary() .annotated("missing cooldown configuration"), ) @@ -202,6 +202,7 @@ updates: let fix = &finding.fixes[0]; let fixed_document = fix.apply(dependabot.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r" + version: 2 updates: @@ -243,6 +244,7 @@ updates: let fix = &finding.fixes[0]; let fixed_document = fix.apply(dependabot.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r" + version: 2 updates: @@ -284,6 +286,7 @@ updates: let fix = &finding.fixes[0]; let fixed_document = fix.apply(dependabot.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r" + version: 2 updates: @@ -335,6 +338,7 @@ updates: } insta::assert_snapshot!(document.source(), @r" + version: 2 updates: @@ -379,6 +383,7 @@ updates: // Verify the document remains unchanged insta::assert_snapshot!(dependabot.as_document().source(), @r" + version: 2 updates: diff --git a/crates/zizmor/src/audit/dependabot_execution.rs b/crates/zizmor/src/audit/dependabot_execution.rs index fe5caea3..d57653dd 100644 --- a/crates/zizmor/src/audit/dependabot_execution.rs +++ b/crates/zizmor/src/audit/dependabot_execution.rs @@ -61,7 +61,7 @@ impl Audit for DependabotExecution { .primary() .annotated("enabled here"), ) - .add_location(update.location_with_name()) + .add_location(update.location_with_grip()) .fix(Self::create_set_deny_fix(update)) .build(dependabot)?, ); @@ -123,6 +123,7 @@ updates: let fix = &finding.fixes[0]; let 
fixed_document = fix.apply(dependabot.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r" + version: 2 updates: @@ -158,6 +159,7 @@ updates: // Verify the document remains unchanged insta::assert_snapshot!(dependabot.as_document().source(), @r" + version: 2 updates: @@ -192,6 +194,7 @@ updates: // Verify the document remains unchanged insta::assert_snapshot!(dependabot.as_document().source(), @r" + version: 2 updates: @@ -240,6 +243,7 @@ updates: } insta::assert_snapshot!(document.source(), @r" + version: 2 updates: diff --git a/crates/zizmor/src/audit/excessive_permissions.rs b/crates/zizmor/src/audit/excessive_permissions.rs index 76204252..790ede83 100644 --- a/crates/zizmor/src/audit/excessive_permissions.rs +++ b/crates/zizmor/src/audit/excessive_permissions.rs @@ -14,6 +14,7 @@ use crate::{ static KNOWN_PERMISSIONS: LazyLock> = LazyLock::new(|| { [ ("actions", Severity::High), + ("artifact-metadata", Severity::Medium), ("attestations", Severity::High), ("checks", Severity::Medium), ("contents", Severity::High), @@ -21,6 +22,8 @@ static KNOWN_PERMISSIONS: LazyLock> = LazyLock::new(|| { ("discussions", Severity::Medium), ("id-token", Severity::High), ("issues", Severity::High), + // What does the write permission even do here? 
+ ("models", Severity::Low), ("packages", Severity::High), ("pages", Severity::High), ("pull-requests", Severity::High), diff --git a/crates/zizmor/src/audit/forbidden_uses.rs b/crates/zizmor/src/audit/forbidden_uses.rs index 81ed8d4a..74ff3a7a 100644 --- a/crates/zizmor/src/audit/forbidden_uses.rs +++ b/crates/zizmor/src/audit/forbidden_uses.rs @@ -1,4 +1,5 @@ use github_actions_models::common::Uses; +use subfeature::Subfeature; use super::{Audit, AuditLoadError, AuditState, audit_meta}; use crate::audit::AuditError; @@ -59,6 +60,7 @@ impl ForbiddenUses { step.location() .primary() .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.raw())) .annotated("use of this action is forbidden"), ) .build(step)?, diff --git a/crates/zizmor/src/audit/github_env.rs b/crates/zizmor/src/audit/github_env.rs index ae0c35a5..aa91e29b 100644 --- a/crates/zizmor/src/audit/github_env.rs +++ b/crates/zizmor/src/audit/github_env.rs @@ -13,7 +13,7 @@ use crate::config::Config; use crate::finding::location::Locatable as _; use crate::finding::{Confidence, Finding, Severity}; use crate::models::StepCommon; -use crate::models::{workflow::JobExt as _, workflow::Step}; +use crate::models::{workflow::JobCommon as _, workflow::Step}; use crate::state::AuditState; use crate::utils; use crate::utils::once::static_regex; @@ -88,20 +88,22 @@ const PWSH_REDIRECT_QUERY: &str = r#" const PWSH_PIPELINE_QUERY: &str = r#" (pipeline - (command - command_name: (command_name) @cmd - command_elements: (command_elements - (_)* - (array_literal_expression - (unary_expression [ - (string_literal - (expandable_string_literal (variable) @destination)) - (variable) @destination - ]) - ) - (_)*)) - (#match? @cmd "(?i)out-file|add-content|set-content|tee-object") - (#match? 
@destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH") + (pipeline_chain + (command + command_name: (command_name) @cmd + command_elements: (command_elements + (_)* + (array_literal_expression + (unary_expression [ + (string_literal + (expandable_string_literal (variable) @destination)) + (variable) @destination + ]) + ) + (_)*)) + (#match? @cmd "(?i)out-file|add-content|set-content|tee-object") + (#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH") + ) ) @span "#; @@ -392,7 +394,7 @@ impl Audit for GitHubEnv { } if let StepBody::Run { run, .. } = &step.deref().body { - let shell = step.shell().unwrap_or_else(|| { + let shell = step.shell().map(|s| s.0).unwrap_or_else(|| { tracing::warn!( "github-env: couldn't determine shell type for {workflow}:{job} step {stepno}; assuming bash", workflow = step.workflow().key.presentation_path(), @@ -419,7 +421,7 @@ impl Audit for GitHubEnv { .with_keys(["run".into()]) .annotated(format!("write to {dest} may allow code execution")), ) - .build(step.workflow())?, + .build(step)?, ) } } @@ -438,7 +440,7 @@ impl Audit for GitHubEnv { return Ok(findings); }; - let shell = step.shell().unwrap_or_else(|| { + let shell = step.shell().map(|s| s.0).unwrap_or_else(|| { tracing::warn!( "github-env: couldn't determine shell type for {action} step {stepno}; assuming bash", action = step.action().key.presentation_path(), @@ -463,7 +465,7 @@ impl Audit for GitHubEnv { .with_keys(["run".into()]) .annotated(format!("write to {dest} may allow code execution")), ) - .build(step.action())?, + .build(step)?, ) } diff --git a/crates/zizmor/src/audit/impostor_commit.rs b/crates/zizmor/src/audit/impostor_commit.rs index a8fbc3ba..e4f52bc4 100644 --- a/crates/zizmor/src/audit/impostor_commit.rs +++ b/crates/zizmor/src/audit/impostor_commit.rs @@ -7,6 +7,7 @@ use anyhow::anyhow; use github_actions_models::common::{RepositoryUses, Uses}; +use subfeature::Subfeature; use super::{Audit, AuditLoadError, Job, audit_meta}; use crate::{ @@ -51,7 +52,7 @@ 
impl ImpostorCommit { Ok( match self .client - .compare_commits(&uses.owner, &uses.repo, base_ref, head_ref) + .compare_commits(uses.owner(), uses.repo(), base_ref, head_ref) .await .map_err(Self::err)? { @@ -76,13 +77,13 @@ impl ImpostorCommit { return Ok(false); }; - // Fast path: almost all commit refs will be at the tip of + // Fastest path: almost all commit refs will be at the tip of // the branch or tag's history, so check those first. // Check tags before branches, since in practice version tags // are more commonly pinned. let tags = self .client - .list_tags(&uses.owner, &uses.repo) + .list_tags(uses.owner(), uses.repo()) .await .map_err(Self::err)?; @@ -94,7 +95,7 @@ impl ImpostorCommit { let branches = self .client - .list_branches(&uses.owner, &uses.repo) + .list_branches(uses.owner(), uses.repo()) .await .map_err(Self::err)?; @@ -104,6 +105,21 @@ impl ImpostorCommit { } } + // Fast path: attempt to use GitHub's undocumented `branch_commits` + // API to see if the commit is present in any branch/tag. + // There are no stabilitiy guarantees for this API, so we fall back + // to the slow(er) paths if it fails. + match self + .client + .branch_commits(uses.owner(), uses.repo(), head_ref) + .await + { + Ok(branch_commits) => return Ok(branch_commits.is_empty()), + Err(e) => tracing::warn!("fast path impostor check failed for {uses}: {e}"), + } + + // Slow path: use GitHub's comparison API to check each branch and tag's + // history for presence of the commit. 
for branch in &branches { if self .named_ref_contains_commit(uses, &format!("refs/heads/{}", &branch.name), head_ref) @@ -131,7 +147,7 @@ impl ImpostorCommit { async fn get_highest_tag(&self, uses: &RepositoryUses) -> Result, AuditError> { let tags = self .client - .list_tags(&uses.owner, &uses.repo) + .list_tags(uses.owner(), uses.repo()) .await .map_err(Self::err)?; @@ -186,16 +202,16 @@ impl ImpostorCommit { Ok(None) => { tracing::warn!( "No tags found for {}/{}, cannot create fix", - uses.owner, - uses.repo + uses.owner(), + uses.repo() ); return None; } Err(e) => { tracing::error!( "Failed to get latest tag for {}/{}: {}", - uses.owner, - uses.repo, + uses.owner(), + uses.repo(), e ); return None; @@ -203,8 +219,8 @@ impl ImpostorCommit { }; // Build the new uses string with the latest tag - let mut uses_slug = format!("{}/{}", uses.owner, uses.repo); - if let Some(subpath) = &uses.subpath { + let mut uses_slug = format!("{}/{}", uses.owner(), uses.repo()); + if let Some(subpath) = &uses.subpath() { uses_slug.push_str(&format!("/{subpath}")); } let fixed_uses = format!("{uses_slug}@{latest_tag}"); @@ -256,8 +272,13 @@ impl Audit for ImpostorCommit { let mut finding_builder = Self::finding() .severity(Severity::High) .confidence(Confidence::High) + .add_location(step.location_with_grip()) .add_location( - step.location().primary().annotated(IMPOSTOR_ANNOTATION), + step.location() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.raw())) + .primary() + .annotated(IMPOSTOR_ANNOTATION), ); if let Some(fix) = self.create_impostor_fix(uses, &step).await { @@ -279,8 +300,14 @@ impl Audit for ImpostorCommit { let mut finding_builder = Self::finding() .severity(Severity::High) .confidence(Confidence::High) + .add_location(reusable.location_with_grip()) .add_location( - reusable.location().primary().annotated(IMPOSTOR_ANNOTATION), + reusable + .location() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.raw())) + .primary() + 
.annotated(IMPOSTOR_ANNOTATION), ); if let Some(fix) = self.create_reusable_fix(uses, &reusable).await { @@ -310,13 +337,20 @@ impl Audit for ImpostorCommit { let mut finding_builder = Self::finding() .severity(Severity::High) .confidence(Confidence::High) - .add_location(step.location().primary().annotated(IMPOSTOR_ANNOTATION)); + .add_location(step.location_with_grip()) + .add_location( + step.location() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.raw())) + .primary() + .annotated(IMPOSTOR_ANNOTATION), + ); if let Some(fix) = self.create_impostor_fix(uses, step).await { finding_builder = finding_builder.fix(fix); } - findings.push(finding_builder.build(step.action()).map_err(Self::err)?); + findings.push(finding_builder.build(step).map_err(Self::err)?); } Ok(findings) @@ -381,6 +415,7 @@ jobs: // Apply the fix and snapshot test the result let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap(); assert_snapshot!(new_doc.source(), @r" + name: Test Impostor Commit Fix on: push jobs: diff --git a/crates/zizmor/src/audit/insecure_commands.rs b/crates/zizmor/src/audit/insecure_commands.rs index bc101f47..aa9a6a54 100644 --- a/crates/zizmor/src/audit/insecure_commands.rs +++ b/crates/zizmor/src/audit/insecure_commands.rs @@ -295,6 +295,7 @@ jobs: assert!(fixed_document.source().contains("ANOTHER_VAR: also-keep")); insta::assert_snapshot!(fixed_document.source(), @r#" + on: push jobs: @@ -349,6 +350,7 @@ jobs: assert!(fixed_document.source().contains("GLOBAL_VAR: keep-me")); insta::assert_snapshot!(fixed_document.source(), @r#" + on: push env: @@ -403,6 +405,7 @@ jobs: assert!(fixed_document.source().contains("STEP_VAR: keep-me")); insta::assert_snapshot!(fixed_document.source(), @r#" + on: push jobs: @@ -446,6 +449,7 @@ jobs: let fixed_document = fix.apply(workflow.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r#" + on: push jobs: diff --git a/crates/zizmor/src/audit/known_vulnerable_actions.rs 
b/crates/zizmor/src/audit/known_vulnerable_actions.rs index 0e0c54e2..5362c703 100644 --- a/crates/zizmor/src/audit/known_vulnerable_actions.rs +++ b/crates/zizmor/src/audit/known_vulnerable_actions.rs @@ -34,7 +34,7 @@ impl KnownVulnerableActions { &self, uses: &RepositoryUses, ) -> Result)>, AuditError> { - let version = match &uses.git_ref { + let version = match &uses.git_ref() { // If `uses` is pinned to a symbolic ref, we need to perform // feats of heroism to figure out what's going on. // In the "happy" case the symbolic ref is an exact version tag, @@ -54,7 +54,7 @@ impl KnownVulnerableActions { version if !uses.ref_is_commit() => { let Some(commit_ref) = self .client - .commit_for_ref(&uses.owner, &uses.repo, version) + .commit_for_ref(uses.owner(), uses.repo(), version) .await .map_err(Self::err)? else { @@ -65,7 +65,7 @@ impl KnownVulnerableActions { match self .client - .longest_tag_for_commit(&uses.owner, &uses.repo, &commit_ref) + .longest_tag_for_commit(uses.owner(), uses.repo(), &commit_ref) .await .map_err(Self::err)? { @@ -84,7 +84,7 @@ impl KnownVulnerableActions { commit_ref => { match self .client - .longest_tag_for_commit(&uses.owner, &uses.repo, commit_ref) + .longest_tag_for_commit(uses.owner(), uses.repo(), commit_ref) .await .map_err(Self::err)? 
{ @@ -100,7 +100,7 @@ impl KnownVulnerableActions { let vulns = self .client - .gha_advisories(&uses.owner, &uses.repo, &version) + .gha_advisories(uses.owner(), uses.repo(), &version) .await .map_err(Self::err)?; @@ -135,8 +135,8 @@ impl KnownVulnerableActions { target_version: String, step: &impl StepCommon<'doc>, ) -> Result, AuditError> { - let mut uses_slug = format!("{}/{}", uses.owner, uses.repo); - if let Some(subpath) = &uses.subpath { + let mut uses_slug = format!("{}/{}", uses.owner(), uses.repo()); + if let Some(subpath) = &uses.subpath() { uses_slug.push_str(&format!("/{subpath}")); } @@ -162,13 +162,13 @@ impl KnownVulnerableActions { let (target_ref, target_commit) = match self .client - .commit_for_ref(&uses.owner, &uses.repo, &prefixed_version) + .commit_for_ref(uses.owner(), uses.repo(), &prefixed_version) .await { Ok(commit) => commit.map(|commit| (&prefixed_version, commit)), Err(_) => self .client - .commit_for_ref(&uses.owner, &uses.repo, &bare_version) + .commit_for_ref(uses.owner(), uses.repo(), &bare_version) .await .map_err(Self::err)? .map(|commit| (&bare_version, commit)), @@ -176,8 +176,8 @@ impl KnownVulnerableActions { .ok_or_else(|| { Self::err(anyhow!( "Cannot resolve version {bare_version} to commit hash for {}/{}", - uses.owner, - uses.repo + uses.owner(), + uses.repo() )) })?; @@ -208,7 +208,7 @@ impl KnownVulnerableActions { // prefixed with `v` or not. Instead of trying to figure it out // via the GitHub API, we match the style of the current `uses` // clause. 
- let target_version_tag = if uses.git_ref.starts_with('v') { + let target_version_tag = if uses.git_ref().starts_with('v') { prefixed_version } else { bare_version @@ -362,12 +362,7 @@ jobs: let step = &steps[0]; // Test the fix directly - let uses = RepositoryUses { - owner: "actions".to_string(), - repo: "checkout".to_string(), - git_ref: "v2".to_string(), - subpath: None, - }; + let uses = RepositoryUses::parse("actions/checkout@v2").unwrap(); let audit = create_test_audit(); let fix = audit @@ -377,6 +372,7 @@ jobs: let fixed_document = fix.apply(workflow.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r#" + name: Test Vulnerable Actions on: push jobs: @@ -417,12 +413,7 @@ jobs: let step = &steps[0]; // Test the fix directly - let uses = RepositoryUses { - owner: "actions".to_string(), - repo: "setup-node".to_string(), - git_ref: "v1".to_string(), - subpath: None, - }; + let uses = RepositoryUses::parse("actions/setup-node@v1").unwrap(); let audit = create_test_audit(); let fix = audit @@ -431,7 +422,8 @@ jobs: .unwrap(); let fixed_document = fix.apply(workflow.as_document()).unwrap(); - insta::assert_snapshot!(fixed_document.source(), @r#" + insta::assert_snapshot!(fixed_document.source(), @r" + name: Test Node Setup on: push jobs: @@ -444,7 +436,7 @@ jobs: node-version: '18' - name: Install dependencies run: npm install - "#); + "); } #[tokio::test] @@ -474,12 +466,7 @@ jobs: let step = &steps[0]; // Test the fix directly - let uses = RepositoryUses { - owner: "codecov".to_string(), - repo: "codecov-action".to_string(), - git_ref: "v1".to_string(), - subpath: None, - }; + let uses = RepositoryUses::parse("codecov/codecov-action@v1").unwrap(); let audit = create_test_audit(); let fix = audit @@ -489,6 +476,7 @@ jobs: let fixed_document = fix.apply(workflow.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r#" + name: Test Third Party Action on: push jobs: @@ -541,12 +529,7 @@ jobs: let audit = 
create_test_audit(); // Fix checkout action - let uses_checkout = RepositoryUses { - owner: "actions".to_string(), - repo: "checkout".to_string(), - git_ref: "v2".to_string(), - subpath: None, - }; + let uses_checkout = RepositoryUses::parse("actions/checkout@v2").unwrap(); let fix_checkout = audit .create_upgrade_fix(&uses_checkout, "v4".into(), &steps[0]) .await @@ -554,12 +537,7 @@ jobs: current_document = fix_checkout.apply(¤t_document).unwrap(); // Fix setup-node action - let uses_node = RepositoryUses { - owner: "actions".to_string(), - repo: "setup-node".to_string(), - git_ref: "v1".to_string(), - subpath: None, - }; + let uses_node = RepositoryUses::parse("actions/setup-node@v1").unwrap(); let fix_node = audit .create_upgrade_fix(&uses_node, "v4".into(), &steps[1]) .await @@ -567,19 +545,15 @@ jobs: current_document = fix_node.apply(¤t_document).unwrap(); // Fix cache action - let uses_cache = RepositoryUses { - owner: "actions".to_string(), - repo: "cache".to_string(), - git_ref: "v2".to_string(), - subpath: None, - }; + let uses_cache = RepositoryUses::parse("actions/cache@v2").unwrap(); let fix_cache = audit .create_upgrade_fix(&uses_cache, "v4".into(), &steps[2]) .await .unwrap(); current_document = fix_cache.apply(¤t_document).unwrap(); - insta::assert_snapshot!(current_document.source(), @r#" + insta::assert_snapshot!(current_document.source(), @r" + name: Test Multiple Vulnerable Actions on: push jobs: @@ -599,7 +573,7 @@ jobs: key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - name: Install dependencies run: npm install - "#); + "); } #[tokio::test] @@ -627,12 +601,7 @@ jobs: let step = &steps[0]; // Test the fix with subpath - let uses = RepositoryUses { - owner: "owner".to_string(), - repo: "repo".to_string(), - git_ref: "v1".to_string(), - subpath: Some("subpath".to_string()), - }; + let uses = RepositoryUses::parse("owner/repo/subpath@v1").unwrap(); let audit = create_test_audit(); let fix = audit @@ -642,6 +611,7 @@ jobs: let 
fixed_document = fix.apply(workflow.as_document()).unwrap(); insta::assert_snapshot!(fixed_document.source(), @r" + name: Test Action with Subpath on: push jobs: @@ -678,12 +648,7 @@ jobs: }; let step = &steps[0]; - let uses = RepositoryUses { - owner: "actions".to_string(), - repo: "checkout".to_string(), - git_ref: "v2".to_string(), - subpath: None, - }; + let uses = RepositoryUses::parse("actions/checkout@v2").unwrap(); // Test that when first_patched_version is provided, it's used let audit = create_test_audit(); @@ -695,7 +660,8 @@ jobs: .apply(workflow.as_document()) .unwrap(); - insta::assert_snapshot!(fixed_document.source(), @r#" + insta::assert_snapshot!(fixed_document.source(), @r" + name: Test First Patched Version Priority on: push jobs: @@ -704,7 +670,7 @@ jobs: steps: - name: Vulnerable action uses: actions/checkout@v3.1.0 - "#); + "); } #[tokio::test] @@ -729,12 +695,7 @@ jobs: }; let step = &steps[0]; - let uses = RepositoryUses { - owner: "actions".to_string(), - repo: "checkout".to_string(), - git_ref: "v2".to_string(), - subpath: None, - }; + let uses = RepositoryUses::parse("actions/checkout@v2").unwrap(); let audit = create_test_audit(); let fix = audit @@ -745,6 +706,7 @@ jobs: let new_doc = fix.apply(workflow.as_document()).unwrap(); assert_snapshot!(new_doc.source(), @r" + name: Test Non-Commit Ref on: push jobs: @@ -808,6 +770,7 @@ jobs: let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap(); assert_snapshot!(new_doc.source(), @r" + name: Test Commit Hash Pinning Real API on: push permissions: {} @@ -864,6 +827,7 @@ jobs: let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap(); assert_snapshot!(new_doc.source(), @r" + name: Test Commit Hash Pinning Real API on: push permissions: {} diff --git a/crates/zizmor/src/audit/mod.rs b/crates/zizmor/src/audit/mod.rs index 65bc5949..f1704388 100644 --- a/crates/zizmor/src/audit/mod.rs +++ b/crates/zizmor/src/audit/mod.rs @@ -18,6 +18,7 @@ use crate::{ }; pub(crate) mod 
anonymous_definition; +pub(crate) mod archived_uses; pub(crate) mod artipacked; pub(crate) mod bot_conditions; pub(crate) mod cache_poisoning; @@ -188,10 +189,9 @@ pub(crate) enum AuditLoadError { } #[derive(Error, Debug)] -#[error("error in {ident}")] +#[error("error in '{ident}' audit")] pub(crate) struct AuditError { ident: &'static str, - #[source] source: anyhow::Error, } diff --git a/crates/zizmor/src/audit/obfuscation.rs b/crates/zizmor/src/audit/obfuscation.rs index bd82fb73..e1675f3e 100644 --- a/crates/zizmor/src/audit/obfuscation.rs +++ b/crates/zizmor/src/audit/obfuscation.rs @@ -33,7 +33,7 @@ impl Obfuscation { // GitHub happily interprets but otherwise gums up pattern matching // in audits like unpinned-uses, forbidden-uses, and cache-poisoning. // We check for some of these forms of nonsense here and report them. - if let Some(subpath) = uses.subpath.as_deref() { + if let Some(subpath) = uses.subpath() { for component in subpath.split('/') { match component { // . and .. are valid in uses subpaths, but are impossible to @@ -60,7 +60,7 @@ impl Obfuscation { /// Normalizes a uses path by removing unnecessary components like empty slashes, `.`, and `..`. fn normalize_uses_path(&self, uses: &RepositoryUses) -> Option { - let subpath = uses.subpath.as_deref()?; + let subpath = uses.subpath()?; let mut components = Vec::new(); for component in subpath.split('/') { @@ -83,14 +83,19 @@ impl Obfuscation { // If all components were removed, the subpath should be empty if components.is_empty() { - Some(format!("{}/{}@{}", uses.owner, uses.repo, uses.git_ref)) + Some(format!( + "{}/{}@{}", + uses.owner(), + uses.repo(), + uses.git_ref() + )) } else { Some(format!( "{}/{}/{}@{}", - uses.owner, - uses.repo, + uses.owner(), + uses.repo(), components.join("/"), - uses.git_ref + uses.git_ref() )) } } @@ -220,7 +225,10 @@ impl Obfuscation { } } crate::models::StepBodyCommon::Run { .. 
} => { - if let Some("cmd" | "cmd.exe") = step.shell().map(utils::normalize_shell) { + if let Some(("cmd" | "cmd.exe", shell_loc)) = step + .shell() + .map(|(shell, loc)| (utils::normalize_shell(shell), loc)) + { // `shell: cmd` is basically impossible to analyze: it has no formal // grammar and has several line continuation mechanisms that stymie // naive matching. It also hasn't been the default shell on Windows @@ -230,11 +238,10 @@ impl Obfuscation { .confidence(Confidence::High) .severity(Severity::Low) .add_location( - step.location() - .primary() - .with_keys(["shell".into()]) + step.location_with_grip() .annotated("Windows CMD shell limits analysis"), ) + .add_location(shell_loc.primary()) .tip("use 'shell: pwsh' or 'shell: bash' for improved analysis") .build(step) .map_err(Self::err)?, @@ -400,7 +407,8 @@ jobs: "#; let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await; - insta::assert_snapshot!(result, @r#" + insta::assert_snapshot!(result, @r" + name: Test Workflow on: push @@ -409,7 +417,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - "#); + "); } #[tokio::test] @@ -426,7 +434,8 @@ jobs: "#; let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await; - insta::assert_snapshot!(result, @r#" + insta::assert_snapshot!(result, @r" + name: Test Workflow on: push @@ -435,7 +444,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: github/codeql-action/init@v2 - "#); + "); } #[tokio::test] @@ -452,7 +461,8 @@ jobs: "#; let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await; - insta::assert_snapshot!(result, @r#" + insta::assert_snapshot!(result, @r" + name: Test Workflow on: push @@ -461,6 +471,6 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/cache/save@v4 - "#); + "); } } diff --git a/crates/zizmor/src/audit/ref_confusion.rs b/crates/zizmor/src/audit/ref_confusion.rs index d0a69320..ef589a34 100644 --- a/crates/zizmor/src/audit/ref_confusion.rs +++ 
b/crates/zizmor/src/audit/ref_confusion.rs @@ -43,12 +43,12 @@ impl RefConfusion { // TODO: use a tokio JoinSet here? let branches_match = self .client - .has_branch(&uses.owner, &uses.repo, sym_ref) + .has_branch(uses.owner(), uses.repo(), sym_ref) .await .map_err(Self::err)?; let tags_match = self .client - .has_tag(&uses.owner, &uses.repo, sym_ref) + .has_tag(uses.owner(), uses.repo(), sym_ref) .await .map_err(Self::err)?; @@ -158,7 +158,7 @@ impl Audit for RefConfusion { .with_keys(["uses".into()]) .annotated(REF_CONFUSION_ANNOTATION), ) - .build(step.action()) + .build(step) .map_err(Self::err)?, ); } diff --git a/crates/zizmor/src/audit/ref_version_mismatch.rs b/crates/zizmor/src/audit/ref_version_mismatch.rs index 3c2e9a1e..523f9d0f 100644 --- a/crates/zizmor/src/audit/ref_version_mismatch.rs +++ b/crates/zizmor/src/audit/ref_version_mismatch.rs @@ -106,7 +106,7 @@ impl RefVersionMismatch { let Some(commit_for_ref) = self .client - .commit_for_ref(&uses.owner, &uses.repo, version_from_comment) + .commit_for_ref(uses.owner(), uses.repo(), version_from_comment) .await .map_err(Self::err)? else { @@ -138,7 +138,7 @@ impl RefVersionMismatch { if let Some(suggestion) = self .client - .longest_tag_for_commit(&uses.owner, &uses.repo, commit_sha) + .longest_tag_for_commit(uses.owner(), uses.repo(), commit_sha) .await .map_err(Self::err)? 
{ diff --git a/crates/zizmor/src/audit/secrets_inherit.rs b/crates/zizmor/src/audit/secrets_inherit.rs index 57d7b739..bb20e941 100644 --- a/crates/zizmor/src/audit/secrets_inherit.rs +++ b/crates/zizmor/src/audit/secrets_inherit.rs @@ -1,10 +1,10 @@ use github_actions_models::workflow::job::Secrets; +use subfeature::Subfeature; use super::{Audit, AuditLoadError, AuditState, audit_meta}; use crate::{ audit::AuditError, finding::{Confidence, location::Locatable as _}, - models::workflow::JobExt as _, }; pub(crate) struct SecretsInherit; @@ -38,6 +38,7 @@ impl Audit for SecretsInherit { job.location() .primary() .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, job.uses.raw())) .annotated("this reusable workflow"), ) .add_location( @@ -47,7 +48,7 @@ impl Audit for SecretsInherit { ) .confidence(Confidence::High) .severity(crate::finding::Severity::Medium) - .build(job.parent())?, + .build(job)?, ); } diff --git a/crates/zizmor/src/audit/stale_action_refs.rs b/crates/zizmor/src/audit/stale_action_refs.rs index 1bf7eea5..59cfb72e 100644 --- a/crates/zizmor/src/audit/stale_action_refs.rs +++ b/crates/zizmor/src/audit/stale_action_refs.rs @@ -2,6 +2,7 @@ use anyhow::anyhow; use github_actions_models::common::{RepositoryUses, Uses}; +use subfeature::Subfeature; use super::{Audit, AuditLoadError, audit_meta}; use crate::{ @@ -29,7 +30,7 @@ impl StaleActionRefs { let tag = match &uses.commit_ref() { Some(commit_ref) => self .client - .longest_tag_for_commit(&uses.owner, &uses.repo, commit_ref) + .longest_tag_for_commit(uses.owner(), uses.repo(), commit_ref) .await .map_err(Self::err)?, None => return Ok(false), @@ -53,7 +54,12 @@ impl StaleActionRefs { .confidence(Confidence::High) .severity(Severity::Low) .persona(Persona::Pedantic) - .add_location(step.location().primary().with_keys(["uses".into()])) + .add_location( + step.location() + .primary() + .with_keys(["uses".into()]) + .subfeature(Subfeature::new(0, uses.raw())), + ) .build(step)?, ); } diff --git 
a/crates/zizmor/src/audit/template_injection.rs b/crates/zizmor/src/audit/template_injection.rs index 2d8cf81f..52046297 100644 --- a/crates/zizmor/src/audit/template_injection.rs +++ b/crates/zizmor/src/audit/template_injection.rs @@ -185,7 +185,7 @@ impl TemplateInjection { return None; } - let shell = utils::normalize_shell(step.shell()?); + let shell = utils::normalize_shell(step.shell()?.0); match shell { "bash" | "sh" | "zsh" => Some(format!("${{{env_var}}}")), @@ -707,6 +707,7 @@ jobs: "replace expression with environment variable", ); insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection on: push jobs: @@ -758,6 +759,7 @@ jobs: "replace expression with environment variable", ); insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection on: push jobs: @@ -811,6 +813,7 @@ jobs: "replace expression with environment variable", ); insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection on: push jobs: @@ -920,6 +923,7 @@ jobs: } insta::assert_snapshot!(current_document.source(), @r#" + name: Test Multiple Template Injections on: push jobs: @@ -986,6 +990,7 @@ jobs: } insta::assert_snapshot!(current_document.source(), @r#" + name: Test Duplicate Template Injections on: push jobs: @@ -1046,6 +1051,7 @@ jobs: } insta::assert_snapshot!(current_document.source(), @r#" + name: Test Duplicate Template Injections on: push jobs: @@ -1109,6 +1115,7 @@ jobs: } insta::assert_snapshot!(current_document.source(), @r#" + name: Test Duplicate Template Injections on: push jobs: @@ -1225,6 +1232,7 @@ jobs: "replace expression with environment variable", ); insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection - Bash on: push jobs: @@ -1271,6 +1279,7 @@ jobs: "replace expression with environment variable", ); insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection - Bash on: push jobs: @@ -1316,7 +1325,8 @@ jobs: finding, "replace 
expression with environment variable", ); - insta::assert_snapshot!(fixed_content.source(), @r#" + insta::assert_snapshot!(fixed_content.source(), @r" + name: Test Template Injection - CMD on: push jobs: @@ -1326,7 +1336,7 @@ jobs: - name: Vulnerable step with cmd shell shell: cmd run: echo User is %GITHUB_ACTOR% - "#); + "); } } ); @@ -1363,6 +1373,7 @@ jobs: "replace expression with environment variable", ); insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection - PowerShell on: push jobs: @@ -1409,6 +1420,7 @@ jobs: ); // Ubuntu default shell is bash, so should use ${VAR} syntax insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection - Default Shell Ubuntu on: push jobs: @@ -1454,6 +1466,7 @@ jobs: ); // Windows default shell is pwsh, so should use $env:VAR syntax insta::assert_snapshot!(fixed_content.source(), @r#" + name: Test Template Injection - Default Shell Windows on: push jobs: @@ -1498,7 +1511,8 @@ jobs: finding, "replace expression with environment variable", ); - insta::assert_snapshot!(fixed_content.source(), @r#" + insta::assert_snapshot!(fixed_content.source(), @r" + name: Test Template Injection - CMD with Custom Env on: push jobs: @@ -1510,7 +1524,7 @@ jobs: run: echo PR title is %GITHUB_EVENT_PULL_REQUEST_TITLE% env: GITHUB_EVENT_PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }} - "#); + "); } } ); diff --git a/crates/zizmor/src/audit/unpinned_images.rs b/crates/zizmor/src/audit/unpinned_images.rs index 96f2fda2..43b42917 100644 --- a/crates/zizmor/src/audit/unpinned_images.rs +++ b/crates/zizmor/src/audit/unpinned_images.rs @@ -4,7 +4,6 @@ use crate::{ Confidence, Finding, Persona, Severity, location::{Locatable as _, SymbolicLocation}, }, - models::workflow::JobExt as _, state::AuditState, }; @@ -30,7 +29,7 @@ impl UnpinnedImages { .confidence(Confidence::High) .add_location(annotated_location) .persona(persona) - .build(job.parent()) + .build(job) } } @@ -52,13 +51,11 @@ impl 
Audit for UnpinnedImages { _config: &crate::config::Config, ) -> anyhow::Result>, AuditError> { let mut findings = vec![]; - let mut image_refs_with_locations: Vec<(DockerUses, SymbolicLocation<'doc>)> = vec![]; + let mut image_refs_with_locations: Vec<(&'doc DockerUses, SymbolicLocation<'doc>)> = vec![]; if let Some(Container::Container { image, .. }) = &job.container { image_refs_with_locations.push(( - image - .parse() - .expect("failed to parse job container image as DockerUses"), + image, job.location() .primary() .with_keys(["container".into(), "image".into()]), @@ -68,9 +65,7 @@ impl Audit for UnpinnedImages { for (service, config) in job.services.iter() { if let Container::Container { image, .. } = &config { image_refs_with_locations.push(( - image - .parse() - .expect("failed to parse service container image as DockerUses"), + image, job.location().primary().with_keys([ "services".into(), service.as_str().into(), @@ -81,9 +76,9 @@ impl Audit for UnpinnedImages { } for (image, location) in image_refs_with_locations { - match image.hash { + match image.hash() { Some(_) => continue, - None => match image.tag.as_deref() { + None => match image.tag() { Some("latest") => { findings.push(self.build_finding( location, diff --git a/crates/zizmor/src/audit/unpinned_uses.rs b/crates/zizmor/src/audit/unpinned_uses.rs index 5854035b..6cbebfed 100644 --- a/crates/zizmor/src/audit/unpinned_uses.rs +++ b/crates/zizmor/src/audit/unpinned_uses.rs @@ -1,4 +1,5 @@ use github_actions_models::common::Uses; +use subfeature::Subfeature; use super::{Audit, AuditLoadError, AuditState, audit_meta}; use crate::audit::AuditError; @@ -31,7 +32,7 @@ impl UnpinnedUses { Uses::Docker(_) => { if uses.unpinned() { Some(( - "action is not pinned to a tag, branch, or hash ref".into(), + "image is not pinned to a tag, branch, or hash ref".into(), Severity::Medium, Persona::default(), )) @@ -108,6 +109,7 @@ impl UnpinnedUses { step.location() .primary() .with_keys(["uses".into()]) + 
.subfeature(Subfeature::new(0, uses.raw())) .annotated(annotation), ) .build(step)?, diff --git a/crates/zizmor/src/audit/unsound_condition.rs b/crates/zizmor/src/audit/unsound_condition.rs index fc0a282c..e17e951d 100644 --- a/crates/zizmor/src/audit/unsound_condition.rs +++ b/crates/zizmor/src/audit/unsound_condition.rs @@ -6,7 +6,7 @@ use crate::{ Confidence, Fix, FixDisposition, Severity, location::{Locatable as _, SymbolicLocation}, }, - models::{AsDocument, workflow::JobExt}, + models::AsDocument, utils, }; use yamlpatch::{Op, Patch}; @@ -158,7 +158,7 @@ impl Audit for UnsoundCondition { job: &crate::models::workflow::NormalJob<'doc>, _config: &crate::config::Config, ) -> Result>, AuditError> { - self.process_conditions(job.parent(), job.conditions()) + self.process_conditions(job, job.conditions()) } async fn audit_reusable_job<'doc>( @@ -167,7 +167,7 @@ impl Audit for UnsoundCondition { _config: &crate::config::Config, ) -> Result>, AuditError> { let conds = job.r#if.iter().map(|cond| (cond, job.location())); - self.process_conditions(job.parent(), conds) + self.process_conditions(job, conds) } async fn audit_action<'doc>( @@ -244,6 +244,7 @@ jobs: let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings); insta::assert_snapshot!(fixed_document.source(), @r#" + name: Test on: push jobs: @@ -283,6 +284,7 @@ jobs: let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings); insta::assert_snapshot!(fixed_document.source(), @r#" + name: Test on: push jobs: @@ -323,6 +325,7 @@ jobs: let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings); insta::assert_snapshot!(fixed_document.source(), @r#" + name: Test on: push jobs: @@ -367,6 +370,7 @@ jobs: let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings); insta::assert_snapshot!(fixed_document.source(), @r#" + name: Test on: push jobs: @@ -406,7 +410,8 @@ jobs: assert_eq!(findings.len(), 1); let fixed_document = 
apply_fix_for_snapshot(workflow.as_document(), findings); - insta::assert_snapshot!(fixed_document.source(), @r#" + insta::assert_snapshot!(fixed_document.source(), @r" + name: Test on: push jobs: @@ -414,7 +419,7 @@ jobs: if: |- ${{ github.event_name == 'pull_request' }} uses: ./.github/workflows/reusable.yml - "#); + "); } ); } @@ -463,6 +468,7 @@ jobs: } insta::assert_snapshot!(document.source(), @r#" + name: Test on: push jobs: diff --git a/crates/zizmor/src/audit/unsound_contains.rs b/crates/zizmor/src/audit/unsound_contains.rs index dc179699..893e84d2 100644 --- a/crates/zizmor/src/audit/unsound_contains.rs +++ b/crates/zizmor/src/audit/unsound_contains.rs @@ -9,7 +9,6 @@ use super::{Audit, AuditLoadError, AuditState, audit_meta}; use crate::{ audit::AuditError, finding::{Confidence, Severity}, - models::workflow::JobExt as _, utils::{self, ExtractedExpr}, }; @@ -68,7 +67,7 @@ impl Audit for UnsoundContains { .primary() .annotated(format!("contains(..) condition can be bypassed if attacker can control '{context}'")), ) - .build(job.parent()) + .build(job) }) }) .collect() diff --git a/crates/zizmor/src/audit/use_trusted_publishing.rs b/crates/zizmor/src/audit/use_trusted_publishing.rs index 2d33b68c..7c1bb86d 100644 --- a/crates/zizmor/src/audit/use_trusted_publishing.rs +++ b/crates/zizmor/src/audit/use_trusted_publishing.rs @@ -1,4 +1,3 @@ -use std::collections::HashSet; use std::{sync::LazyLock, vec}; use anyhow::Context as _; @@ -13,7 +12,7 @@ use crate::{ models::{ StepBodyCommon, StepCommon, coordinate::{ActionCoordinate, ControlExpr, ControlFieldType, Toggle}, - workflow::JobExt as _, + workflow::JobCommon as _, }, state::AuditState, utils, @@ -180,66 +179,101 @@ impl UseTrustedPublishing { match cmd { "cargo" => { - let args = args.collect::>(); - // Looking for `cargo ... publish` without `--dry-run` or `-n`. 
- args.contains("publish") && !args.contains("--dry-run") && !args.contains("-n") + + args.any(|arg| arg == "publish") + && args.all(|arg| arg != "--dry-run" && arg != "-n") } "uv" => { - let args = args.collect::>(); + match args.find(|arg| *arg == "publish" || *arg == "run") { + Some("publish") => { + // `uv ... publish` without `--dry-run`. + args.all(|arg| arg != "--dry-run") + } + Some("run") => { + // `uv ... run ... twine ... upload`. + args.any(|arg| arg == "twine") && args.any(|arg| arg == "upload") + } + _ => false, + } + } + "uvx" => { + // Looking for `uvx twine ... upload`. + // Like with pipx, we loosely match the `twine` part + // to allow for version specifiers. In uvx's case, these + // are formatted like `twine@X.Y.Z`. - // Looking for `uv ... publish` without `--dry-run`. - args.contains("publish") && !args.contains("--dry-run") + args.any(|arg| arg.starts_with("twine")) && args.any(|arg| arg == "upload") } "hatch" | "pdm" => { // Looking for `hatch ... publish` or `pdm ... publish`. args.any(|arg| arg == "publish") } + "poetry" => { + // Looking for `poetry ... publish` without `--dry-run`. + // + // Poetry has no support for Trusted Publishing at all as + // of 2025-12-1: https://github.com/python-poetry/poetry/issues/7940 + args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run") + } "twine" => { // Looking for `twine ... upload`. args.any(|arg| arg == "upload") } + "pipx" => { + // TODO: also match `pipx ... run ... uv ... publish`, etc. + + // Looking for `pipx ... run ... twine ... upload`. + // + // A wrinkle here is that `pipx run` takes version specifiers + // too, e.g. `pipx run twine==X.Y.Z upload ...`. So we only + // loosely match the `twine` part. + args.any(|arg| arg == "run") + && args.any(|arg| arg.starts_with("twine")) + && args.any(|arg| arg == "upload") + } + _ if cmd.starts_with("python") => { + // Looking for `python* ... -m ... twine ... upload`. 
+ args.any(|arg| arg == "-m") + && args.any(|arg| arg == "twine") + && args.any(|arg| arg == "upload") + } "gem" => { // Looking for `gem ... push`. args.any(|arg| arg == "push") } + "bundle" => { + // Looking for `bundle ... exec ... gem ... push`. + args.any(|arg| arg == "exec") + && args.any(|arg| arg == "gem") + && args.any(|arg| arg == "push") + } "npm" => { - let args = args.collect::>(); + // Looking for `npm ... publish` without `--dry-run`. // TODO: Figure out `npm run ... publish` patterns. - - // Looking for `npm ... publish` without `--dry-run`. - args.contains("publish") && !args.contains("--dry-run") + args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run") } "yarn" => { - let args = args.collect::>(); - // TODO: Figure out `yarn run ... publish` patterns. // TODO: Figure out `yarn ... publish` patterns for lerna/npm workspaces. // Looking for `yarn ... npm publish` without `--dry-run` or `-n`. - args.contains("npm") - && args.contains("publish") - && !args.contains("--dry-run") - && !args.contains("-n") + args.any(|arg| arg == "npm") && args.all(|arg| arg != "--dry-run" && arg != "-n") } "pnpm" => { - let args = args.collect::>(); - // TODO: Figure out `pnpm run ... publish` patterns. // Looking for `pnpm ... publish` without `--dry-run`. - args.contains("publish") && !args.contains("--dry-run") + args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run") } "nuget" | "nuget.exe" => { // Looking for `nuget ... push`. args.any(|arg| arg == "push") } "dotnet" => { - // Looking for `dotnet ... nuget push`. - args.next() - .map(|cmd| cmd == "nuget" && Self::is_publish_command(cmd, args)) - .unwrap_or(false) + // Looking for `dotnet ... nuget ... push`. + args.any(|arg| arg == "nuget") && args.any(|arg| arg == "push") } _ => false, } @@ -396,7 +430,7 @@ impl Audit for UseTrustedPublishing { if let StepBodyCommon::Run { run, .. 
} = step.body() && !step.parent.has_id_token() { - let shell = step.shell().unwrap_or_else(|| { + let shell = step.shell().map(|s| s.0).unwrap_or_else(|| { tracing::debug!( "use-trusted-publishing: couldn't determine shell type for {workflow}:{job} step {stepno}", workflow = step.workflow().key.filename(), @@ -442,3 +476,69 @@ impl Audit for UseTrustedPublishing { self.process_step(step) } } + +#[cfg(test)] +mod tests { + #[test] + fn test_is_publish_command() { + for (args, is_publish_command) in &[ + (&["cargo", "publish"][..], true), + (&["cargo", "publish", "-p", "foo"][..], true), + (&["cargo", "publish", "--dry-run"][..], false), + (&["cargo", "publish", "-n"][..], false), + (&["cargo", "build"][..], false), + (&["uv", "publish"][..], true), + (&["uv", "publish", "dist/*"][..], true), + (&["uv", "publish", "--dry-run"][..], false), + (&["uv", "run", "--dev", "twine", "upload"][..], true), + (&["uv", "run", "twine", "upload"][..], true), + (&["uv"][..], false), + (&["uv", "sync"][..], false), + (&["uvx", "twine", "upload"][..], true), + (&["uvx", "twine@3.4.1", "upload"][..], true), + (&["uvx", "twine@6.1.0", "upload"][..], true), + (&["uvx", "twine"][..], false), + (&["poetry", "publish"][..], true), + (&["poetry", "publish", "--dry-run"][..], false), + (&["hatch", "publish"][..], true), + (&["pdm", "publish"][..], true), + (&["twine", "upload", "dist/*"][..], true), + (&["pipx", "run", "twine", "upload", "dist/*"][..], true), + ( + &["pipx", "run", "twine==3.4.1", "upload", "dist/*"][..], + true, + ), + ( + &["pipx", "run", "twine==6.1.0", "upload", "dist/*"][..], + true, + ), + (&["python", "-m", "twine", "upload", "dist/*"][..], true), + (&["python3.9", "-m", "twine", "upload", "dist/*"][..], true), + (&["twine", "check", "dist/*"], false), + (&["gem", "push", "mygem-0.1.0.gem"][..], true), + ( + &["bundle", "exec", "gem", "push", "mygem-0.1.0.gem"][..], + true, + ), + (&["npm", "publish"][..], true), + (&["npm", "run", "publish"][..], true), + (&["npm", 
"publish", "--dry-run"][..], false), + (&["yarn", "npm", "publish"][..], true), + (&["yarn", "npm", "publish", "--dry-run"][..], false), + (&["pnpm", "publish"][..], true), + (&["pnpm", "publish", "--dry-run"][..], false), + (&["nuget", "push", "MyPackage.nupkg"][..], true), + (&["nuget.exe", "push", "MyPackage.nupkg"][..], true), + (&["dotnet", "nuget", "push", "MyPackage.nupkg"][..], true), + (&["dotnet", "build"][..], false), + ] { + let cmd = args[0]; + let args_iter = args[1..].iter().map(|s| *s); + assert_eq!( + super::UseTrustedPublishing::is_publish_command(cmd, args_iter), + *is_publish_command, + "cmd: {cmd:?}, args: {args:?}" + ); + } + } +} diff --git a/crates/zizmor/src/config.rs b/crates/zizmor/src/config.rs index 83c17288..60fc2f8e 100644 --- a/crates/zizmor/src/config.rs +++ b/crates/zizmor/src/config.rs @@ -21,7 +21,12 @@ use crate::{ registry::input::RepoSlug, }; -const CONFIG_CANDIDATES: &[&str] = &[".github/zizmor.yml", "zizmor.yml"]; +const CONFIG_CANDIDATES: &[&str] = &[ + ".github/zizmor.yml", + ".github/zizmor.yaml", + "zizmor.yml", + "zizmor.yaml", +]; #[derive(Error, Debug)] #[error("configuration error in {path}")] @@ -254,7 +259,7 @@ impl UnpinnedUsesPolicies { &self, uses: &RepositoryUses, ) -> (Option<&RepositoryUsesPattern>, UsesPolicy) { - match self.policy_tree.get(&uses.owner) { + match self.policy_tree.get(uses.owner()) { Some(policies) => { // Policies are ordered by specificity, so we can // iterate and return eagerly. 
diff --git a/crates/zizmor/src/data/dependabot-2.0.json b/crates/zizmor/src/data/dependabot-2.0.json index d7f23e4d..235fd156 100644 --- a/crates/zizmor/src/data/dependabot-2.0.json +++ b/crates/zizmor/src/data/dependabot-2.0.json @@ -647,24 +647,28 @@ }, "package-ecosystem-values": { "enum": [ + "bazel", "bun", "bundler", "cargo", "composer", + "conda", "devcontainers", "docker", "docker-compose", "dotnet-sdk", "elm", - "gitsubmodule", "github-actions", + "gitsubmodule", "gomod", "gradle", "helm", + "julia", "maven", "mix", "npm", "nuget", + "opentofu", "pip", "pub", "rust-toolchain", @@ -1045,6 +1049,21 @@ "versioning-strategy": { "$ref": "#/definitions/versioning-strategy", "description": "How to update manifest version requirements" + }, + "patterns": { + "description": "Array of dependency patterns to include in a multi-ecosystem group. Required when using multi-ecosystem-group. Use '*' to include all dependencies.", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "minItems": 1, + "uniqueItems": true + }, + "multi-ecosystem-group": { + "description": "String identifier linking this ecosystem to a multi-ecosystem group", + "type": "string", + "minLength": 1 } }, "allOf": [ @@ -1056,6 +1075,15 @@ { "required": ["directories"] }, { "required": ["directory"] } ] + }, + { + "$comment": "If multi-ecosystem-group is specified, patterns is required", + "if": { + "required": ["multi-ecosystem-group"] + }, + "then": { + "required": ["patterns"] + } } ] }, @@ -1132,6 +1160,129 @@ } }, "minProperties": 1 + }, + "multi-ecosystem-group": { + "type": "object", + "description": "Define a group that spans multiple package ecosystems, allowing consolidated pull requests across different ecosystems", + "additionalProperties": false, + "properties": { + "schedule": { + "description": "Schedule preferences for the group", + "type": "object", + "properties": { + "interval": { + "$ref": "#/definitions/schedule-interval" + }, + "day": { + "$ref": 
"#/definitions/schedule-day", + "description": "Specify an alternative day to check for updates" + }, + "time": { + "type": "string", + "description": "Specify an alternative time of day to check for updates (format: hh:mm)", + "pattern": "^([01][0-9]|2[0-3]):[0-5][0-9]$" + }, + "timezone": { + "$ref": "#/definitions/timezone", + "description": "The time zone identifier must be from the Time Zone database maintained by IANA" + }, + "cronjob": { + "type": "string", + "description": "Specify a valid cron expression for updates" + } + }, + "allOf": [ + { + "$comment": "If interval type is 'cron', enforce 'cronjob' property.", + "if": { + "properties": { + "interval": { + "const": "cron" + } + } + }, + "then": { + "required": ["interval", "cronjob"] + }, + "else": { + "required": ["interval"] + } + } + ] + }, + "labels": { + "description": "Labels to set on pull requests (additive - merges with ecosystem-level labels)", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "minItems": 0, + "uniqueItems": true + }, + "assignees": { + "description": "Assignees to set on pull requests (additive - merges with ecosystem-level assignees)", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + }, + "minItems": 1, + "uniqueItems": true + }, + "milestone": { + "description": "Associate all pull requests raised for this group with a milestone. 
You need to specify the numeric identifier of the milestone and not its label.", + "type": "integer", + "minimum": 1 + }, + "target-branch": { + "description": "Specify a different branch for manifest files and for pull requests.", + "type": "string", + "minLength": 1 + }, + "commit-message": { + "description": "Commit message preferences for the group", + "type": "object", + "properties": { + "prefix": { + "description": "A prefix for all commit messages", + "type": "string", + "maxLength": 50 + }, + "prefix-development": { + "description": "A separate prefix for all commit messages that update dependencies in the Development dependency group", + "type": "string", + "maxLength": 50 + }, + "include": { + "description": "Specifies that any prefix is followed by a list of the dependencies updated in the commit", + "type": "string", + "enum": ["scope"] + } + }, + "anyOf": [ + { "required": ["prefix"] }, + { "required": ["prefix-development"] }, + { "required": ["include"] } + ], + "additionalProperties": false + }, + "pull-request-branch-name": { + "description": "Pull request branch name preferences for the group", + "type": "object", + "properties": { + "separator": { + "description": "Change separator for PR branch name", + "type": "string", + "default": "/", + "enum": ["-", "_", "/"] + } + }, + "required": ["separator"], + "additionalProperties": false + } + }, + "required": ["schedule"] } }, "properties": { @@ -1155,6 +1306,14 @@ }, "registries": { "$ref": "#/definitions/registry" + }, + "multi-ecosystem-groups": { + "type": "object", + "description": "Define groups that span multiple package ecosystems, allowing consolidated pull requests across different ecosystems", + "additionalProperties": { + "$ref": "#/definitions/multi-ecosystem-group" + }, + "minProperties": 1 } }, "required": ["version", "updates"], diff --git a/crates/zizmor/src/finding.rs b/crates/zizmor/src/finding.rs index fcc1dba9..dacce53e 100644 --- a/crates/zizmor/src/finding.rs +++ 
b/crates/zizmor/src/finding.rs @@ -5,7 +5,10 @@ use clap::ValueEnum; use serde::{Deserialize, Serialize}; use self::location::{Location, SymbolicLocation}; -use crate::{InputKey, audit::AuditError, models::AsDocument, registry::input::Group}; +use crate::{ + InputKey, audit::AuditError, finding::location::LocationKind, models::AsDocument, + registry::input::Group, +}; use yamlpatch::{self, Patch}; pub(crate) mod location; @@ -246,7 +249,12 @@ impl<'doc> FindingBuilder<'doc> { locations.extend(self.raw_locations); - if !locations.iter().any(|l| l.symbolic.is_primary()) { + if locations.len() == 1 + && let Some(location) = locations.get_mut(0) + { + // If there's only one location, then it's primary by definition. + location.symbolic.kind = LocationKind::Primary; + } else if !locations.iter().any(|l| l.symbolic.is_primary()) { return Err(AuditError::new( self.ident, anyhow!("API misuse: at least one location must be marked with primary()"), diff --git a/crates/zizmor/src/finding/location.rs b/crates/zizmor/src/finding/location.rs index ea95909d..2fe1d97d 100644 --- a/crates/zizmor/src/finding/location.rs +++ b/crates/zizmor/src/finding/location.rs @@ -208,12 +208,12 @@ pub(crate) trait Locatable<'doc> { fn location(&self) -> SymbolicLocation<'doc>; /// Returns an "enriched" symbolic location of this model, - /// when the model is of a type that has a name. Otherwise, - /// returns the same symbolic location as `location()`. + /// when the model has one or more "grip" fields that are + /// visually useful to key off of (like a `name` or `id` field). /// /// For example, a GitHub Actions workflow step has an optional name, /// which is included in this symbolic location if present. 
- fn location_with_name(&self) -> SymbolicLocation<'doc> { + fn location_with_grip(&self) -> SymbolicLocation<'doc> { self.location() } } diff --git a/crates/zizmor/src/github.rs b/crates/zizmor/src/github.rs index f173c10e..47b1bd4d 100644 --- a/crates/zizmor/src/github.rs +++ b/crates/zizmor/src/github.rs @@ -575,6 +575,29 @@ impl Client { .max_by_key(|t| t.name.len())) } + #[instrument(skip(self))] + pub(crate) async fn branch_commits( + &self, + owner: &str, + repo: &str, + commit: &str, + ) -> Result { + // NOTE(ww): This API is undocumented. + // See: https://github.com/orgs/community/discussions/78161 + let url = format!("https://github.com/{owner}/{repo}/branch_commits/{commit}"); + + // We ask GitHub for JSON, because it sends HTML by default for this endpoint. + self.base_client + .get(&url) + .header(ACCEPT, "application/json") + .send() + .await? + .error_for_status()? + .json() + .await + .map_err(Into::into) + } + #[instrument(skip(self))] pub(crate) async fn compare_commits( &self, @@ -859,6 +882,23 @@ pub(crate) struct Commit { pub(crate) sha: String, } +/// The response structure from GitHub's undocumented `branch_commits` API. +/// +/// This model is intentionally incomplete. 
+#[derive(Clone, Deserialize)] +#[serde(rename_all = "lowercase")] +#[non_exhaustive] +pub(crate) struct BranchCommits { + branches: Vec, + tags: Vec, +} + +impl BranchCommits { + pub(crate) fn is_empty(&self) -> bool { + self.branches.is_empty() && self.tags.is_empty() + } +} + #[derive(Clone, Deserialize)] #[serde(rename_all = "lowercase")] pub(crate) enum ComparisonStatus { diff --git a/crates/zizmor/src/lsp.rs b/crates/zizmor/src/lsp.rs index 46350dc9..218c931d 100644 --- a/crates/zizmor/src/lsp.rs +++ b/crates/zizmor/src/lsp.rs @@ -4,7 +4,7 @@ use std::str::FromStr; use camino::Utf8Path; use thiserror::Error; -use tower_lsp_server::lsp_types::{self, TextDocumentSyncKind}; +use tower_lsp_server::ls_types::{self, TextDocumentSyncKind}; use tower_lsp_server::{Client, LanguageServer, LspService, Server}; use crate::audit::AuditInput; @@ -25,7 +25,7 @@ pub(crate) struct Error { } struct LspDocumentCommon { - uri: lsp_types::Uri, + uri: ls_types::Uri, text: String, version: Option, } @@ -39,35 +39,35 @@ struct Backend { impl LanguageServer for Backend { async fn initialize( &self, - _: lsp_types::InitializeParams, - ) -> tower_lsp_server::jsonrpc::Result { - Ok(lsp_types::InitializeResult { - server_info: Some(lsp_types::ServerInfo { + _: ls_types::InitializeParams, + ) -> tower_lsp_server::jsonrpc::Result { + Ok(ls_types::InitializeResult { + server_info: Some(ls_types::ServerInfo { name: "zizmor (LSP)".into(), version: Some(env!("CARGO_PKG_VERSION").into()), }), - capabilities: lsp_types::ServerCapabilities { - text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind( - lsp_types::TextDocumentSyncKind::FULL, + capabilities: ls_types::ServerCapabilities { + text_document_sync: Some(ls_types::TextDocumentSyncCapability::Kind( + ls_types::TextDocumentSyncKind::FULL, )), ..Default::default() }, }) } - async fn initialized(&self, _: lsp_types::InitializedParams) { + async fn initialized(&self, _: ls_types::InitializedParams) { let selectors = vec![ - 
lsp_types::DocumentFilter { + ls_types::DocumentFilter { language: Some("yaml".into()), scheme: None, pattern: Some("**/.github/workflows/*.{yml,yaml}".into()), }, - lsp_types::DocumentFilter { + ls_types::DocumentFilter { language: Some("yaml".into()), scheme: None, pattern: Some("**/action.{yml,yaml}".into()), }, - lsp_types::DocumentFilter { + ls_types::DocumentFilter { language: Some("yaml".into()), scheme: None, pattern: Some("**/.github/dependabot.{yml,yaml}".into()), @@ -80,46 +80,46 @@ impl LanguageServer for Backend { // neglects to. self.client .register_capability(vec![ - lsp_types::Registration { + ls_types::Registration { id: "zizmor-didopen".into(), method: "textDocument/didOpen".into(), register_options: Some( - serde_json::to_value(lsp_types::TextDocumentRegistrationOptions { + serde_json::to_value(ls_types::TextDocumentRegistrationOptions { document_selector: Some(selectors.clone()), }) .expect("failed to serialize LSP document registration options"), ), }, - lsp_types::Registration { + ls_types::Registration { id: "zizmor-didchange".into(), method: "textDocument/didChange".into(), register_options: Some( - serde_json::to_value(lsp_types::TextDocumentChangeRegistrationOptions { + serde_json::to_value(ls_types::TextDocumentChangeRegistrationOptions { document_selector: Some(selectors.clone()), sync_kind: TextDocumentSyncKind::FULL, }) .expect("failed to serialize LSP document registration options"), ), }, - lsp_types::Registration { + ls_types::Registration { id: "zizmor-didsave".into(), method: "textDocument/didSave".into(), register_options: Some( - serde_json::to_value(lsp_types::TextDocumentSaveRegistrationOptions { + serde_json::to_value(ls_types::TextDocumentSaveRegistrationOptions { include_text: Some(true), text_document_registration_options: - lsp_types::TextDocumentRegistrationOptions { + ls_types::TextDocumentRegistrationOptions { document_selector: Some(selectors.clone()), }, }) .expect("failed to serialize LSP document registration 
options"), ), }, - lsp_types::Registration { + ls_types::Registration { id: "zizmor-didclose".into(), method: "textDocument/didClose".into(), register_options: Some( - serde_json::to_value(lsp_types::TextDocumentRegistrationOptions { + serde_json::to_value(ls_types::TextDocumentRegistrationOptions { document_selector: Some(selectors), }) .expect("failed to serialize LSP document registration options"), @@ -130,7 +130,7 @@ impl LanguageServer for Backend { .expect("failed to register text document capabilities with the LSP client"); self.client - .log_message(lsp_types::MessageType::INFO, "server initialized!") + .log_message(ls_types::MessageType::INFO, "server initialized!") .await; } @@ -139,7 +139,7 @@ impl LanguageServer for Backend { Ok(()) } - async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) { + async fn did_open(&self, params: ls_types::DidOpenTextDocumentParams) { tracing::debug!("did_open: {:?}", params); self.audit(LspDocumentCommon { uri: params.text_document.uri, @@ -149,7 +149,7 @@ impl LanguageServer for Backend { .await; } - async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) { + async fn did_change(&self, params: ls_types::DidChangeTextDocumentParams) { tracing::debug!("did_change: {:?}", params); let mut params = params; let Some(change) = params.content_changes.pop() else { @@ -164,7 +164,7 @@ impl LanguageServer for Backend { .await; } - async fn did_save(&self, params: lsp_types::DidSaveTextDocumentParams) { + async fn did_save(&self, params: ls_types::DidSaveTextDocumentParams) { tracing::debug!("did_save: {:?}", params); if let Some(text) = params.text { self.audit(LspDocumentCommon { @@ -217,15 +217,15 @@ impl Backend { .iter() .map(|finding| { let primary = finding.primary_location(); - lsp_types::Diagnostic { - range: lsp_types::Range { + ls_types::Diagnostic { + range: ls_types::Range { start: primary.concrete.location.start_point.into(), end: primary.concrete.location.end_point.into(), }, 
severity: Some(finding.determinations.severity.into()), - code: Some(lsp_types::NumberOrString::String(finding.ident.into())), - code_description: Some(lsp_types::CodeDescription { - href: lsp_types::Uri::from_str(finding.url) + code: Some(ls_types::NumberOrString::String(finding.ident.into())), + code_description: Some(ls_types::CodeDescription { + href: ls_types::Uri::from_str(finding.url) .expect("finding contains an invalid URL somehow"), }), source: Some("zizmor".into()), @@ -248,25 +248,25 @@ impl Backend { async fn audit(&self, params: LspDocumentCommon) { if let Err(e) = self.audit_inner(params).await { self.client - .log_message(lsp_types::MessageType::ERROR, format!("audit failed: {e}")) + .log_message(ls_types::MessageType::ERROR, format!("audit failed: {e}")) .await; } } } -impl From for lsp_types::DiagnosticSeverity { +impl From for ls_types::DiagnosticSeverity { fn from(value: Severity) -> Self { // TODO: Does this mapping make sense? match value { - Severity::Informational => lsp_types::DiagnosticSeverity::INFORMATION, - Severity::Low => lsp_types::DiagnosticSeverity::WARNING, - Severity::Medium => lsp_types::DiagnosticSeverity::WARNING, - Severity::High => lsp_types::DiagnosticSeverity::ERROR, + Severity::Informational => ls_types::DiagnosticSeverity::INFORMATION, + Severity::Low => ls_types::DiagnosticSeverity::WARNING, + Severity::Medium => ls_types::DiagnosticSeverity::WARNING, + Severity::High => ls_types::DiagnosticSeverity::ERROR, } } } -impl From for lsp_types::Position { +impl From for ls_types::Position { fn from(value: Point) -> Self { Self { line: value.row as u32, diff --git a/crates/zizmor/src/main.rs b/crates/zizmor/src/main.rs index 6a18a00a..620ad0f5 100644 --- a/crates/zizmor/src/main.rs +++ b/crates/zizmor/src/main.rs @@ -2,6 +2,7 @@ use std::{ collections::HashSet, + env, io::{Write, stdout}, process::ExitCode, }; @@ -29,6 +30,7 @@ use tracing_indicatif::{IndicatifLayer, span_ext::IndicatifSpanExt}; use 
tracing_subscriber::{EnvFilter, layer::SubscriberExt as _, util::SubscriberInitExt as _}; use crate::{ + audit::AuditError, config::{Config, ConfigError, ConfigErrorInner}, github::Client, models::AsDocument, @@ -116,6 +118,22 @@ struct App { #[arg(long, value_enum, default_value_t)] format: OutputFormat, + /// Whether to render OSC 8 links in the output. + /// + /// This affects links under audit IDs, as well as any links + /// produced by audit rules. + /// + /// Only affects `--format=plain` (the default). + #[arg(long, value_enum, default_value_t, env = "ZIZMOR_RENDER_LINKS")] + render_links: CliRenderLinks, + + /// Whether to render audit URLs in the output, separately from any URLs + /// embedded in OSC 8 links. + /// + /// Only affects `--format=plain` (the default). + #[arg(long, value_enum, default_value_t, env = "ZIZMOR_SHOW_AUDIT_URLS")] + show_audit_urls: CliShowAuditUrls, + /// Control the use of color in output. #[arg(long, value_enum, value_name = "MODE")] color: Option, @@ -316,6 +334,79 @@ pub(crate) enum OutputFormat { Github, } +#[derive(Debug, Default, Copy, Clone, ValueEnum)] +pub(crate) enum CliRenderLinks { + /// Render OSC 8 links in output if support is detected. + #[default] + Auto, + /// Always render OSC 8 links in output. + Always, + /// Never render OSC 8 links in output. + Never, +} + +#[derive(Debug, Copy, Clone)] +pub(crate) enum RenderLinks { + Always, + Never, +} + +impl From for RenderLinks { + fn from(value: CliRenderLinks) -> Self { + match value { + CliRenderLinks::Auto => { + // We render links if stdout is a terminal. This is assumed + // to preclude CI environments and log files. + // + // TODO: Switch this to the support-hyperlinks crate? 
+ // See: https://github.com/zkat/supports-hyperlinks/pull/8 + if stdout().is_terminal() { + RenderLinks::Always + } else { + RenderLinks::Never + } + } + CliRenderLinks::Always => RenderLinks::Always, + CliRenderLinks::Never => RenderLinks::Never, + } + } +} + +#[derive(Debug, Default, Copy, Clone, ValueEnum)] +pub(crate) enum CliShowAuditUrls { + /// Render audit URLs in output automatically based on output format and runtime context. + /// + /// For example, URLs will be shown if a CI runtime is detected. + #[default] + Auto, + /// Always render audit URLs in output. + Always, + /// Never render audit URLs in output. + Never, +} + +#[derive(Debug, Copy, Clone)] +pub(crate) enum ShowAuditUrls { + Always, + Never, +} + +impl From for ShowAuditUrls { + fn from(value: CliShowAuditUrls) -> Self { + match value { + CliShowAuditUrls::Auto => { + if utils::is_ci() || !stdout().is_terminal() { + ShowAuditUrls::Always + } else { + ShowAuditUrls::Never + } + } + CliShowAuditUrls::Always => ShowAuditUrls::Always, + CliShowAuditUrls::Never => ShowAuditUrls::Never, + } + } +} + #[derive(Debug, Copy, Clone, ValueEnum)] pub(crate) enum ColorMode { /// Use color output if the output supports it. @@ -551,10 +642,10 @@ enum Error { #[error("failed to load audit rules")] AuditLoad(#[source] anyhow::Error), /// An error while running an audit. - #[error("{ident} failed on {input}")] + #[error("'{ident}' audit failed on {input}")] Audit { ident: &'static str, - source: anyhow::Error, + source: AuditError, input: String, }, /// An error while rendering output. 
@@ -597,6 +688,7 @@ async fn run(app: &mut App) -> Result { ColorMode::Never } else if std::env::var("FORCE_COLOR").is_ok() || std::env::var("CLICOLOR_FORCE").is_ok() + || utils::is_ci() { ColorMode::Always } else { @@ -751,7 +843,7 @@ async fn run(app: &mut App) -> Result { while let Some(findings) = completion_stream.next().await { let findings = findings.map_err(|err| Error::Audit { ident: err.ident(), - source: err.into(), + source: err, input: input.key().to_string(), })?; @@ -768,7 +860,13 @@ async fn run(app: &mut App) -> Result { } match app.format { - OutputFormat::Plain => output::plain::render_findings(®istry, &results, app.naches), + OutputFormat::Plain => output::plain::render_findings( + ®istry, + &results, + &app.show_audit_urls.into(), + &app.render_links.into(), + app.naches, + ), OutputFormat::Json | OutputFormat::JsonV1 => { output::json::v1::output(stdout(), results.findings()).map_err(Error::Output)? } @@ -811,7 +909,7 @@ async fn main() -> ExitCode { // which is then typically inaccessible from an already failed // CI job. In those cases, it's better to dump directly to stderr, // since that'll typically be captured by console logging. - if std::env::var_os("CI").is_some() { + if utils::is_ci() { std::panic::set_hook(Box::new(|info| { let trace = std::backtrace::Backtrace::force_capture(); eprintln!("FATAL: zizmor crashed. This is a bug that should be reported."); @@ -871,6 +969,16 @@ async fn main() -> ExitCode { Some(report) } Error::Collection(err) => match err.inner() { + CollectionError::NoInputs => { + let group = Group::with_title(Level::ERROR.primary_title(err.to_string())) + .element(Level::HELP.message("collection yielded no auditable inputs")) + .element(Level::HELP.message("inputs must contain at least one valid workflow, action, or Dependabot config")); + + let renderer = Renderer::styled(); + let report = renderer.render(&[group]); + + Some(report) + } CollectionError::DuplicateInput(..) 
=> { let group = Group::with_title(Level::ERROR.primary_title(err.to_string())) .element(Level::HELP.message(format!( @@ -907,7 +1015,8 @@ async fn main() -> ExitCode { Some(report) } - CollectionError::Yamlpath(..) => { + // These errors only happen if something is wrong with zizmor itself. + CollectionError::Yamlpath(..) | CollectionError::Model(..) => { let group = Group::with_title(Level::ERROR.primary_title(err.to_string())).elements([ Level::HELP.message("this typically indicates a bug in zizmor; please report it"), Level::HELP.message( diff --git a/crates/zizmor/src/models.rs b/crates/zizmor/src/models.rs index 2dc12f47..505d720c 100644 --- a/crates/zizmor/src/models.rs +++ b/crates/zizmor/src/models.rs @@ -7,7 +7,7 @@ use github_actions_models::common::Env; use github_actions_models::common::expr::LoE; use github_actions_models::workflow::job::Strategy; -use crate::finding::location::Locatable; +use crate::finding::location::{Locatable, SymbolicLocation}; use crate::models::inputs::HasInputs; pub(crate) mod action; @@ -45,7 +45,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs { fn env_is_static(&self, ctx: &context::Context) -> bool; /// Returns a [`common::Uses`] for this step, if it has one. - fn uses(&self) -> Option<&common::Uses>; + fn uses(&self) -> Option<&'doc common::Uses>; /// Returns this step's job's strategy, if present. /// @@ -64,7 +64,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs { /// /// Returns `None` if the shell cannot be statically determined, including /// if the shell is specified via an expression. 
- fn shell(&self) -> Option<&str>; + fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)>; } impl<'a, 'doc, T: StepCommon<'doc>> AsDocument<'a, 'doc> for T { diff --git a/crates/zizmor/src/models/action.rs b/crates/zizmor/src/models/action.rs index 006cb57e..6afc4f4f 100644 --- a/crates/zizmor/src/models/action.rs +++ b/crates/zizmor/src/models/action.rs @@ -175,10 +175,13 @@ impl<'doc> Locatable<'doc> for CompositeStep<'doc> { ]) } - fn location_with_name(&self) -> SymbolicLocation<'doc> { - match self.inner.name { - Some(_) => self.location().with_keys(["name".into()]), - None => self.location(), + fn location_with_grip(&self) -> SymbolicLocation<'doc> { + if self.inner.name.is_some() { + self.location().with_keys(["name".into()]) + } else if self.inner.id.is_some() { + self.location().with_keys(["id".into()]) + } else { + self.location() } } } @@ -198,7 +201,7 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> { utils::env_is_static(ctx, &[&self.env]) } - fn uses(&self) -> Option<&common::Uses> { + fn uses(&self) -> Option<&'doc common::Uses> { let action::StepBody::Uses { uses, .. } = &self.inner.body else { return None; }; @@ -229,14 +232,19 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> { self.action().as_document() } - fn shell(&self) -> Option<&str> { + fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> { // For composite action steps, shell is always explicitly specified in the YAML. if let action::StepBody::Run { shell: LoE::Literal(shell), .. 
} = &self.inner.body { - Some(shell) + Some(( + shell, + self.location() + .with_keys(["shell".into()]) + .annotated("shell defined here"), + )) } else { None } diff --git a/crates/zizmor/src/models/dependabot.rs b/crates/zizmor/src/models/dependabot.rs index 05a04321..4d7aa350 100644 --- a/crates/zizmor/src/models/dependabot.rs +++ b/crates/zizmor/src/models/dependabot.rs @@ -131,7 +131,7 @@ impl<'doc> Locatable<'doc> for Update<'doc> { .annotated("this update rule") } - fn location_with_name(&self) -> SymbolicLocation<'doc> { + fn location_with_grip(&self) -> SymbolicLocation<'doc> { self.location() .with_keys(["package-ecosystem".into()]) .annotated("this ecosystem") diff --git a/crates/zizmor/src/models/uses.rs b/crates/zizmor/src/models/uses.rs index 2e50e763..cceb0781 100644 --- a/crates/zizmor/src/models/uses.rs +++ b/crates/zizmor/src/models/uses.rs @@ -1,18 +1,18 @@ //! Extension traits for the `Uses` APIs. -use std::str::FromStr; +use std::{str::FromStr, sync::LazyLock}; use github_actions_models::common::{RepositoryUses, Uses}; +use regex::Regex; use serde::Deserialize; -use crate::utils::once::static_regex; - -// Matches all variants of [`RepositoryUsesPattern`] except `*`. -// -// TODO: Replace this with a real parser; this is ridiculous. -static_regex!( - REPOSITORY_USES_PATTERN, - r#"(?xmi) # verbose, multi-line mode, case-insensitive +/// Matches all variants of [`RepositoryUsesPattern`] except `*`. +/// +/// TODO: Replace this with a real parser; this is ridiculous. +static REPOSITORY_USES_PATTERN: LazyLock = LazyLock::new(|| { + #[allow(clippy::unwrap_used)] + Regex::new( + r#"(?xmi) # verbose, multi-line mode, case-insensitive ^ # start of line ([\w-]+) # (1) owner / # / @@ -30,8 +30,10 @@ static_regex!( ([[[:graph:]]&&[^\*]]+) # (4) git ref (any non-space, non-* characters) )? # end of non-capturing group for optional git ref $ # end of line - "# -); + "#, + ) + .unwrap() +}); /// Represents a pattern for matching repository `uses` references. 
/// These patterns are ordered by specificity; more specific patterns @@ -71,10 +73,10 @@ impl RepositoryUsesPattern { subpath, git_ref, } => { - uses.owner.eq_ignore_ascii_case(owner) - && uses.repo.eq_ignore_ascii_case(repo) - && uses.subpath == *subpath - && uses.git_ref.as_str() == git_ref + uses.owner().eq_ignore_ascii_case(owner) + && uses.repo().eq_ignore_ascii_case(repo) + && uses.subpath() == subpath.as_deref() + && uses.git_ref() == git_ref } RepositoryUsesPattern::ExactPath { owner, @@ -87,19 +89,19 @@ impl RepositoryUsesPattern { // Utf8Path gets us part of the way there, but is // platform dependent (i.e. will do the wrong thing // if the platform separator is not /). - uses.owner.eq_ignore_ascii_case(owner) - && uses.repo.eq_ignore_ascii_case(repo) - && uses.subpath.as_deref().is_some_and(|s| s == subpath) + uses.owner().eq_ignore_ascii_case(owner) + && uses.repo().eq_ignore_ascii_case(repo) + && uses.subpath().is_some_and(|s| s == subpath) } RepositoryUsesPattern::ExactRepo { owner, repo } => { - uses.owner.eq_ignore_ascii_case(owner) - && uses.repo.eq_ignore_ascii_case(repo) - && uses.subpath.is_none() + uses.owner().eq_ignore_ascii_case(owner) + && uses.repo().eq_ignore_ascii_case(repo) + && uses.subpath().is_none() } RepositoryUsesPattern::InRepo { owner, repo } => { - uses.owner.eq_ignore_ascii_case(owner) && uses.repo.eq_ignore_ascii_case(repo) + uses.owner().eq_ignore_ascii_case(owner) && uses.repo().eq_ignore_ascii_case(repo) } - RepositoryUsesPattern::InOwner(owner) => uses.owner.eq_ignore_ascii_case(owner), + RepositoryUsesPattern::InOwner(owner) => uses.owner().eq_ignore_ascii_case(owner), RepositoryUsesPattern::Any => true, } } @@ -218,18 +220,18 @@ impl RepositoryUsesExt for RepositoryUses { } fn ref_is_commit(&self) -> bool { - self.git_ref.len() == 40 && self.git_ref.chars().all(|c| c.is_ascii_hexdigit()) + self.git_ref().len() == 40 && self.git_ref().chars().all(|c| c.is_ascii_hexdigit()) } fn commit_ref(&self) -> Option<&str> { - 
match &self.git_ref { + match &self.git_ref() { git_ref if self.ref_is_commit() => Some(git_ref), _ => None, } } fn symbolic_ref(&self) -> Option<&str> { - match &self.git_ref { + match &self.git_ref() { git_ref if !self.ref_is_commit() => Some(git_ref), _ => None, } @@ -246,7 +248,7 @@ impl UsesExt for Uses { /// Whether the `uses:` is unpinned. fn unpinned(&self) -> bool { match self { - Uses::Docker(docker) => docker.hash.is_none() && docker.tag.is_none(), + Uses::Docker(docker) => docker.hash().is_none() && docker.tag().is_none(), Uses::Repository(_) => false, // Local `uses:` are always unpinned; any `@ref` component // is actually part of the path. @@ -263,7 +265,7 @@ impl UsesExt for Uses { // (since it's fully contained within the calling repo), Uses::Local(_) => false, Uses::Repository(repo) => !repo.ref_is_commit(), - Uses::Docker(docker) => docker.hash.is_none(), + Uses::Docker(docker) => docker.hash().is_none(), } } } @@ -477,7 +479,7 @@ mod tests { ("actions/checkout/foo@v3", "actions/checkout/foo@v3", true), ("actions/checkout/foo@v1", "actions/checkout/foo@v3", false), ] { - let Ok(Uses::Repository(uses)) = Uses::from_str(uses) else { + let Ok(Uses::Repository(uses)) = Uses::parse(uses) else { return Err(anyhow!("invalid uses: {uses}")); }; diff --git a/crates/zizmor/src/models/workflow.rs b/crates/zizmor/src/models/workflow.rs index 6225135f..4c6b6f7d 100644 --- a/crates/zizmor/src/models/workflow.rs +++ b/crates/zizmor/src/models/workflow.rs @@ -286,7 +286,13 @@ impl<'doc> NormalJob<'doc> { } } -impl<'doc> JobExt<'doc> for NormalJob<'doc> { +impl<'a, 'doc> AsDocument<'a, 'doc> for NormalJob<'doc> { + fn as_document(&'a self) -> &'doc yamlpath::Document { + self.parent.as_document() + } +} + +impl<'doc> JobCommon<'doc> for NormalJob<'doc> { fn id(&self) -> &'doc str { self.id } @@ -329,7 +335,13 @@ impl<'doc> ReusableWorkflowCallJob<'doc> { } } -impl<'doc> JobExt<'doc> for ReusableWorkflowCallJob<'doc> { +impl<'a, 'doc> AsDocument<'a, 'doc> for 
ReusableWorkflowCallJob<'doc> { + fn as_document(&'a self) -> &'doc yamlpath::Document { + self.parent.as_document() + } +} + +impl<'doc> JobCommon<'doc> for ReusableWorkflowCallJob<'doc> { fn id(&self) -> &'doc str { self.id } @@ -352,7 +364,7 @@ impl<'doc> std::ops::Deref for ReusableWorkflowCallJob<'doc> { } /// Common behavior across both normal and reusable jobs. -pub(crate) trait JobExt<'doc> { +pub(crate) trait JobCommon<'doc>: Locatable<'doc> { /// The job's unique ID (i.e., its key in the workflow's `jobs:` block). fn id(&self) -> &'doc str; @@ -363,7 +375,7 @@ pub(crate) trait JobExt<'doc> { fn parent(&self) -> &'doc Workflow; } -impl<'doc, T: JobExt<'doc>> Locatable<'doc> for T { +impl<'doc, T: JobCommon<'doc>> Locatable<'doc> for T { /// Returns this job's [`SymbolicLocation`]. fn location(&self) -> SymbolicLocation<'doc> { self.parent() @@ -372,10 +384,15 @@ impl<'doc, T: JobExt<'doc>> Locatable<'doc> for T { .with_keys(["jobs".into(), self.id().into()]) } - fn location_with_name(&self) -> SymbolicLocation<'doc> { - match self.name() { - Some(_) => self.location().with_keys(["name".into()]), - None => self.location(), + fn location_with_grip(&self) -> SymbolicLocation<'doc> { + if self.name().is_some() { + self.location().with_keys(["name".into()]) + } else { + self.parent() + .location() + .annotated("this job") + .with_keys(["jobs".into(), self.id().into()]) + .key_only() } } } @@ -617,10 +634,13 @@ impl<'doc> Locatable<'doc> for Step<'doc> { .annotated("this step") } - fn location_with_name(&self) -> SymbolicLocation<'doc> { - match self.inner.name { - Some(_) => self.location().with_keys(["name".into()]), - None => self.location(), + fn location_with_grip(&self) -> SymbolicLocation<'doc> { + if self.inner.name.is_some() { + self.location().with_keys(["name".into()]) + } else if self.inner.id.is_some() { + self.location().with_keys(["id".into()]) + } else { + self.location() } } } @@ -640,7 +660,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> { 
utils::env_is_static(ctx, &[&self.env, &self.job().env, &self.workflow().env]) } - fn uses(&self) -> Option<&common::Uses> { + fn uses(&self) -> Option<&'doc common::Uses> { let StepBody::Uses { uses, .. } = &self.inner.body else { return None; }; @@ -671,7 +691,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> { self.workflow().as_document() } - fn shell(&self) -> Option<&str> { + fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> { // For workflow steps, we can use the existing shell() method self.shell() } @@ -700,7 +720,7 @@ impl<'doc> Step<'doc> { /// if the shell can't be statically inferred. /// /// Invariant: panics if the step is not a `run:` step. - pub(crate) fn shell(&self) -> Option<&str> { + pub(crate) fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> { let StepBody::Run { run: _, working_directory: _, @@ -716,7 +736,12 @@ impl<'doc> Step<'doc> { // If any of these is an expression, we can't infer the shell // statically, so we terminate early with `None`. let shell = match shell { - Some(LoE::Literal(shell)) => Some(shell.as_str()), + Some(LoE::Literal(shell)) => Some(( + shell.as_str(), + self.location() + .with_keys(["shell".into()]) + .annotated("shell defined here"), + )), Some(LoE::Expr(_)) => return None, None => match self .job() @@ -724,7 +749,13 @@ impl<'doc> Step<'doc> { .as_ref() .and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref())) { - Some(LoE::Literal(shell)) => Some(shell.as_str()), + Some(LoE::Literal(shell)) => Some(( + shell.as_str(), + self.job() + .location() + .with_keys(["defaults".into(), "run".into(), "shell".into()]) + .annotated("job default shell defined here"), + )), Some(LoE::Expr(_)) => return None, None => match self .workflow() @@ -732,14 +763,30 @@ impl<'doc> Step<'doc> { .as_ref() .and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref())) { - Some(LoE::Literal(shell)) => Some(shell.as_str()), + Some(LoE::Literal(shell)) => Some(( + shell.as_str(), + self.workflow() + .location() + 
.with_keys(["defaults".into(), "run".into(), "shell".into()]) + .annotated("workflow default shell defined here"), + )), Some(LoE::Expr(_)) => return None, None => None, }, }, }; - shell.or_else(|| self.parent.runner_default_shell()) + shell.or_else(|| { + self.parent.runner_default_shell().map(|shell| { + ( + shell, + self.job() + .location() + .with_keys(["runs-on".into()]) + .annotated("shell implied by runner"), + ) + }) + }) } } diff --git a/crates/zizmor/src/output/plain.rs b/crates/zizmor/src/output/plain.rs index da7a0e7a..e10749c6 100644 --- a/crates/zizmor/src/output/plain.rs +++ b/crates/zizmor/src/output/plain.rs @@ -7,6 +7,7 @@ use anstream::{eprintln, print, println}; use owo_colors::OwoColorize; use crate::{ + RenderLinks, ShowAuditUrls, finding::{ Finding, Severity, location::{Location, LocationKind}, @@ -43,6 +44,7 @@ impl From<&Severity> for Level<'_> { pub(crate) fn finding_snippets<'doc>( registry: &'doc InputRegistry, finding: &'doc Finding<'doc>, + render_links_mode: &RenderLinks, ) -> Vec>> { // Our finding might span multiple workflows, so we need to group locations // by their enclosing workflow to generate each snippet correctly. 
@@ -67,15 +69,20 @@ pub(crate) fn finding_snippets<'doc>( for (input_key, locations) in locations_by_workflow { let input = registry.get_input(input_key); + let path = match render_links_mode { + RenderLinks::Always => input.link().unwrap_or(input_key.presentation_path()), + RenderLinks::Never => input_key.presentation_path(), + }; + snippets.push( Snippet::source(input.as_document().source()) .fold(true) .line_start(1) - .path(input.link().unwrap_or(input_key.presentation_path())) + .path(path) .annotations(locations.iter().map(|loc| { - let annotation = match loc.symbolic.link { - Some(ref link) => link, - None => &loc.symbolic.annotation, + let annotation = match (loc.symbolic.link.as_deref(), render_links_mode) { + (Some(link), RenderLinks::Always) => link, + _ => &loc.symbolic.annotation, }; AnnotationKind::from(loc.symbolic.kind) @@ -94,10 +101,12 @@ pub(crate) fn finding_snippets<'doc>( pub(crate) fn render_findings( registry: &InputRegistry, findings: &FindingRegistry, + show_urls_mode: &ShowAuditUrls, + render_links_mode: &RenderLinks, naches_mode: bool, ) { for finding in findings.findings() { - render_finding(registry, finding); + render_finding(registry, finding, show_urls_mode, render_links_mode); println!(); } @@ -190,11 +199,19 @@ pub(crate) fn render_findings( } } -fn render_finding(registry: &InputRegistry, finding: &Finding) { - let title = Level::from(&finding.determinations.severity) +fn render_finding( + registry: &InputRegistry, + finding: &Finding, + show_urls_mode: &ShowAuditUrls, + render_links_mode: &RenderLinks, +) { + let mut title = Level::from(&finding.determinations.severity) .primary_title(finding.desc) - .id(finding.ident) - .id_url(finding.url); + .id(finding.ident); + + if matches!(render_links_mode, RenderLinks::Always) { + title = title.id_url(finding.url); + } let confidence = format!( "audit confidence → {:?}", @@ -202,7 +219,7 @@ fn render_finding(registry: &InputRegistry, finding: &Finding) { ); let mut group = 
Group::with_title(title) - .elements(finding_snippets(registry, finding)) + .elements(finding_snippets(registry, finding, render_links_mode)) .element(Level::NOTE.message(confidence)); if let Some(tip) = &finding.tip { @@ -213,6 +230,13 @@ fn render_finding(registry: &InputRegistry, finding: &Finding) { group = group.element(Level::NOTE.message("this finding has an auto-fix")); } + if matches!(show_urls_mode, ShowAuditUrls::Always) { + group = group.element(Level::HELP.message(format!( + "audit documentation → {url}", + url = finding.url.green() + ))) + } + // TODO: Evaluate alternative decor styles. let renderer = Renderer::styled(); println!("{}", renderer.render(&[group])); diff --git a/crates/zizmor/src/registry.rs b/crates/zizmor/src/registry.rs index 4766e0d4..4c8d4d9e 100644 --- a/crates/zizmor/src/registry.rs +++ b/crates/zizmor/src/registry.rs @@ -74,6 +74,7 @@ impl AuditRegistry { register_audit!(audit::dependabot_execution::DependabotExecution); register_audit!(audit::dependabot_cooldown::DependabotCooldown); register_audit!(audit::concurrency_limits::ConcurrencyLimits); + register_audit!(audit::archived_uses::ArchivedUses); Ok(registry) } diff --git a/crates/zizmor/src/registry/input.rs b/crates/zizmor/src/registry/input.rs index 47b49fce..15b9d241 100644 --- a/crates/zizmor/src/registry/input.rs +++ b/crates/zizmor/src/registry/input.rs @@ -34,7 +34,7 @@ pub(crate) enum CollectionError { /// The input couldn't be converted into the expected model. /// This typically indicates a bug in `github-actions-models`. #[error("couldn't turn input into a an appropriate model")] - Model(#[source] anyhow::Error), + Model(#[from] serde_yaml::Error), /// The input couldn't be loaded into an internal yamlpath document. /// This typically indicates a bug in `yamlpath`. diff --git a/crates/zizmor/src/utils.rs b/crates/zizmor/src/utils.rs index 61b5590e..fec05a3f 100644 --- a/crates/zizmor/src/utils.rs +++ b/crates/zizmor/src/utils.rs @@ -1,15 +1,11 @@ //! 
Helper routines. -use anyhow::{Context as _, Error, anyhow}; +use anyhow::{Error, anyhow}; use camino::Utf8Path; use github_actions_expressions::context::{Context, ContextPattern}; use github_actions_models::common::{Env, expr::LoE}; -use jsonschema::{ - BasicOutput::{Invalid, Valid}, - Validator, - output::{ErrorDescription, OutputUnit}, - validator_for, -}; +use jsonschema::ErrorEntry; +use jsonschema::{Validator, validator_for}; use std::ops::{Deref, Range}; use std::{fmt::Write, sync::LazyLock}; @@ -307,11 +303,11 @@ pub(crate) static DEFAULT_ENVIRONMENT_VARIABLES: &[( ), ]; -fn parse_validation_errors(errors: Vec>) -> Error { +fn parse_validation_errors(errors: Vec>) -> Error { let mut message = String::new(); for error in errors { - let description = error.error_description().to_string(); + let description = error.error.to_string(); // HACK: error descriptions are sometimes a long rats' nest // of JSON objects. We should render this in a palatable way // but doing so is nontrivial, so we just skip them for now. @@ -319,7 +315,7 @@ fn parse_validation_errors(errors: Vec>) -> Error { // the error for an unmatched "oneOf", so these errors are // typically less useful anyways. if !description.starts_with("{") { - let location = error.instance_location().as_str(); + let location = error.instance_location.as_str(); if location.is_empty() { writeln!(message, "{description}").expect("I/O on a String failed"); } else { @@ -353,11 +349,17 @@ where // to distinguish between syntax and semantic errors, // but serde-yaml doesn't give us an API to do that. // To approximate it, we re-parse the input as a - // `Value` and use that as an oracle -- a successful + // `serde_yaml::Mapping`, then convert that `serde_yaml::Mapping` + // into a `serde_json::Value` and use it as an oracle -- a successful // re-parse indicates that the input is valid YAML and // that our error is semantic, while a failed re-parse // indicates a syntax error. 
// + // We need to round-trip through a `serde_yaml::Mapping` to ensure that + // all of YAML's validity rules are preserved -- directly deserializing + // into a `serde_json::Value` would miss some YAML-specific checks, + // like duplicate keys within mappings. See #1395 for an example of this. + // // We do this in a nested fashion to avoid re-parsing // the input twice if we can help it, and because the // more obvious trick (`serde_yaml::from_value`) doesn't @@ -366,21 +368,26 @@ where // See: https://github.com/dtolnay/serde-yaml/issues/170 // See: https://github.com/dtolnay/serde-yaml/issues/395 - match serde_yaml::from_str(contents) { + match serde_yaml::from_str::(contents) { // We know we have valid YAML, so one of two things happened here: // 1. The input is semantically valid, but we have a bug in // `github-actions-models`. // 2. The input is semantically invalid, and the user // needs to fix it. // We the JSON schema `validator` to separate these. - Ok(raw_value) => match validator.apply(&raw_value).basic() { - Valid(_) => Err(e) - .context("this suggests a bug in zizmor; please report it!") - .map_err(CollectionError::Model), - Invalid(errors) => { + Ok(raw_value) => { + let evaluation = validator.evaluate( + &serde_json::to_value(&raw_value) + .map_err(|e| CollectionError::Syntax(e.into()))?, + ); + + if evaluation.flag().valid { + Err(e.into()) + } else { + let errors = evaluation.iter_errors().collect::>(); Err(CollectionError::Schema(parse_validation_errors(errors))) } - }, + } // Syntax error. Err(e) => Err(CollectionError::Syntax(e.into())), } @@ -703,6 +710,13 @@ pub(crate) mod once { pub(crate) use warn_once; } +/// Returns whether we are running in a CI environment. 
+pub(crate) fn is_ci() -> bool { + static IS_CI: LazyLock = LazyLock::new(|| std::env::var_os("CI").is_some()); + + *IS_CI +} + #[cfg(test)] mod tests { use anyhow::Result; diff --git a/crates/zizmor/tests/integration/acceptance.rs b/crates/zizmor/tests/integration/acceptance.rs index c3e90f1c..9f767072 100644 --- a/crates/zizmor/tests/integration/acceptance.rs +++ b/crates/zizmor/tests/integration/acceptance.rs @@ -196,15 +196,11 @@ fn audit_unpinned_uses() -> anyhow::Result<()> { assert_value_match(&findings, "$[0].determinations.confidence", "High"); assert_value_match(&findings, "$[0].determinations.severity", "Medium"); - assert_value_match( - &findings, - "$[0].locations[0].concrete.feature", - "uses: docker://ubuntu", - ); + assert_value_match(&findings, "$[0].locations[0].concrete.feature", "ubuntu"); assert_value_match( &findings, "$[1].locations[0].concrete.feature", - "uses: docker://ghcr.io/pypa/gh-action-pypi-publish", + "ghcr.io/pypa/gh-action-pypi-publish", ); Ok(()) diff --git a/crates/zizmor/tests/integration/audit/anonymous_definition.rs b/crates/zizmor/tests/integration/audit/anonymous_definition.rs index cd2f3209..363e01ac 100644 --- a/crates/zizmor/tests/integration/audit/anonymous_definition.rs +++ b/crates/zizmor/tests/integration/audit/anonymous_definition.rs @@ -7,7 +7,7 @@ fn test_regular_persona() -> anyhow::Result<()> { zizmor() .input(input_under_test("anonymous-definition.yml")) .run()?, - @r"No findings to report. Good job! (2 suppressed)" + @"No findings to report. Good job! (2 suppressed)" ); Ok(()) @@ -34,17 +34,16 @@ fn test_pedantic_persona() -> anyhow::Result<()> { | |__________________________________________^ this workflow | = note: audit confidence → High + = tip: use 'name: ...' 
to give this workflow a name info[anonymous-definition]: workflow or action definition without a name --> @@INPUT@@:21:3 | - 21 | / will-trigger: - 22 | | runs-on: ubuntu-latest - 23 | | steps: - 24 | | - run: "echo this job will trigger" - | |__________________________________________^ this job + 21 | will-trigger: + | ^^^^^^^^^^^^ this job | = note: audit confidence → High + = tip: use 'name: ...' to give this job a name 2 findings: 1 informational, 1 low, 0 medium, 0 high "# diff --git a/crates/zizmor/tests/integration/audit/archived_uses.rs b/crates/zizmor/tests/integration/audit/archived_uses.rs new file mode 100644 index 00000000..d455276a --- /dev/null +++ b/crates/zizmor/tests/integration/audit/archived_uses.rs @@ -0,0 +1,67 @@ +use crate::common::{input_under_test, zizmor}; + +#[test] +fn test_regular_persona() -> anyhow::Result<()> { + insta::assert_snapshot!( + zizmor().input(input_under_test("archived-uses.yml")).run()?, + @r" + warning[archived-uses]: action or reusable workflow from archived repository + --> @@INPUT@@:17:15 + | + 16 | - name: setup ruby + | ---------------- this step + 17 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 + | ^^^^^^^^^^^^^^^^^^ repository is archived + | + = note: audit confidence → High + + warning[archived-uses]: action or reusable workflow from archived repository + --> @@INPUT@@:20:15 + | + 19 | - name: SETUP RUBY BUT LOUDLY + | --------------------------- this step + 20 | uses: ACTIONS/SETUP-RUBY@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 + | ^^^^^^^^^^^^^^^^^^ repository is archived + | + = note: audit confidence → High + + warning[archived-uses]: action or reusable workflow from archived repository + --> @@INPUT@@:24:11 + | + 23 | name: archived-uses-reusable + | ---------------------------- this job + 24 | uses: actions/setup-ruby/.github/workflows/notreal.yml@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 + | ^^^^^^^^^^^^^^^^^^ repository is archived + | + = note: audit 
confidence → High + + 3 findings: 0 informational, 0 low, 3 medium, 0 high + " + ); + + Ok(()) +} + +#[test] +fn test_composite_action() -> anyhow::Result<()> { + insta::assert_snapshot!( + zizmor() + .input(input_under_test("archived-uses/action/")) + .run()?, + @r" + warning[archived-uses]: action or reusable workflow from archived repository + --> @@INPUT@@action.yml:9:13 + | + 8 | - name: setup ruby + | ---------------- this step + 9 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 + | ^^^^^^^^^^^^^^^^^^ repository is archived + | + = note: audit confidence → High + + 1 finding: 0 informational, 0 low, 1 medium, 0 high + " + ); + + Ok(()) +} diff --git a/crates/zizmor/tests/integration/audit/concurrency_limits.rs b/crates/zizmor/tests/integration/audit/concurrency_limits.rs index c4a5a6f9..29ff0add 100644 --- a/crates/zizmor/tests/integration/audit/concurrency_limits.rs +++ b/crates/zizmor/tests/integration/audit/concurrency_limits.rs @@ -80,28 +80,28 @@ fn test_jobs_missing_no_cancel() -> anyhow::Result<()> { .args(["--persona=pedantic"]) .run()?, @r" - help[concurrency-limits]: insufficient job-level concurrency limits - --> @@INPUT@@:9:5 - | - 9 | concurrency: group - | ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress - | - = note: audit confidence → High + help[concurrency-limits]: insufficient job-level concurrency limits + --> @@INPUT@@:9:5 + | + 9 | concurrency: group + | ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress + | + = note: audit confidence → High - help[concurrency-limits]: insufficient job-level concurrency limits - --> @@INPUT@@:1:1 - | - 1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency - 2 | | on: push - 3 | | permissions: {} - ... 
| - 17 | | - name: 2-ok - 18 | | run: echo ok - | |___________________^ missing concurrency setting - | - = note: audit confidence → High + help[concurrency-limits]: insufficient job-level concurrency limits + --> @@INPUT@@:1:1 + | + 1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency + 2 | | on: push + 3 | | permissions: {} + ... | + 17 | | - name: 2-ok + 18 | | run: echo ok + | |___________________^ missing concurrency setting + | + = note: audit confidence → High - 2 findings: 0 informational, 2 low, 0 medium, 0 high + 2 findings: 0 informational, 2 low, 0 medium, 0 high " ); diff --git a/crates/zizmor/tests/integration/audit/dependabot_cooldown.rs b/crates/zizmor/tests/integration/audit/dependabot_cooldown.rs index e4bf9900..64880b9c 100644 --- a/crates/zizmor/tests/integration/audit/dependabot_cooldown.rs +++ b/crates/zizmor/tests/integration/audit/dependabot_cooldown.rs @@ -165,7 +165,7 @@ fn test_config_short_cooldown_permitted() -> anyhow::Result<()> { .input(input_under_test("dependabot-cooldown/default-days-too-short/dependabot.yml")) .config(input_under_test("dependabot-cooldown/configs/cooldown-one-day.yml")) .run()?, - @r"No findings to report. Good job!" + @"No findings to report. Good job!" 
); Ok(()) diff --git a/crates/zizmor/tests/integration/audit/forbidden_uses.rs b/crates/zizmor/tests/integration/audit/forbidden_uses.rs index c4cf970d..134547a1 100644 --- a/crates/zizmor/tests/integration/audit/forbidden_uses.rs +++ b/crates/zizmor/tests/integration/audit/forbidden_uses.rs @@ -27,26 +27,26 @@ fn test_deny_all() -> Result<()> { .run()?, @r" error[forbidden-uses]: forbidden action used - --> @@INPUT@@:13:9 + --> @@INPUT@@:13:15 | 13 | - uses: actions/setup-python@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High error[forbidden-uses]: forbidden action used - --> @@INPUT@@:14:9 + --> @@INPUT@@:14:15 | 14 | - uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High error[forbidden-uses]: forbidden action used - --> @@INPUT@@:15:9 + --> @@INPUT@@:15:15 | 15 | - uses: actions/checkout@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High @@ -68,10 +68,10 @@ fn test_allow_some() -> Result<()> { .run()?, @r" error[forbidden-uses]: forbidden action used - --> @@INPUT@@:13:9 + --> @@INPUT@@:13:15 | 13 | - uses: actions/setup-python@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High @@ -93,18 +93,18 @@ fn test_deny_some() -> Result<()> { .run()?, @r" error[forbidden-uses]: forbidden action used - --> @@INPUT@@:14:9 + --> @@INPUT@@:14:15 | 14 | - uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit 
confidence → High error[forbidden-uses]: forbidden action used - --> @@INPUT@@:15:9 + --> @@INPUT@@:15:15 | 15 | - uses: actions/checkout@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High @@ -126,18 +126,18 @@ fn test_deny_some_refs() -> Result<()> { .run()?, @r" error[forbidden-uses]: forbidden action used - --> @@INPUT@@:13:9 + --> @@INPUT@@:13:15 | 13 | - uses: actions/setup-python@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High error[forbidden-uses]: forbidden action used - --> @@INPUT@@:14:9 + --> @@INPUT@@:14:15 | 14 | - uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High @@ -159,10 +159,10 @@ fn test_allow_some_refs() -> Result<()> { .run()?, @r" error[forbidden-uses]: forbidden action used - --> @@INPUT@@:15:9 + --> @@INPUT@@:15:15 | 15 | - uses: actions/checkout@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden + | ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden | = note: audit confidence → High diff --git a/crates/zizmor/tests/integration/audit/impostor_commit.rs b/crates/zizmor/tests/integration/audit/impostor_commit.rs new file mode 100644 index 00000000..2c982f8b --- /dev/null +++ b/crates/zizmor/tests/integration/audit/impostor_commit.rs @@ -0,0 +1,31 @@ +use crate::common::{input_under_test, zizmor}; + +#[cfg_attr(not(feature = "gh-token-tests"), ignore)] +#[test] +fn test_regular_persona() -> anyhow::Result<()> { + insta::assert_snapshot!( + zizmor() + .input(input_under_test("impostor-commit.yml")) + .offline(false) + .run()?, + @r" + error[impostor-commit]: commit with no history in referenced repository + --> @@INPUT@@:29:15 + | + 
29 | - uses: actions/checkout@c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e + | - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ uses a commit that doesn't belong to the specified org/repo + | _________| + | | + 30 | | with: + 31 | | persist-credentials: false + | |____________________________________- this step + | + = note: audit confidence → High + = note: this finding has an auto-fix + + 4 findings (3 suppressed, 1 fixable): 0 informational, 0 low, 0 medium, 1 high + " + ); + + Ok(()) +} diff --git a/crates/zizmor/tests/integration/audit/mod.rs b/crates/zizmor/tests/integration/audit/mod.rs index 04c49308..e62157b4 100644 --- a/crates/zizmor/tests/integration/audit/mod.rs +++ b/crates/zizmor/tests/integration/audit/mod.rs @@ -1,6 +1,7 @@ //! Per-audit integrationt tests, including snapshots. mod anonymous_definition; +mod archived_uses; mod artipacked; mod bot_conditions; mod cache_poisoning; @@ -12,7 +13,7 @@ mod excessive_permissions; mod forbidden_uses; mod github_env; // mod hardcoded_container_credentials; // TODO -// mod impostor_commit; // TODO +mod impostor_commit; mod insecure_commands; // mod known_vulnerable_actions; // TODO mod obfuscation; diff --git a/crates/zizmor/tests/integration/audit/obfuscation.rs b/crates/zizmor/tests/integration/audit/obfuscation.rs index 4d8c7bfd..e7149117 100644 --- a/crates/zizmor/tests/integration/audit/obfuscation.rs +++ b/crates/zizmor/tests/integration/audit/obfuscation.rs @@ -237,3 +237,56 @@ fn test_issue_1177_repro_pedantic() -> Result<()> { Ok(()) } + +/// Reproduces issue #1414: the obfuscation audit should not crash if the +/// user has `shell: cmd` defined as a job or workflow default rather than +/// at the step level. 
+/// +/// See: https://github.com/zizmorcore/zizmor/issues/1414 +#[test] +fn test_issue_1414_repro() -> Result<()> { + insta::assert_snapshot!( + zizmor() + .input(input_under_test("obfuscation/issue-1414-repro.yml")) + .run()?, + @r" + help[obfuscation]: obfuscated usage of GitHub Actions features + --> @@INPUT@@:13:9 + | + 13 | shell: cmd + | ^^^^^^^^^^ job default shell defined here + 14 | steps: + 15 | - name: say hi + | ------------ Windows CMD shell limits analysis + | + = note: audit confidence → High + = tip: use 'shell: pwsh' or 'shell: bash' for improved analysis + + 3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high + " + ); + + // Like #1414, but with `shell: cmd` defined at the workflow level. + insta::assert_snapshot!( + zizmor() + .input(input_under_test("obfuscation/workflow-cmd-default-shell.yml")) + .run()?, + @r" + help[obfuscation]: obfuscated usage of GitHub Actions features + --> @@INPUT@@:10:5 + | + 10 | shell: cmd + | ^^^^^^^^^^ workflow default shell defined here + ... + 16 | - name: say hi + | ------------ Windows CMD shell limits analysis + | + = note: audit confidence → High + = tip: use 'shell: pwsh' or 'shell: bash' for improved analysis + + 3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high + " + ); + + Ok(()) +} diff --git a/crates/zizmor/tests/integration/audit/ref_confusion.rs b/crates/zizmor/tests/integration/audit/ref_confusion.rs index f3db36bb..3d8a4eff 100644 --- a/crates/zizmor/tests/integration/audit/ref_confusion.rs +++ b/crates/zizmor/tests/integration/audit/ref_confusion.rs @@ -33,7 +33,7 @@ fn test_issue_518_repro() -> Result<()> { .input(input_under_test("ref-confusion/issue-518-repro.yml")) .offline(false) .run()?, - @r"No findings to report. Good job! (1 ignored, 1 suppressed)" + @"No findings to report. Good job! 
(1 ignored, 1 suppressed)" ); Ok(()) diff --git a/crates/zizmor/tests/integration/audit/ref_version_mismatch.rs b/crates/zizmor/tests/integration/audit/ref_version_mismatch.rs index e5c59e57..3a80078d 100644 --- a/crates/zizmor/tests/integration/audit/ref_version_mismatch.rs +++ b/crates/zizmor/tests/integration/audit/ref_version_mismatch.rs @@ -42,7 +42,7 @@ fn test_nested_annotated_tags() -> Result<()> { "ref-version-mismatch/nested-annotated-tags.yml" )) .run()?, - @r"No findings to report. Good job! (1 suppressed)" + @"No findings to report. Good job! (1 suppressed)" ); Ok(()) diff --git a/crates/zizmor/tests/integration/audit/secrets_inherit.rs b/crates/zizmor/tests/integration/audit/secrets_inherit.rs index aa7565fb..00e38652 100644 --- a/crates/zizmor/tests/integration/audit/secrets_inherit.rs +++ b/crates/zizmor/tests/integration/audit/secrets_inherit.rs @@ -8,10 +8,10 @@ fn secrets_inherit() -> anyhow::Result<()> { .run()?, @r" warning[secrets-inherit]: secrets unconditionally inherited by called workflow - --> @@INPUT@@:10:5 + --> @@INPUT@@:10:11 | 10 | uses: octo-org/example-repo/.github/workflows/called-workflow.yml@main - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow 11 | # NOT OK: unconditionally inherits 12 | secrets: inherit | ---------------- inherits all parent secrets diff --git a/crates/zizmor/tests/integration/audit/self_hosted_runner.rs b/crates/zizmor/tests/integration/audit/self_hosted_runner.rs index a18a7189..68869ab9 100644 --- a/crates/zizmor/tests/integration/audit/self_hosted_runner.rs +++ b/crates/zizmor/tests/integration/audit/self_hosted_runner.rs @@ -30,7 +30,7 @@ fn test_self_hosted_default() -> Result<()> { zizmor() .input(input_under_test("self-hosted.yml")) .run()?, - @r"No findings to report. Good job! (1 suppressed)" + @"No findings to report. Good job! 
(1 suppressed)" ); Ok(()) diff --git a/crates/zizmor/tests/integration/audit/stale_action_refs.rs b/crates/zizmor/tests/integration/audit/stale_action_refs.rs index a874900b..d09838eb 100644 --- a/crates/zizmor/tests/integration/audit/stale_action_refs.rs +++ b/crates/zizmor/tests/integration/audit/stale_action_refs.rs @@ -11,10 +11,10 @@ fn test_pedantic_persona() -> anyhow::Result<()> { .run()?, @r" help[stale-action-refs]: commit hash does not point to a Git tag - --> @@INPUT@@:34:7 + --> @@INPUT@@:34:13 | 34 | - uses: actions/checkout@009b9ae9e446ad8d9b8c809870b0fbcc5e03573e - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step | = note: audit confidence → High diff --git a/crates/zizmor/tests/integration/audit/template_injection.rs b/crates/zizmor/tests/integration/audit/template_injection.rs index 15e082fb..504f2c48 100644 --- a/crates/zizmor/tests/integration/audit/template_injection.rs +++ b/crates/zizmor/tests/integration/audit/template_injection.rs @@ -183,7 +183,7 @@ fn test_issue_418_repro() -> Result<()> { zizmor() .input(input_under_test("template-injection/issue-418-repro.yml")) .run()?, - @r"No findings to report. Good job! (3 suppressed)" + @"No findings to report. Good job! 
(3 suppressed)" ); Ok(()) @@ -247,10 +247,10 @@ fn test_pr_425_backstop_action() -> Result<()> { = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:29:7 + --> @@INPUT@@:29:13 | 29 | uses: azure/powershell@whatever - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High diff --git a/crates/zizmor/tests/integration/audit/undocumented_permissions.rs b/crates/zizmor/tests/integration/audit/undocumented_permissions.rs index e13003de..04b93cf4 100644 --- a/crates/zizmor/tests/integration/audit/undocumented_permissions.rs +++ b/crates/zizmor/tests/integration/audit/undocumented_permissions.rs @@ -64,7 +64,7 @@ fn test_undocumented_permissions_default() -> Result<()> { zizmor() .input(input_under_test("undocumented-permissions.yml")) .run()?, - @r"No findings to report. Good job! (5 suppressed)" + @"No findings to report. Good job! (5 suppressed)" ); Ok(()) @@ -78,7 +78,7 @@ fn test_documented_permissions_pedantic() -> Result<()> { .input(input_under_test("undocumented-permissions/documented.yml")) .args(["--persona=pedantic"]) .run()?, - @r"No findings to report. Good job! (1 ignored)" + @"No findings to report. Good job! (1 ignored)" ); Ok(()) @@ -94,7 +94,7 @@ fn test_contents_read_only_pedantic() -> Result<()> { )) .args(["--persona=pedantic"]) .run()?, - @r"No findings to report. Good job!" + @"No findings to report. Good job!" ); Ok(()) @@ -110,7 +110,7 @@ fn test_empty_permissions_pedantic() -> Result<()> { )) .args(["--persona=pedantic"]) .run()?, - @r"No findings to report. Good job!" + @"No findings to report. Good job!" 
); Ok(()) diff --git a/crates/zizmor/tests/integration/audit/unpinned_uses.rs b/crates/zizmor/tests/integration/audit/unpinned_uses.rs index 4534e122..830a741f 100644 --- a/crates/zizmor/tests/integration/audit/unpinned_uses.rs +++ b/crates/zizmor/tests/integration/audit/unpinned_uses.rs @@ -10,18 +10,18 @@ fn test_unpinned_uses_pedantic() -> Result<()> { .run()?, @r" warning[unpinned-uses]: unpinned action reference - --> @@INPUT@@:21:9 + --> @@INPUT@@:21:24 | 21 | - uses: docker://ubuntu - | ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref + | ^^^^^^ image is not pinned to a tag, branch, or hash ref | = note: audit confidence → High warning[unpinned-uses]: unpinned action reference - --> @@INPUT@@:27:9 + --> @@INPUT@@:27:24 | 27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref | = note: audit confidence → High @@ -40,18 +40,18 @@ fn test_unpinned_uses_default() -> Result<()> { .run()?, @r" warning[unpinned-uses]: unpinned action reference - --> @@INPUT@@:21:9 + --> @@INPUT@@:21:24 | 21 | - uses: docker://ubuntu - | ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref + | ^^^^^^ image is not pinned to a tag, branch, or hash ref | = note: audit confidence → High warning[unpinned-uses]: unpinned action reference - --> @@INPUT@@:27:9 + --> @@INPUT@@:27:24 | 27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref | = note: audit confidence → High @@ -71,18 +71,18 @@ fn test_action_pedantic() -> Result<()> { .run()?, @r" error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:12:7 + --> @@INPUT@@:12:13 | 12 | uses: 
asdf-vm/actions/setup@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:15:7 + --> @@INPUT@@:15:13 | 15 | uses: asdf-vm/actions/setup@main - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -114,7 +114,7 @@ fn test_issue_659_repro() -> Result<()> { .input(input_under_test("unpinned-uses/issue-659-repro.yml")) .args(["--pedantic"]) .run()?, - @r"No findings to report. Good job!" + @"No findings to report. Good job!" ); Ok(()) @@ -129,10 +129,10 @@ fn test_default_config() -> Result<()> { .run()?, @r" error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:22:9 + --> @@INPUT@@:22:15 | 22 | - uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -155,42 +155,42 @@ fn test_hash_pin_everything_config() -> Result<()> { .run()?, @r" error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:12:9 + --> @@INPUT@@:12:15 | 12 | - uses: actions/setup-python@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:14:9 + --> @@INPUT@@:14:15 | 14 | - uses: actions/checkout@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^ action is not pinned 
to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:22:9 + --> @@INPUT@@:22:15 | 22 | - uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:24:9 + --> @@INPUT@@:24:15 | 24 | - uses: github/codeql-action/init@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:26:9 + --> @@INPUT@@:26:15 | 26 | - uses: github/codeql-action/upload-sarif@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -226,26 +226,26 @@ fn test_composite_config() -> Result<()> { .run()?, @r" error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:12:9 + --> @@INPUT@@:12:15 | 12 | - uses: actions/setup-python@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy) + | ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:24:9 + --> @@INPUT@@:24:15 | 24 | - uses: github/codeql-action/init@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: 
audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:26:9 + --> @@INPUT@@:26:15 | 26 | - uses: github/codeql-action/upload-sarif@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -265,18 +265,18 @@ fn test_composite_config_2() -> Result<()> { .run()?, @r" error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:24:9 + --> @@INPUT@@:24:15 | 24 | - uses: github/codeql-action/init@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:26:9 + --> @@INPUT@@:26:15 | 26 | - uses: github/codeql-action/upload-sarif@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy) | = note: audit confidence → High @@ -296,42 +296,42 @@ fn test_empty_config() -> Result<()> { .run()?, @r" error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:12:9 + --> @@INPUT@@:12:15 | 12 | - uses: actions/setup-python@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:14:9 + --> @@INPUT@@:14:15 | 14 | - uses: actions/checkout@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^ action is not 
pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:22:9 + --> @@INPUT@@:22:15 | 22 | - uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:24:9 + --> @@INPUT@@:24:15 | 24 | - uses: github/codeql-action/init@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:26:9 + --> @@INPUT@@:26:15 | 26 | - uses: github/codeql-action/upload-sarif@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High diff --git a/crates/zizmor/tests/integration/audit/use_trusted_publishing.rs b/crates/zizmor/tests/integration/audit/use_trusted_publishing.rs index 409130aa..a5daad60 100644 --- a/crates/zizmor/tests/integration/audit/use_trusted_publishing.rs +++ b/crates/zizmor/tests/integration/audit/use_trusted_publishing.rs @@ -357,7 +357,7 @@ fn test_issue_1191_repro() -> Result<()> { "use-trusted-publishing/issue-1191-repro.yml" )) .run()?, - @r"No findings to report. Good job! (3 suppressed)" + @"No findings to report. Good job! 
(3 suppressed)" ); Ok(()) @@ -406,3 +406,106 @@ fn test_nuget_push() -> Result<()> { Ok(()) } + +#[test] +fn test_gem_push() -> Result<()> { + insta::assert_snapshot!( + zizmor() + .input(input_under_test("use-trusted-publishing/gem-push.yml")) + .run()?, + @r" + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:12:14 + | + 12 | run: gem push foo-0.1.0.gem + | --- ^^^^^^^^^^^^^^^^^^^^^^ this command + | | + | this step + | + = note: audit confidence → High + + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:15:14 + | + 15 | run: bundle exec gem push foo-0.1.0.gem + | --- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command + | | + | this step + | + = note: audit confidence → High + + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:20:11 + | + 19 | run: | + | --- this step + 20 | / gem \ + 21 | | push \ + 22 | | foo-0.1.0.gem + | |_________________________^ this command + | + = note: audit confidence → High + + 5 findings (2 suppressed): 3 informational, 0 low, 0 medium, 0 high + " + ); + + Ok(()) +} + +#[test] +fn test_twine_upload() -> Result<()> { + insta::assert_snapshot!( + zizmor() + .input(input_under_test("use-trusted-publishing/twine-upload.yml")) + .run()?, + @r" + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:12:14 + | + 12 | run: twine upload dist/* + | --- ^^^^^^^^^^^^^^^^^^^ this command + | | + | this step + | + = note: audit confidence → High + + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:15:14 + | + 15 | run: python -m twine upload dist/* + | --- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command + | | + | this step + | + = note: audit confidence → High + + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:19:11 + | + 18 | run: | + | --- this step + 19 | / python3.10 -m \ + 20 | | twine \ + 21 | | upload \ 
+ 22 | | dist/* + | |__________________^ this command + | + = note: audit confidence → High + + info[use-trusted-publishing]: prefer trusted publishing for authentication + --> @@INPUT@@:26:11 + | + 25 | run: | + | --- this step + 26 | pipx run twine==6.1.0 upload dist/* + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command + | + = note: audit confidence → High + + 6 findings (2 suppressed): 4 informational, 0 low, 0 medium, 0 high + " + ); + + Ok(()) +} diff --git a/crates/zizmor/tests/integration/common.rs b/crates/zizmor/tests/integration/common.rs index 26b8b7ad..52cfcb1b 100644 --- a/crates/zizmor/tests/integration/common.rs +++ b/crates/zizmor/tests/integration/common.rs @@ -42,28 +42,37 @@ pub struct Zizmor { stdin: Option, unbuffer: bool, offline: bool, + gh_token: bool, inputs: Vec, config: Option, no_config: bool, output: OutputMode, expects_failure: bool, + show_audit_urls: bool, } impl Zizmor { /// Create a new zizmor runner. pub fn new() -> Self { - let cmd = Command::new(cargo::cargo_bin!()); + let mut cmd = Command::new(cargo::cargo_bin!()); + + // Our child `zizmor` process starts with a clean environment, to + // ensure we explicitly test interactions with things like `CI` + // and `GH_TOKEN`. 
+ cmd.env_clear(); Self { cmd, stdin: None, unbuffer: false, offline: true, + gh_token: true, inputs: vec![], config: None, no_config: false, output: OutputMode::Stdout, expects_failure: false, + show_audit_urls: false, } } @@ -82,11 +91,6 @@ impl Zizmor { self } - pub fn unsetenv(mut self, key: &str) -> Self { - self.cmd.env_remove(key); - self - } - pub fn input(mut self, input: impl Into) -> Self { self.inputs.push(input.into()); self @@ -112,6 +116,11 @@ impl Zizmor { self } + pub fn gh_token(mut self, flag: bool) -> Self { + self.gh_token = flag; + self + } + pub fn output(mut self, output: OutputMode) -> Self { self.output = output; self @@ -125,6 +134,11 @@ impl Zizmor { self } + pub fn show_audit_urls(mut self, flag: bool) -> Self { + self.show_audit_urls = flag; + self + } + pub fn working_dir(mut self, dir: impl Into) -> Self { self.cmd.current_dir(dir.into()); self @@ -140,7 +154,12 @@ impl Zizmor { } else { // If we're running in online mode, we pre-assert the // presence of GH_TOKEN to make configuration failures more obvious. 
- std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?; + let token = + std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?; + + if self.gh_token { + self.cmd.env("GH_TOKEN", token); + } } if self.no_config && self.config.is_some() { @@ -165,6 +184,12 @@ impl Zizmor { self.cmd.arg("--no-progress"); } + if self.show_audit_urls { + self.cmd.arg("--show-audit-urls=always"); + } else { + self.cmd.arg("--show-audit-urls=never"); + } + for input in &self.inputs { self.cmd.arg(input); } diff --git a/crates/zizmor/tests/integration/config.rs b/crates/zizmor/tests/integration/config.rs index 5d428072..70b5a73d 100644 --- a/crates/zizmor/tests/integration/config.rs +++ b/crates/zizmor/tests/integration/config.rs @@ -164,6 +164,31 @@ fn test_discovers_config_in_dotgithub() -> anyhow::Result<()> { Ok(()) } +/// Ensures we correctly discover a `zizmor.yaml` configuration file in a `.github` +/// subdirectory of a given input directory, i.e. +/// `config-in-dotgithub/.github/zizmor.yaml` in this case. +/// +/// This tests that both `.yml` and `.yaml` extensions are supported. +#[test] +fn test_discovers_dotyaml_config_in_dotgithub() -> anyhow::Result<()> { + insta::assert_snapshot!( + zizmor() + .input(input_under_test("config-scenarios/dotyaml-config-in-dotgithub")) + .setenv("RUST_LOG", "zizmor::config=debug") + .output(OutputMode::Both) + .run()?, + @r" + 🌈 zizmor v@@VERSION@@ + DEBUG zizmor::config: discovering config for local input `@@INPUT@@` + DEBUG zizmor::config: attempting config discovery in `@@INPUT@@` + DEBUG zizmor::config: found config candidate at `@@INPUT@@/.github/zizmor.yaml` + No findings to report. Good job! (1 ignored, 2 suppressed) + ", + ); + + Ok(()) +} + /// Ensures we correctly discover a configuration file in a `.github` /// subdirectory from an input filename, i.e. 
going from /// `config-in-dotgithub/.github/workflows/hackme.yml` diff --git a/crates/zizmor/tests/integration/e2e.rs b/crates/zizmor/tests/integration/e2e.rs index 31239c2b..fc56c027 100644 --- a/crates/zizmor/tests/integration/e2e.rs +++ b/crates/zizmor/tests/integration/e2e.rs @@ -83,13 +83,21 @@ fn menagerie() -> Result<()> { #[test] fn color_control_basic() -> Result<()> { - // No terminal, so no color by default. + // No terminal and not CI, so no color by default. let no_color_default_output = zizmor() .output(OutputMode::Both) .input(input_under_test("e2e-menagerie")) .run()?; assert!(!no_color_default_output.contains("\x1b[")); + // No terminal but CI, so color by default. + let color_default_ci_output = zizmor() + .setenv("CI", "true") + .output(OutputMode::Both) + .input(input_under_test("e2e-menagerie")) + .run()?; + assert!(color_default_ci_output.contains("\x1b[")); + // Force color via --color=always. let forced_color_via_arg_output = zizmor() .output(OutputMode::Both) @@ -251,6 +259,75 @@ fn invalid_inputs() -> Result<()> { ); } + insta::assert_snapshot!( + zizmor() + .expects_failure(true) + .input(input_under_test("invalid/empty/")) + .args(["--strict-collection"]) + .run()?, + @r" + 🌈 zizmor v@@VERSION@@ + fatal: no audit was performed + error: no inputs collected + | + = help: collection yielded no auditable inputs + = help: inputs must contain at least one valid workflow, action, or Dependabot config + + Caused by: + no inputs collected + " + ); + + Ok(()) +} + +/// Reproduction test for #1395. +/// +/// Ensures that we produce a useful error message when the user gives us an +/// invalid YAML input (specifically, one with duplicate mapping keys). 
+#[test] +fn test_issue_1394() -> Result<()> { + insta::assert_snapshot!( + zizmor() + .expects_failure(true) + .input(input_under_test( + "invalid/issue-1395-repro-duplicate-mapping-keys.yml" + )) + .args(["--strict-collection"]) + .run()?, + @r#" + 🌈 zizmor v@@VERSION@@ + fatal: no audit was performed + failed to load file://@@INPUT@@ as workflow + + Caused by: + 0: invalid YAML syntax: jobs.demo.steps[0]: duplicate entry with key "env" at line 10 column 9 + 1: jobs.demo.steps[0]: duplicate entry with key "env" at line 10 column 9 + "# + ); + + // Without --strict-collection, we get a warning and then a collection failure error. + insta::assert_snapshot!( + zizmor() + .expects_failure(true) + .input(input_under_test( + "invalid/issue-1395-repro-duplicate-mapping-keys.yml" + )) + .run()?, + @r#" + 🌈 zizmor v@@VERSION@@ + WARN collect_inputs: zizmor::registry::input: failed to parse input: jobs.demo.steps[0]: duplicate entry with key "env" at line 10 column 9 + fatal: no audit was performed + error: no inputs collected + | + = help: collection yielded no auditable inputs + = help: inputs must contain at least one valid workflow, action, or Dependabot config + + Caused by: + no inputs collected + "# + ); + Ok(()) } @@ -342,10 +419,10 @@ fn issue_1065() -> Result<()> { = note: audit confidence → Medium error[unpinned-uses]: unpinned action reference - --> @@INPUT@@:16:9 + --> @@INPUT@@:16:15 | 16 | uses: thollander/actions-comment-pull-request@v3 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -437,10 +514,10 @@ fn issue_1286() -> Result<()> { @r" 🌈 zizmor v@@VERSION@@ fatal: no audit was performed - ref-confusion failed on file://@@INPUT@@ + 'ref-confusion' audit failed on file://@@INPUT@@ Caused by: - 0: error in ref-confusion + 0: error in 'ref-confusion' 
audit 1: couldn't list branches for woodruffw-experiments/this-does-not-exist 2: can't access woodruffw-experiments/this-does-not-exist: missing or you have no access ", @@ -531,7 +608,6 @@ fn test_cant_retrieve_offline() -> Result<()> { zizmor() .expects_failure(true) .offline(true) - .unsetenv("GH_TOKEN") .args(["pypa/sampleproject"]) .run()?, @r" @@ -557,7 +633,7 @@ fn test_cant_retrieve_no_gh_token() -> Result<()> { zizmor() .expects_failure(true) .offline(false) - .unsetenv("GH_TOKEN") + .gh_token(false) .args(["pypa/sampleproject"]) .run()?, @r" @@ -595,3 +671,25 @@ fn test_github_output() -> Result<()> { Ok(()) } + +/// Ensures that the `--show-audit-urls` flag works as expected. +#[test] +fn test_show_urls() -> Result<()> { + let with_urls = zizmor() + .offline(true) + .show_audit_urls(true) + .input(input_under_test("several-vulnerabilities.yml")) + .run()?; + + assert!(with_urls.contains("audit documentation → ")); + + let without_urls = zizmor() + .offline(true) + .show_audit_urls(false) + .input(input_under_test("several-vulnerabilities.yml")) + .run()?; + + assert!(!without_urls.contains("audit documentation → ")); + + Ok(()) +} diff --git a/crates/zizmor/tests/integration/e2e/snapshots/integration__e2e__json_v1__json_v1.snap b/crates/zizmor/tests/integration/e2e/snapshots/integration__e2e__json_v1__json_v1.snap index 1d9aa349..73c084f6 100644 --- a/crates/zizmor/tests/integration/e2e/snapshots/integration__e2e__json_v1__json_v1.snap +++ b/crates/zizmor/tests/integration/e2e/snapshots/integration__e2e__json_v1__json_v1.snap @@ -1,6 +1,5 @@ --- source: crates/zizmor/tests/integration/e2e/json_v1.rs -assertion_line: 17 expression: output --- [ @@ -249,7 +248,7 @@ expression: output "given_path": "@@INPUT@@" } }, - "annotation": "action is not pinned to a tag, branch, or hash ref", + "annotation": "image is not pinned to a tag, branch, or hash ref", "route": { "route": [ { @@ -269,25 +268,32 @@ expression: output } ] }, - "feature_kind": "Normal", + 
"feature_kind": { + "Subfeature": { + "after": 0, + "fragment": { + "Raw": "ubuntu" + } + } + }, "kind": "Primary" }, "concrete": { "location": { "start_point": { "row": 20, - "column": 8 + "column": 23 }, "end_point": { "row": 20, "column": 29 }, "offset_span": { - "start": 406, + "start": 421, "end": 427 } }, - "feature": "uses: docker://ubuntu", + "feature": "docker://ubuntu", "comments": [] } } @@ -312,7 +318,7 @@ expression: output "given_path": "@@INPUT@@" } }, - "annotation": "action is not pinned to a tag, branch, or hash ref", + "annotation": "image is not pinned to a tag, branch, or hash ref", "route": { "route": [ { @@ -332,25 +338,32 @@ expression: output } ] }, - "feature_kind": "Normal", + "feature_kind": { + "Subfeature": { + "after": 0, + "fragment": { + "Raw": "ghcr.io/pypa/gh-action-pypi-publish" + } + } + }, "kind": "Primary" }, "concrete": { "location": { "start_point": { "row": 26, - "column": 8 + "column": 23 }, "end_point": { "row": 26, "column": 58 }, "offset_span": { - "start": 531, + "start": 546, "end": 581 } }, - "feature": "uses: docker://ghcr.io/pypa/gh-action-pypi-publish", + "feature": "docker://ghcr.io/pypa/gh-action-pypi-publish", "comments": [] } } diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__gha_hazmat.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__gha_hazmat.snap index 2c8c9f79..cbe4960e 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__gha_hazmat.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__gha_hazmat.snap @@ -156,15 +156,11 @@ error[dangerous-triggers]: use of fundamentally insecure workflow trigger error[bot-conditions]: spoofable bot actor check --> .github/workflows/bot-conditions.yml:18:9 | -16 | / hackme: -17 | | runs-on: ubuntu-latest -18 | | if: github.actor == 'dependabot[bot]' - | | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ actor context may be spoofable -19 | | steps: -... 
| -33 | | run: echo hello -34 | | if: github.actor == 'notabot' - | |______________________________________- this job +16 | hackme: + | ------ this job +17 | runs-on: ubuntu-latest +18 | if: github.actor == 'dependabot[bot]' + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ actor context may be spoofable | = note: audit confidence → High = note: this finding has an auto-fix @@ -488,26 +484,26 @@ warning[excessive-permissions]: overly broad permissions = note: audit confidence → Medium error[unpinned-uses]: unpinned action reference - --> .github/workflows/known-vulnerable-actions.yml:19:9 + --> .github/workflows/known-vulnerable-actions.yml:19:15 | 19 | - uses: atlassian/gajira-create@v1.0.1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/known-vulnerable-actions.yml:25:9 + --> .github/workflows/known-vulnerable-actions.yml:25:15 | 25 | - uses: rlespinasse/github-slug-action@v4 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/known-vulnerable-actions.yml:28:9 + --> .github/workflows/known-vulnerable-actions.yml:28:15 | 28 | - uses: rlespinasse/github-slug-action@4.0.1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -610,90 +606,90 @@ info[use-trusted-publishing]: prefer trusted publishing for authentication = note: audit confidence → High error[unpinned-uses]: unpinned action 
reference - --> .github/workflows/pypi-manual-credential.yml:23:9 + --> .github/workflows/pypi-manual-credential.yml:23:15 | 23 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:27:9 + --> .github/workflows/pypi-manual-credential.yml:27:15 | 27 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:33:9 + --> .github/workflows/pypi-manual-credential.yml:33:15 | 33 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:39:9 + --> .github/workflows/pypi-manual-credential.yml:39:15 | 39 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:45:9 + --> .github/workflows/pypi-manual-credential.yml:45:15 | 45 | uses: 
pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:51:9 + --> .github/workflows/pypi-manual-credential.yml:51:15 | 51 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:58:9 + --> .github/workflows/pypi-manual-credential.yml:58:15 | 58 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:66:9 + --> .github/workflows/pypi-manual-credential.yml:66:15 | 66 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:73:9 + --> .github/workflows/pypi-manual-credential.yml:73:15 | 73 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/pypi-manual-credential.yml:81:9 + --> .github/workflows/pypi-manual-credential.yml:81:15 | 81 | uses: pypa/gh-action-pypi-publish@release/v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/ref-confusion.yml:26:9 + --> .github/workflows/ref-confusion.yml:26:15 | 26 | - uses: woodruffw/gha-hazmat/ref-confusion@confusable - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -768,10 +764,10 @@ warning[excessive-permissions]: overly broad permissions = note: audit confidence → Medium warning[secrets-inherit]: secrets unconditionally inherited by called workflow - --> .github/workflows/secrets-inherit.yml:16:5 + --> .github/workflows/secrets-inherit.yml:16:11 | 16 | uses: octo-org/example-repo/.github/workflows/called-workflow.yml@main - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow 17 | # NOT OK: unconditionally inherits 18 | secrets: inherit | ---------------- inherits all parent secrets @@ -896,18 +892,18 @@ warning[excessive-permissions]: overly broad permissions = note: audit confidence → Medium warning[unpinned-uses]: unpinned action reference - --> .github/workflows/unpinned.yml:20:9 + --> .github/workflows/unpinned.yml:20:24 | 20 | - uses: 
docker://ubuntu - | ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref + | ^^^^^^ image is not pinned to a tag, branch, or hash ref | = note: audit confidence → High warning[unpinned-uses]: unpinned action reference - --> .github/workflows/unpinned.yml:26:9 + --> .github/workflows/unpinned.yml:26:24 | 26 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref | = note: audit confidence → High diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict-2.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict-2.snap index 82f9916b..e755d64a 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict-2.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict-2.snap @@ -5,4 +5,10 @@ expression: "zizmor().expects_failure(true).input(input_under_test(&format!(\"in 🌈 zizmor v@@VERSION@@ WARN collect_inputs: zizmor::registry::input: failed to validate input as action: input does not match expected validation schema fatal: no audit was performed -no inputs collected +error: no inputs collected + | + = help: collection yielded no auditable inputs + = help: inputs must contain at least one valid workflow, action, or Dependabot config + +Caused by: + no inputs collected diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict.snap index 3fcc2c3a..e4265e31 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_input_not_strict.snap @@ -5,4 +5,10 @@ expression: 
"zizmor().expects_failure(true).input(input_under_test(&format!(\"in 🌈 zizmor v@@VERSION@@ WARN collect_inputs: zizmor::registry::input: failed to validate input as workflow: input does not match expected validation schema fatal: no audit was performed -no inputs collected +error: no inputs collected + | + = help: collection yielded no auditable inputs + = help: inputs must contain at least one valid workflow, action, or Dependabot config + +Caused by: + no inputs collected diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-10.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-10.snap index 11971dc3..ad3abc6b 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-10.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-10.snap @@ -8,4 +8,6 @@ failed to load file://@@INPUT@@ as action Caused by: 0: input does not match expected validation schema - 1: null is not of type "object" + 1: "name" is a required property + "description" is a required property + "runs" is a required property diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-2.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-2.snap index c614fbcf..5e6c4d95 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-2.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-2.snap @@ -8,5 +8,5 @@ failed to load file://@@INPUT@@ as workflow Caused by: 0: input does not match expected validation schema - 1: on.workflow_call.inputs.input: "type" is a required property - Additional properties are not allowed ('boom' was unexpected) + 1: Additional properties are not allowed ('boom' was unexpected) + on.workflow_call.inputs.input: "type" is a required property diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-3.snap 
b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-3.snap index 21b89f43..9a2229b3 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-3.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-3.snap @@ -8,4 +8,5 @@ failed to load file://@@INPUT@@ as workflow Caused by: 0: input does not match expected validation schema - 1: null is not of type "object" + 1: "on" is a required property + "jobs" is a required property diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-4.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-4.snap index 38008ae0..f7e20e7a 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-4.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-4.snap @@ -7,5 +7,5 @@ fatal: no audit was performed failed to load file://@@INPUT@@ as workflow Caused by: - 0: input does not match expected validation schema - 1: "lol" is not of type "object" + 0: invalid YAML syntax: invalid type: string "lol", expected a YAML mapping + 1: invalid type: string "lol", expected a YAML mapping diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-6.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-6.snap index 21b89f43..9a2229b3 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-6.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-6.snap @@ -8,4 +8,5 @@ failed to load file://@@INPUT@@ as workflow Caused by: 0: input does not match expected validation schema - 1: null is not of type "object" + 1: "on" is a required property + "jobs" is a required property diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-7.snap 
b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-7.snap index 21b89f43..9a2229b3 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-7.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__invalid_inputs-7.snap @@ -8,4 +8,5 @@ failed to load file://@@INPUT@@ as workflow Caused by: 0: input does not match expected validation schema - 1: null is not of type "object" + 1: "on" is a required property + "jobs" is a required property diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_1116_strict_collection_remote_input.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_1116_strict_collection_remote_input.snap index f1adb2e4..e4d75f51 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_1116_strict_collection_remote_input.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_1116_strict_collection_remote_input.snap @@ -8,4 +8,6 @@ failed to load https://github.com/woodruffw-experiments/zizmor-issue-1116/blob/f Caused by: 0: input does not match expected validation schema - 1: null is not of type "object" + 1: "name" is a required property + "description" is a required property + "runs" is a required property diff --git a/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_569.snap b/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_569.snap index d4697a96..154e47f1 100644 --- a/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_569.snap +++ b/crates/zizmor/tests/integration/snapshots/integration__e2e__issue_569.snap @@ -24,58 +24,58 @@ expression: "zizmor().offline(false).output(OutputMode::Both).args([\"--no-onlin INFO audit: zizmor: 🌈 completed .github/workflows/tail-call.yml INFO audit: zizmor: 🌈 completed .github/workflows/verify-ensurepip-wheels.yml error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:117:9 + --> 
.github/workflows/build.yml:117:15 | 117 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:313:7 + --> .github/workflows/build.yml:313:13 | 313 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:368:7 + --> .github/workflows/build.yml:368:13 | 368 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:477:7 + --> .github/workflows/build.yml:477:13 | 477 | uses: egor-tensin/setup-gcc@v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:498:7 + --> .github/workflows/build.yml:498:13 | 498 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:583:9 + 
--> .github/workflows/build.yml:583:15 | 583 | uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/build.yml:588:9 + --> .github/workflows/build.yml:588:15 | 588 | uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High @@ -89,82 +89,82 @@ help[obfuscation]: obfuscated usage of GitHub Actions features = note: this finding has an auto-fix error[unpinned-uses]: unpinned action reference - --> .github/workflows/documentation-links.yml:25:9 + --> .github/workflows/documentation-links.yml:25:15 | 25 | - uses: readthedocs/actions/preview@v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/lint.yml:28:9 + --> .github/workflows/lint.yml:28:15 | 28 | - uses: pre-commit/action@v3.0.1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/require-pr-label.yml:19:9 + --> .github/workflows/require-pr-label.yml:19:15 | 19 | uses: 
mheap/github-action-required-labels@v5 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/require-pr-label.yml:38:9 + --> .github/workflows/require-pr-label.yml:38:15 | 38 | uses: mheap/github-action-required-labels@v5 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/require-pr-label.yml:47:9 + --> .github/workflows/require-pr-label.yml:47:15 | 47 | uses: mheap/github-action-required-labels@v5 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/require-pr-label.yml:58:9 + --> .github/workflows/require-pr-label.yml:58:15 | 58 | uses: mheap/github-action-required-labels@v5 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/reusable-tsan.yml:60:7 + --> .github/workflows/reusable-tsan.yml:60:13 | 60 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by 
blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/reusable-ubuntu.yml:68:7 + --> .github/workflows/reusable-ubuntu.yml:68:13 | 68 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/reusable-wasi.yml:30:7 + --> .github/workflows/reusable-wasi.yml:30:13 | 30 | uses: bytecodealliance/actions/wasmtime/setup@v1 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High error[unpinned-uses]: unpinned action reference - --> .github/workflows/reusable-wasi.yml:46:7 + --> .github/workflows/reusable-wasi.yml:46:13 | 46 | uses: hendrikmuhs/ccache-action@v1.2 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy) | = note: audit confidence → High diff --git a/crates/zizmor/tests/integration/test-data/archived-uses.yml b/crates/zizmor/tests/integration/test-data/archived-uses.yml new file mode 100644 index 00000000..a5dd1f29 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/archived-uses.yml @@ -0,0 +1,24 @@ +name: archived-uses + +on: [push, pull_request] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +permissions: {} + +jobs: + archived-uses: + name: archived-uses + runs-on: ubuntu-latest + steps: + - name: setup ruby + uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 + + 
- name: SETUP RUBY BUT LOUDLY + uses: ACTIONS/SETUP-RUBY@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 + + archived-uses-reusable: + name: archived-uses-reusable + uses: actions/setup-ruby/.github/workflows/notreal.yml@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 diff --git a/crates/zizmor/tests/integration/test-data/archived-uses/action/action.yml b/crates/zizmor/tests/integration/test-data/archived-uses/action/action.yml new file mode 100644 index 00000000..8dea5984 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/archived-uses/action/action.yml @@ -0,0 +1,9 @@ +name: archived-uses + +description: archived-uses composite action + +runs: + using: composite + steps: + - name: setup ruby + uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3 diff --git a/crates/zizmor/tests/integration/test-data/config-scenarios/dotyaml-config-in-dotgithub/.github/workflows/hackme.yml b/crates/zizmor/tests/integration/test-data/config-scenarios/dotyaml-config-in-dotgithub/.github/workflows/hackme.yml new file mode 100644 index 00000000..31714168 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/config-scenarios/dotyaml-config-in-dotgithub/.github/workflows/hackme.yml @@ -0,0 +1,16 @@ +name: hackme +on: + issues: + +permissions: {} + +jobs: + inject-me: + name: inject-me + runs-on: ubuntu-latest + + steps: + - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # tag=v7.0.1 + with: + script: | + return "doing a thing: ${{ github.event.issue.title }}" diff --git a/crates/zizmor/tests/integration/test-data/config-scenarios/dotyaml-config-in-dotgithub/.github/zizmor.yaml b/crates/zizmor/tests/integration/test-data/config-scenarios/dotyaml-config-in-dotgithub/.github/zizmor.yaml new file mode 100644 index 00000000..9e7be950 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/config-scenarios/dotyaml-config-in-dotgithub/.github/zizmor.yaml @@ -0,0 +1,4 @@ +rules: + template-injection: + ignore: + - 
hackme.yml diff --git a/crates/zizmor/tests/integration/test-data/impostor-commit.yml b/crates/zizmor/tests/integration/test-data/impostor-commit.yml new file mode 100644 index 00000000..f898f2c3 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/impostor-commit.yml @@ -0,0 +1,34 @@ +# impostor-commit.yml +# +# what: +# an example of commit impersonation. GitHub repositories and their forks +# share a "network" of commit objects, meaning that GitHub's backend +# can't easily distinguish between them. as a result, workflows that +# appear to be pinned to a hash on a specific repo may actually be pinned +# to a hash within a different fork repo, which can be malicious. +# +# how: +# a user is unlikely to accidentally write a malicious commit, but may merge +# an otherwise innocent-looking change from a third party without realizing +# that the commits pinned by that party are actually references to a malicious +# fork. +# +# see also: https://www.chainguard.dev/unchained/what-the-fork-imposter-commits-in-github-actions-and-ci-cd + +name: example +on: [push] + +permissions: {} + +jobs: + commit: + runs-on: ubuntu-latest + permissions: {} + steps: + # NOT OK: c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e is an impostor + - uses: actions/checkout@c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e + with: + persist-credentials: false + - shell: bash + run: | + echo 'hello world!' diff --git a/crates/zizmor/tests/integration/test-data/invalid/empty/README.md b/crates/zizmor/tests/integration/test-data/invalid/empty/README.md new file mode 100644 index 00000000..91cf4265 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/invalid/empty/README.md @@ -0,0 +1,3 @@ +# empty + +This is an empty directory, except for this README file. 
diff --git a/crates/zizmor/tests/integration/test-data/invalid/issue-1395-repro-duplicate-mapping-keys.yml b/crates/zizmor/tests/integration/test-data/invalid/issue-1395-repro-duplicate-mapping-keys.yml new file mode 100644 index 00000000..33d1cdfb --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/invalid/issue-1395-repro-duplicate-mapping-keys.yml @@ -0,0 +1,15 @@ +# reproducer for https://github.com/zizmorcore/zizmor/issues/1395 + +name: issue-1395 +on: workflow_dispatch +jobs: + demo: + name: Demonstration + runs-on: ubuntu-latest + steps: + - name: Duplicate env block breaks Zizmor + env: + FOO: foo + run: echo ... + env: + BAR: bar diff --git a/crates/zizmor/tests/integration/test-data/obfuscation/issue-1414-repro.yml b/crates/zizmor/tests/integration/test-data/obfuscation/issue-1414-repro.yml new file mode 100644 index 00000000..7d78a85e --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/obfuscation/issue-1414-repro.yml @@ -0,0 +1,16 @@ +name: issue-1414-repro + +on: + pull_request: + +permissions: {} + +jobs: + some-job: + runs-on: windows-latest + defaults: + run: + shell: cmd + steps: + - name: say hi + run: echo hi diff --git a/crates/zizmor/tests/integration/test-data/obfuscation/workflow-cmd-default-shell.yml b/crates/zizmor/tests/integration/test-data/obfuscation/workflow-cmd-default-shell.yml new file mode 100644 index 00000000..19bd79d0 --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/obfuscation/workflow-cmd-default-shell.yml @@ -0,0 +1,17 @@ +name: workflow-cmd-default-shell + +on: + pull_request: + +permissions: {} + +defaults: + run: + shell: cmd + +jobs: + some-job: + runs-on: windows-latest + steps: + - name: say hi + run: echo hi diff --git a/crates/zizmor/tests/integration/test-data/use-trusted-publishing/gem-push.yml b/crates/zizmor/tests/integration/test-data/use-trusted-publishing/gem-push.yml new file mode 100644 index 00000000..53dffa93 --- /dev/null +++ 
b/crates/zizmor/tests/integration/test-data/use-trusted-publishing/gem-push.yml @@ -0,0 +1,22 @@ +on: [push] + +name: use-trusted-publishing + +jobs: + publish-1: + name: publish-1 + runs-on: ubuntu-latest + permissions: {} + steps: + - name: vulnerable-1 + run: gem push foo-0.1.0.gem + + - name: vulnerable-2 + run: bundle exec gem push foo-0.1.0.gem + + # multiline bash commands are detected + - name: vulnerable-3 + run: | + gem \ + push \ + foo-0.1.0.gem diff --git a/crates/zizmor/tests/integration/test-data/use-trusted-publishing/twine-upload.yml b/crates/zizmor/tests/integration/test-data/use-trusted-publishing/twine-upload.yml new file mode 100644 index 00000000..e218d57e --- /dev/null +++ b/crates/zizmor/tests/integration/test-data/use-trusted-publishing/twine-upload.yml @@ -0,0 +1,26 @@ +on: [push] + +name: use-trusted-publishing + +jobs: + publish-1: + name: publish-1 + runs-on: ubuntu-latest + permissions: {} + steps: + - name: vulnerable-1 + run: twine upload dist/* + + - name: vulnerable-2 + run: python -m twine upload dist/* + + - name: vulnerable-3 + run: | + python3.10 -m \ + twine \ + upload \ + dist/* + + - name: vulnerable-4 + run: | + pipx run twine==6.1.0 upload dist/* diff --git a/docs/audits.md b/docs/audits.md index 1ae8c73d..b17f3f4c 100644 --- a/docs/audits.md +++ b/docs/audits.md @@ -60,6 +60,56 @@ Add a `name:` field to your workflow or action. - run: echo "Hello!" ``` +## `archived-uses` + +| Type | Examples | Introduced in | Works offline | Auto-fixes available | Configurable | +|----------|------------------|---------------|----------------|--------------------|--------------| +| Workflow, Action | [archived-uses.yml] | v1.19.0 | ✅ | ❌ | ❌ | + +[archived-uses.yml]: https://github.com/zizmorcore/zizmor/blob/main/crates/zizmor/tests/integration/test-data/archived-uses.yml + + +Detects `#!yaml uses:` clauses that reference [archived repositories]. 
+ +[archived repositories]: https://docs.github.com/en/repositories/archiving-a-github-repository/archiving-repositories + +Archival on GitHub makes a repository read-only, and indicates that the +repository is no longer maintained. Using actions or reusable workflows from archived +represents a supply chain risk: + +- Unmaintained repositories are more likely to accumulate indirect vulnerabilties, + including in any dependencies that have been vendored into JavaScript actions + (or that are used indirectly through transitive dependencies that have gone + stale). + +- Any vulnerabilities discovered in the action or reusable workflow *itself* + are unlikely to be fixed, since the repository is read-only. + +Consequently, users are encouraged to avoid dependening on archived repositories +for actions or reusable workflows. + +### Remediation + +Depending on the archived repository's functionality, you may be able to: + +- _Remove_ the action/reusable workflow entirely. Actions @actions-rs/cargo, + for example, can be replaced by directly invoking the correct `#!bash cargo ...` + command in a `#!yaml run:` step. + +- _Replace_ the archived action/reusable workflow with a maintained alternative. + For example, @actions/setup-ruby can be replaced with @ruby/setup-ruby. + +!!! tip + + Many archived actions are thin wrappers around GitHub's REST and GraphQL + APIs. In most cases, you can replace these actions with usage of the + [`gh` CLI](https://cli.github.com/), which is pre-installed on GitHub-hosted + runners. + + For more information, see [Using GitHub CLI in workflows]. + + [Using GitHub CLI in workflows]: https://docs.github.com/en/actions/how-tos/write-workflows/choose-what-workflows-do/use-github-cli + ## `artipacked` | Type | Examples | Introduced in | Works offline | Auto-fixes available | Configurable | @@ -469,7 +519,7 @@ In general, you should enable `cooldown` for all updaters. 
Detects usages of `insecure-external-code-execution` in Dependabot configuration files. -By default, Dependabot does not execution code from dependency manifests +By default, Dependabot does not execute code from dependency manifests during updates. However, users can opt in to this behavior by setting `#!yaml insecure-external-code-execution: allow` in their Dependabot configuration. diff --git a/docs/configuration.md b/docs/configuration.md index c9ce0046..8e98129f 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -9,7 +9,7 @@ description: zizmor's configuration file and configurable behaviors. Configuration support was added in `v0.2.0`. `zizmor` supports a small amount of configuration via [YAML] config files, -typically named `zizmor.yml`. +typically named `zizmor.yml` or `zizmor.yaml`. [YAML]: https://learnxinyminutes.com/docs/yaml/ @@ -41,9 +41,25 @@ typically named `zizmor.yml`. * File inputs (e.g. `zizmor path/to/workflow.yml`): `zizmor` performs directory discovery starting in the directory containing the given file. - * Directory inputs (e.g. `zizmor .`): `zizmor` looks for a `zizmor.yml` or - `.github/zizmor.yml` in the given directory or any parent, up to the - filesystem root or the first `.git` directory. + * Directory inputs (e.g. `zizmor .`): `zizmor` looks for a `zizmor.yml` + or `zizmor.yaml` file in the given directory, the `.github` child directory, + or any parent, up to the filesystem root or the first `.git` directory. + + !!! example + + Given an invocation like `zizmor ./repo/`, `zizmor` will attempt + to discover configuration files in the following order: + + 1. `./repo/.github/zizmor.yml` + 2. `./repo/.github/zizmor.yaml` + 3. `./repo/zizmor.yml` + 4. `./repo/zizmor.yaml` + 5. `./repo/../.github/zizmor.yml` + 6. `./repo/../.github/zizmor.yaml` + 7. ...and so on, until the filesystem root or a `.git/` directory is found. + + + !!! 
note diff --git a/docs/development.md b/docs/development.md index db5e4448..0f3bd128 100644 --- a/docs/development.md +++ b/docs/development.md @@ -172,7 +172,7 @@ See [insta's documentation] for more details. ## Benchmarking -`zizmor` currently uses [hyperfine](https://github.org/sharkdp/hyperfine) +`zizmor` currently uses [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) for command-line benchmarking. Benchmarks are stored in the top-level `bench/` directory, and can be @@ -184,27 +184,22 @@ make bench ``` We currently run offline benchmarks in the CI and report their results -to [Bencher](https://bencher.dev/). See -[our project page](https://bencher.dev/console/projects/zizmor/plots) -on Bencher for results and trends. +to [CodSpeed](https://codspeed.io). See +[our project page](https://codspeed.io/zizmorcore/zizmor) +on CodSpeed for results and trends. There are also online benchmarks, but these don't get run automatically. -To run them, you can pass `GH_TOKEN` to the `bench/benchmark.py` script -directly: +To run them, you can set `GH_TOKEN`: ```bash -GH_TOKEN=$(gh auth token) uv run bench/benchmark.py +GH_TOKEN=$(gh auth token) make bench ``` ### Adding new benchmarks -`zizmor` currently orchestrates benchmarks with `bench/benchmark.py`, -which wraps `hyperfine` to add a planning phase. +Benchmarks are currently written as pytest functions. -Take a look at `bench/benchmarks.json` for the current benchmarks. -Observe that each benchmark tells `benchmark.py` how to retrieve its -input as well as provides a `stencil` that the benchmark runner will -expand to run the benchmark. +Take a look at `bench/test_*.py` for existing benchmarks. 
## Building the website diff --git a/docs/integrations.md b/docs/integrations.md index 6714d90b..bd552580 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -28,17 +28,17 @@ jobs: name: Run zizmor 🌈 runs-on: ubuntu-latest permissions: - security-events: write - contents: read # only needed for private repos - actions: read # only needed for private repos + security-events: write # Required for upload-sarif (used by zizmor-action) to upload SARIF files. + contents: read # Only needed for private repos. Needed to clone the repo. + actions: read # Only needed for private repos. Needed for upload-sarif to read workflow run info. steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Run zizmor 🌈 - uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0 + uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0 ``` See the action's [`inputs` documentation][inputs-documentation] for @@ -89,17 +89,17 @@ GitHub Actions setup: name: zizmor latest via PyPI runs-on: ubuntu-latest permissions: - security-events: write # needed for SARIF uploads - contents: read # only needed for private repos - actions: read # only needed for private repos + security-events: write # Required for upload-sarif (used by zizmor-action) to upload SARIF files. + contents: read # Only needed for private repos. Needed to clone the repo. + actions: read # Only needed for private repos. Needed for upload-sarif to read workflow run info. 
steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 with: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # v7.1.5 - name: Run zizmor 🌈 run: uvx zizmor --format=sarif . > results.sarif # (2)! @@ -107,7 +107,7 @@ GitHub Actions setup: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} # (1)! - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0 + uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8 with: sarif_file: results.sarif category: zizmor @@ -161,14 +161,13 @@ GitHub Actions setup: name: zizmor latest via PyPI runs-on: ubuntu-latest permissions: - contents: read # only needed for private repos - actions: read # only needed for private repos + contents: read # Only needed for private repos. Needed to clone the repo. steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 - name: Install the latest version of uv - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # v7.1.5 - name: Run zizmor 🌈 run: uvx zizmor --format=github . # (2)! @@ -256,7 +255,7 @@ To do so, add the following to your `.pre-commit-config.yaml` `#!yaml repos:` se ```yaml - repo: https://github.com/zizmorcore/zizmor-pre-commit - rev: v1.17.0 # (1)! + rev: v1.19.0 # (1)! hooks: - id: zizmor ``` @@ -274,6 +273,20 @@ This will run `zizmor` on every commit. See [`pre-commit`](https://pre-commit.com/) documentation for more information on how to configure `pre-commit`. +## Linter frameworks + +!!! 
important + + These are third-party integrations; please report any issues + you encounter to the appropriate upstream project. + +### super-linter + +You can use `zizmor` with @super-linter/super-linter. + +Refer to the [super-linter documentation](https://github.com/super-linter/super-linter) +for additional details. + ## Tab completion { #integration-tab-completion } !!! note diff --git a/docs/release-notes.md b/docs/release-notes.md index d7f08599..c79be398 100644 --- a/docs/release-notes.md +++ b/docs/release-notes.md @@ -9,7 +9,69 @@ of `zizmor`. ## Next (UNRELEASED) -## Enhancements 🌱 +### Enhancements 🌱 + +* The [excessive-permissions] audit is now aware of the `artifact-metadata` + and `models` permissions (#1461) + +## 1.19.0 + +### New Features 🌈 + +* **New audit**: [archived-uses] detects usages of archived repositories in + `#!yaml uses:` clauses (#1411) + +### Enhancements 🌱 + +* The [use-trusted-publishing] audit now detects additional publishing command + patterns, including common "wrapped" patterns like `bundle exec gem publish` + (#1394) + +* zizmor now produces better error messages on a handful of error cases involving + invalid input files. Specifically, a subset of syntax and schema errors now + produce more detailed and actionable error messages (#1396) + +* The [use-trusted-publishing] audit now detects additional publishing command + patterns, including `uv run ...`, `uvx ...`, and `poetry publish` + (#1402) + +* zizmor now produces more useful and less ambiguous spans for many findings, + particularly those from the [anonymous-definition] audit (#1416) + +* zizmor now discovers configuration files named `zizmor.yaml`, in addition + to `zizmor.yml` (#1431) + +* zizmor now produces a more useful error message when input collection + yields no inputs (#1439) + +* The `--render-links` flag now allows users to control `zizmor`'s OSC 8 terminal + link rendering behavior. 
This is particularly useful in environments that + advertise themselves as terminals but fail to correctly render or ignore + OSC 8 links (#1454) + +### Performance Improvements 🚄 + +* The [impostor-commit] audit is now significantly faster on true positives, + making true positive detection virtually as fast as true negative detection. + In practice, true positive runs are over 100 times faster than before + (#1429) + +### Bug Fixes 🐛 + +* Fixed a bug where the [obfuscation] audit would crash if it encountered + a CMD shell that was defined outside of the current step block (i.e. + as a job or workflow default) (#1418) + +* Fixed a bug where the `opentofu` ecosystem was not recognized in + Dependabot configuration files (#1452) + +* `--color=always` no longer implies `--render-links=always`, as some + environments (like GitHub Actions) support ANSI color codes but fail + to handle OSC escapes gracefully (#1454) + +## 1.18.0 + +### Enhancements 🌱 * The [use-trusted-publishing] audit now detects NuGet publishing commands (#1369) @@ -1143,7 +1205,7 @@ This is one of `zizmor`'s bigger recent releases! Key enhancements include: ### What's Changed * fix(cli): remove '0 ignored' from another place by @woodruffw in #157 -* perf: speed up impostor-commit's fast path by @woodruffw in #158 +* perf: speed up [impostor-commit]'s fast path by @woodruffw in #158 * fix(cli): fixup error printing by @woodruffw in #159 ## v0.3.1 @@ -1295,5 +1357,7 @@ This is one of `zizmor`'s bigger recent releases! 
Key enhancements include: [dependabot-execution]: ./audits.md#dependabot-execution [dependabot-cooldown]: ./audits.md#dependabot-cooldown [concurrency-limits]: ./audits.md#concurrency-limits +[archived-uses]: ./audits.md#archived-uses +[impostor-commit]: ./audits.md#impostor-commit [exit code]: ./usage.md#exit-codes diff --git a/docs/snippets/help.txt b/docs/snippets/help.txt index 5c21ca53..49ac502d 100644 --- a/docs/snippets/help.txt +++ b/docs/snippets/help.txt @@ -28,6 +28,10 @@ Options: Don't show progress bars, even if the terminal supports them --format The output format to emit. By default, cargo-style diagnostics will be emitted [default: plain] [possible values: plain, json, json-v1, sarif, github] + --render-links + Whether to render OSC 8 links in the output [env: ZIZMOR_RENDER_LINKS=] [default: auto] [possible values: auto, always, never] + --show-audit-urls + Whether to render audit URLs in the output, separately from any URLs embedded in OSC 8 links [env: ZIZMOR_SHOW_AUDIT_URLS=] [default: auto] [possible values: auto, always, never] --color Control the use of color in output [possible values: auto, always, never] -c, --config diff --git a/docs/snippets/trophies.md b/docs/snippets/trophies.md index 593d2eb0..8c9b1196 100644 --- a/docs/snippets/trophies.md +++ b/docs/snippets/trophies.md @@ -24,6 +24,14 @@ - aio-libs/aiobotocore#1355 +- ![](https://github.com/altair-graphql.png?size=40){ width="40" loading=lazy align=left } altair-graphql + + --- + + ??? example "Examples" + - altair-graphql/altair@4aa5679f89528c183321a07e387567c13be29f26 + + - ![](https://github.com/anchore.png?size=40){ width="40" loading=lazy align=left } anchore --- @@ -32,6 +40,14 @@ - anchore/vunnel#832 +- ![](https://github.com/ansible.png?size=40){ width="40" loading=lazy align=left } ansible + + --- + + ??? 
example "Examples" + - ansible/ansible-documentation#3188 + + - ![](https://github.com/apache.png?size=40){ width="40" loading=lazy align=left } apache --- @@ -142,6 +158,7 @@ ??? example "Examples" - bitwarden/ios#2124 + - bitwarden/passkeys-index#128 - bitwarden/sdk-sm#1378 - bitwarden/server#6409 @@ -220,6 +237,14 @@ - conda/conda-lock#814 +- ![](https://github.com/containers.png?size=40){ width="40" loading=lazy align=left } containers + + --- + + ??? example "Examples" + - containers/podman#27642 + + - ![](https://github.com/cunla.png?size=40){ width="40" loading=lazy align=left } cunla --- @@ -340,6 +365,15 @@ - docker/compose#12737 +- ![](https://github.com/dubzzz.png?size=40){ width="40" loading=lazy align=left } dubzzz + + --- + + ??? example "Examples" + - dubzzz/fast-check#6369 + - dubzzz/fast-check#6370 + + - ![](https://github.com/earthobservations.png?size=40){ width="40" loading=lazy align=left } earthobservations --- @@ -461,6 +495,14 @@ - gaphor/gaphor#3658 +- ![](https://github.com/gechr.png?size=40){ width="40" loading=lazy align=left } gechr + + --- + + ??? example "Examples" + - gechr/WhichSpace@e32d710e7e17102dd6eecd522aa1b511b961fcf3 + + - ![](https://github.com/GenericMappingTools.png?size=40){ width="40" loading=lazy align=left } GenericMappingTools --- @@ -539,6 +581,14 @@ - grafana/tanka#1441 +- ![](https://github.com/great-expectations.png?size=40){ width="40" loading=lazy align=left } great-expectations + + --- + + ??? example "Examples" + - great-expectations/airflow-provider-great-expectations#209 + + - ![](https://github.com/GreptimeTeam.png?size=40){ width="40" loading=lazy align=left } GreptimeTeam --- @@ -881,6 +931,14 @@ - nedbat/coveragepy@675c2f8c6c36771c85929ac9e1190b13e0269344 +- ![](https://github.com/neocmakelsp.png?size=40){ width="40" loading=lazy align=left } neocmakelsp + + --- + + ??? 
example "Examples" + - neocmakelsp/neocmakelsp#216 + + - ![](https://github.com/NetApp.png?size=40){ width="40" loading=lazy align=left } NetApp --- @@ -935,6 +993,14 @@ - onnx/onnx#7363 +- ![](https://github.com/onyx-dot-app.png?size=40){ width="40" loading=lazy align=left } onyx-dot-app + + --- + + ??? example "Examples" + - onyx-dot-app/onyx#6344 + + - ![](https://github.com/open-edge-platform.png?size=40){ width="40" loading=lazy align=left } open-edge-platform --- @@ -1036,6 +1102,14 @@ - prettytable/prettytable#339 +- ![](https://github.com/privacyidea.png?size=40){ width="40" loading=lazy align=left } privacyidea + + --- + + ??? example "Examples" + - privacyidea/privacyidea#4854 + + - ![](https://github.com/prometheus.png?size=40){ width="40" loading=lazy align=left } prometheus --- @@ -1049,6 +1123,10 @@ --- ??? example "Examples" + - psf/black#4901 + - psf/black#4905 + - psf/black#4906 + - psf/black#4911 - psf/cachecontrol#345 @@ -1405,6 +1483,14 @@ - stravalib/stravalib#649 +- ![](https://github.com/super-linter.png?size=40){ width="40" loading=lazy align=left } super-linter + + --- + + ??? 
example "Examples" + - super-linter/super-linter#7241 + + - ![](https://github.com/Swatinem.png?size=40){ width="40" loading=lazy align=left } Swatinem --- diff --git a/docs/snippets/trophies.txt b/docs/snippets/trophies.txt index 7c87f661..f631bc34 100644 --- a/docs/snippets/trophies.txt +++ b/docs/snippets/trophies.txt @@ -8,7 +8,9 @@ adafruit/circuitpython#9785 ag2ai/faststream#2398 aio-libs/aiobotocore#1344 aio-libs/aiobotocore#1355 +altair-graphql/altair@4aa5679f89528c183321a07e387567c13be29f26 anchore/vunnel#832 +ansible/ansible-documentation#3188 apache/airflow#45408 apache/airflow#47258 apache/airflow#50749 @@ -37,6 +39,7 @@ BerkeleyLearnVerify/Scenic#320 bitcoindevkit/bdk#1778 bitcoindevkit/bdk#1813 bitwarden/ios#2124 +bitwarden/passkeys-index#128 bitwarden/sdk-sm#1378 bitwarden/server#6409 blakeblackshear/frigate#15490 @@ -50,6 +53,7 @@ canonical/testflinger#630 certbot/josepy#193 coder/coder#19459 conda/conda-lock#814 +containers/podman#27642 cunla/fakeredis-py#391 curl/curl@ba9fe58d4331402a24495618668b2cc8afe9794e curl/curl@283ad5c4320fa1d733e60a0dbe216ee36e3924fb @@ -72,6 +76,8 @@ django/django@86b8058b40145fb5ba4fd859676225f533eca986 django-commons/django-tasks-scheduler#272 Diaoul/subliminal#1190 docker/compose#12737 +dubzzz/fast-check#6369 +dubzzz/fast-check#6370 earthobservations/wetterdienst#1440 edgelesssys/contrast#1604 EFForg/rayhunter#711 @@ -88,6 +94,7 @@ freeCodeCamp/devdocs#2386 freedomofpress/securedrop-workstation#1222 freemocap/freemocap#659 gaphor/gaphor#3658 +gechr/WhichSpace@e32d710e7e17102dd6eecd522aa1b511b961fcf3 getsops/sops#1704 GenericMappingTools/pygmt#3861 git-lfs/git-lfs#5930 @@ -106,6 +113,7 @@ grafana/pyroscope#4157 grafana/rollout-operator#218 grafana/synthetic-monitoring-app#1126 grafana/tanka#1441 +great-expectations/airflow-provider-great-expectations#209 google/zerocop#2810 google/zx#1067 google/zx#1126 @@ -193,6 +201,7 @@ mozilla/neqo#2413 mozilla/neqo#2721 narwhals-dev/narwhals#1567 
nedbat/coveragepy@675c2f8c6c36771c85929ac9e1190b13e0269344 +neocmakelsp/neocmakelsp#216 NetApp/harvest#3247 nextcloud/user_saml#947 nextcloud/.github#477 @@ -205,6 +214,7 @@ numpy/numpy.org#797 onnx/onnx#6661 onnx/onnx#6662 onnx/onnx#7363 +onyx-dot-app/onyx#6344 openbao/openbao#924 open-edge-platform/geti#162 open-edge-platform/geti-sdk#603 @@ -222,7 +232,12 @@ PlasmaPy/PlasmaPy#2975 PowerDNS/pdns#15600 praetorian-inc/noseyparker#228 prettytable/prettytable#339 +privacyidea/privacyidea#4854 prometheus/prometheus#16530 +psf/black#4901 +psf/black#4905 +psf/black#4906 +psf/black#4911 psf/cachecontrol#345 pubgrub-rs/pubgrub#389 pyca/service-identity#75 @@ -304,6 +319,7 @@ slackapi/python-slack-events-api#110 stravalib/stravalib#645 stravalib/stravalib#648 stravalib/stravalib#649 +super-linter/super-linter#7241 Swatinem/rust-cache#262 TecharoHQ/anubis#188 termcolor/termcolor#89 diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 446e1262..8a21c898 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -102,7 +102,7 @@ If you run into this issue, you have two options: in your repository secrets, you could do: ```yaml title="example/repoA/.github/workflows/ci.yml" hl_lines="3" - - uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0 + - uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0 with: token: ${{ secrets.ZIZMOR_GH_TOKEN }} ``` diff --git a/docs/usage.md b/docs/usage.md index ab626fc2..90d8563c 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -206,16 +206,15 @@ By default, `zizmor` produces `cargo`-style diagnostic output. 
```console error[template-injection]: code injection via template expansion - --> ./tests/integration/test-data/template-injection/pr-425-backstop/action.yml:28:7 + --> ./tests/integration/test-data/template-injection/pr-425-backstop/action.yml:31:56 | -28 | - name: case4 - | ^^^^^^^^^^^ this step -29 | uses: azure/powershell +29 | uses: azure/powershell@whatever + | ------------------------------- action accepts arbitrary code 30 | with: 31 | inlineScript: Get-AzVM -ResourceGroupName "${{ inputs.expandme }}" - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ inputs.expandme may expand into attacker-controllable code + | ------------ via this input ^^^^^^^^^^^^^^^ may expand into attacker-controllable code | - = note: audit confidence → Low + = note: audit confidence → High ``` This output will be colorized by default when sent to a supporting terminal and @@ -224,6 +223,36 @@ disable output colorization by setting `NO_COLOR=1` in their environment. This format can also be explicitly selected with `--format=plain`. +#### Audit documentation links + +By default, `zizmor` includes links to relevant documentation pages +for each finding in its plain output format. These links are provided via +[OSC 8](https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda) +hyperlinks, which are supported by many modern terminal emulators. + +For example, in the output above, `template-injection` within +`error[template-injection]: ...` is a clickable link that takes you to +the [template-injection](./audits#template-injection) audit documentation. + +In addition to these OSC 8 links, `zizmor` also includes the full URL +as part of each finding _if_ it detects a non-terminal output _or_ +a CI environment (e.g. GitHub Actions). + +To make this behavior explicir, users can supply the `--show-audit-urls` +option: + +```bash +# always show audit documentation URLs, even if output is to a terminal +zizmor --show-audit-urls=always ... 
+ +# never show audit documentation URLs +zizmor --show-audit-urls=never ... +``` + +!!! note + + `--show-audit-urls=...` is available in `v1.19.0` and later. + #### Color customization When invoked from a terminal, `zizmor` will attempt to enrich its output diff --git a/pyproject.toml b/pyproject.toml index 6628404f..a9de525a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,4 +16,9 @@ manifest-path = "crates/zizmor/Cargo.toml" include = [{ path = "README.md", format = "sdist" }, { path = "LICENSE", format = "sdist" }] [dependency-groups] +bench = [ + "pytest>=9.0.2", + "pytest-codspeed>=4.2.0", + "urllib3>=2.6.2", +] docs = ["zensical"] diff --git a/support/archived-action-repos.txt b/support/archived-action-repos.txt new file mode 100644 index 00000000..9a96a1ab --- /dev/null +++ b/support/archived-action-repos.txt @@ -0,0 +1,147 @@ +# archived-action-repos.txt +# one per line, comment lines begin with # +# +# NOTE(ww): I somewhat arbitrarily curated this manually from a GitHub search +# for `topic:github-actions archived:true` as of 2025-12-04. 
+ +# official actions +actions/upload-release-asset +actions/create-release +actions/setup-ruby +actions/setup-elixir +actions/setup-haskell + + +# community actions +actions-rs/cargo +actions-rs/grcov +actions-rs/audit-check +actions-rs/toolchain +actions-rs/tarpaulin +actions-rs/clippy-check +actions-rs/install +actions-rs/components-nightly + +andrewmcodes-archive/rubocop-linter-action + +artichoke/setup-rust + +aslafy-z/conventional-pr-title-action + +Azure/AppConfiguration-Sync +Azure/appservice-actions +Azure/azure-resource-login-action +Azure/container-actions +Azure/container-scan +Azure/get-keyvault-secrets +Azure/k8s-actions +Azure/manage-azure-policy +Azure/data-factory-deploy-action +Azure/data-factory-export-action +Azure/data-factory-validate-action +Azure/publish-security-assessments +Azure/run-sqlpackage-action +Azure/spring-cloud-deploy +Azure/webapps-container-deploy + +cedrickring/golang-action + +cirrus-actions/rebase + +crazy-max/ghaction-docker-buildx + +Decathlon/pull-request-labeler-action + +DeLaGuardo/setup-graalvm + +dulvui/godot-android-export + +expo/expo-preview-action + +fabasoad/setup-zizmor-action + +facebook/pysa-action + +fregante/release-with-changelog + +google/mirror-branch-action +google/skywater-pdk-actions + +gradle/gradle-build-action + +grafana/k6-action + +helaili/github-graphql-action +helaili/jekyll-action + +Ilshidur/action-slack + +jakejarvis/backblaze-b2-action +jakejarvis/cloudflare-purge-action +jakejarvis/firebase-deploy-action +jakejarvis/hugo-build-action +jakejarvis/lighthouse-action +jakejarvis/s3-sync-action + +justinribeiro/lighthouse-action + +kanadgupta/glitch-sync + +kxxt/chatgpt-action + +machine-learning-apps/wandb-action + +MansaGroup/gcs-cache-action + +marvinpinto/actions +marvinpinto/action-automatic-releases + +maxheld83/ghpages + +micnncim/action-lgtm-reaction + +mikepenz/gradle-dependency-submission + +orf/cargo-bloat-action + +paambaati/codeclimate-action + +primer/figma-action + 
+repo-sync/pull-request +repo-sync/repo-sync + +sagebind/docker-swarm-deploy-action + +ScottBrenner/generate-changelog-action + +secrethub/actions + +semgrep/semgrep-action + +ShaunLWM/action-release-debugapk + +stefanprodan/kube-tools + +SonarSource/sonarcloud-github-action + +SwiftDocOrg/github-wiki-publish-action + +tachiyomiorg/issue-moderator-action + +technote-space/auto-cancel-redundant-workflow +technote-space/get-diff-action + +TencentCloudBase/cloudbase-action + +trmcnvn/chrome-addon + +whelk-io/maven-settings-xml-action + +yeslayla/build-godot-action + +youyo/aws-cdk-github-actions + +z0al/dependent-issues + +8398a7/action-slack diff --git a/support/archived-repos.py b/support/archived-repos.py new file mode 100755 index 00000000..628c53bf --- /dev/null +++ b/support/archived-repos.py @@ -0,0 +1,32 @@ +#!/usr/bin/env -S uv run --script +# /// script +# requires-python = ">=3.14" +# dependencies = [] +# /// +from pathlib import Path + +_HERE = Path(__file__).parent +_ARCHIVED_ACTION_REPOS = _HERE / "archived-action-repos.txt" + +assert _ARCHIVED_ACTION_REPOS.is_file(), f"Missing {_ARCHIVED_ACTION_REPOS}" + +_OUT = _HERE.parent / "crates" / "zizmor" / "data" / "archived-repos.txt" + + +def main() -> None: + lines = [] + for line in _ARCHIVED_ACTION_REPOS.read_text().splitlines(): + line = line.strip() + if not line or line.startswith("#"): + continue + + lines.append(line.lower()) + + lines.sort() + + with _OUT.open("w") as io: + print("\n".join(lines), file=io) + + +if __name__ == "__main__": + main() diff --git a/uv.lock b/uv.lock index ab3742c9..cc92b8e7 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,88 @@ version = 1 revision = 3 requires-python = ">=3.10" +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/d7/516d984057745a6cd96575eea814fe1edd6646ee6efd552fb7b0921dec83/cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44", size = 184283, upload-time = "2025-09-08T23:22:08.01Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/ad6a0b408daa859246f57c03efd28e5dd1b33c21737c2db84cae8c237aa5/cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49", size = 180504, upload-time = "2025-09-08T23:22:10.637Z" }, + { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, + { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, + { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, + { url = "https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, + { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, + { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, + { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/027d7fb82e58c48ea717149b03bcadcbdc293553edb283af792bd4bcbb3f/cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a", size = 172184, upload-time = "2025-09-08T23:22:23.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/fa/072dd15ae27fbb4e06b437eb6e944e75b068deb09e2a2826039e49ee2045/cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739", size = 182790, upload-time = "2025-09-08T23:22:24.752Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + [[package]] name = "click" version = "8.3.1" @@ -32,6 +114,27 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/2d/82/e5d2c1c67d19841e9edc74954c827444ae826978499bde3dfc1d007c8c11/deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00", size = 13475, upload-time = "2024-08-30T05:31:48.659Z" }, ] +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + [[package]] name = "markdown" version = "3.10" @@ -41,6 +144,54 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/70/81/54e3ce63502cd085a0c556652a4e1b919c45a446bd1e5300e10c44c8c521/markdown-3.10-py3-none-any.whl", hash = "sha256:b5b99d6951e2e4948d939255596523444c0e677c669700b1d17aa4a8a464cb7c", size = 107678, upload-time = "2025-11-03T19:51:13.887Z" }, ] +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -52,15 +203,59 @@ wheels = [ [[package]] name = "pymdown-extensions" -version = "10.17.1" +version = "10.17.2" 
source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e7/d9/a987e4d549c6c82353fce5fa5f650229bb60ea4c0d1684a2714a509aef58/pymdown_extensions-10.17.1.tar.gz", hash = "sha256:60d05fe55e7fb5a1e4740fc575facad20dc6ee3a748e8d3d36ba44142e75ce03", size = 845207, upload-time = "2025-11-11T21:44:58.815Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/6d/af5378dbdb379fddd9a277f8b9888c027db480cde70028669ebd009d642a/pymdown_extensions-10.17.2.tar.gz", hash = "sha256:26bb3d7688e651606260c90fb46409fbda70bf9fdc3623c7868643a1aeee4713", size = 847344, upload-time = "2025-11-26T15:43:57.004Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/40/b2d7b9fdccc63e48ae4dbd363b6b89eb7ac346ea49ed667bb71f92af3021/pymdown_extensions-10.17.1-py3-none-any.whl", hash = "sha256:1f160209c82eecbb5d8a0d8f89a4d9bd6bdcbde9a8537761844cfc57ad5cd8a6", size = 266310, upload-time = "2025-11-11T21:44:56.809Z" }, + { url = "https://files.pythonhosted.org/packages/93/78/b93cb80bd673bdc9f6ede63d8eb5b4646366953df15667eb3603be57a2b1/pymdown_extensions-10.17.2-py3-none-any.whl", hash = "sha256:bffae79a2e8b9e44aef0d813583a8fea63457b7a23643a43988055b7b79b4992", size = 266556, upload-time = "2025-11-26T15:43:55.162Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 
1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-codspeed" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi" }, + { name = "pytest" }, + { name = "rich" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e2/e8/27fcbe6516a1c956614a4b61a7fccbf3791ea0b992e07416e8948184327d/pytest_codspeed-4.2.0.tar.gz", hash = "sha256:04b5d0bc5a1851ba1504d46bf9d7dbb355222a69f2cd440d54295db721b331f7", size = 113263, upload-time = "2025-10-24T09:02:55.704Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/b8/d599a466c50af3f04001877ae8b17c12b803f3b358235736b91a0769de0d/pytest_codspeed-4.2.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:609828b03972966b75b9b7416fa2570c4a0f6124f67e02d35cd3658e64312a7b", size = 261943, upload-time = "2025-10-24T09:02:37.962Z" }, + { url = "https://files.pythonhosted.org/packages/74/19/ccc1a2fcd28357a8db08ba6b60f381832088a3850abc262c8e0b3406491a/pytest_codspeed-4.2.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23a0c0fbf8bb4de93a3454fd9e5efcdca164c778aaef0a9da4f233d85cb7f5b8", size = 250782, upload-time = "2025-10-24T09:02:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2d/f0083a2f14ecf008d961d40439a71da0ae0d568e5f8dc2fccd3e8a2ab3e4/pytest_codspeed-4.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2de87bde9fbc6fd53f0fd21dcf2599c89e0b8948d49f9bad224edce51c47e26b", size = 261960, upload-time = "2025-10-24T09:02:40.665Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/0c/1f514c553db4ea5a69dfbe2706734129acd0eca8d5101ec16f1dd00dbc0f/pytest_codspeed-4.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95aeb2479ca383f6b18e2cc9ebcd3b03ab184980a59a232aea6f370bbf59a1e3", size = 250808, upload-time = "2025-10-24T09:02:42.07Z" }, + { url = "https://files.pythonhosted.org/packages/81/04/479905bd6653bc981c0554fcce6df52d7ae1594e1eefd53e6cf31810ec7f/pytest_codspeed-4.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d4fefbd4ae401e2c60f6be920a0be50eef0c3e4a1f0a1c83962efd45be38b39", size = 262084, upload-time = "2025-10-24T09:02:43.155Z" }, + { url = "https://files.pythonhosted.org/packages/d2/46/d6f345d7907bac6cbb6224bd697ecbc11cf7427acc9e843c3618f19e3476/pytest_codspeed-4.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:309b4227f57fcbb9df21e889ea1ae191d0d1cd8b903b698fdb9ea0461dbf1dfe", size = 251100, upload-time = "2025-10-24T09:02:44.168Z" }, + { url = "https://files.pythonhosted.org/packages/de/dc/e864f45e994a50390ff49792256f1bdcbf42f170e3bc0470ee1a7d2403f3/pytest_codspeed-4.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72aab8278452a6d020798b9e4f82780966adb00f80d27a25d1274272c54630d5", size = 262057, upload-time = "2025-10-24T09:02:45.791Z" }, + { url = "https://files.pythonhosted.org/packages/1d/1c/f1d2599784486879cf6579d8d94a3e22108f0e1f130033dab8feefd29249/pytest_codspeed-4.2.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:684fcd9491d810ded653a8d38de4835daa2d001645f4a23942862950664273f8", size = 251013, upload-time = "2025-10-24T09:02:46.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/fd/eafd24db5652a94b4d00fe9b309b607de81add0f55f073afb68a378a24b6/pytest_codspeed-4.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50794dabea6ec90d4288904452051e2febace93e7edf4ca9f2bce8019dd8cd37", size = 262065, upload-time = "2025-10-24T09:02:48.018Z" }, + { url = "https://files.pythonhosted.org/packages/f9/14/8d9340d7dc0ae647991b28a396e16b3403e10def883cde90d6b663d3f7ec/pytest_codspeed-4.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0ebd87f2a99467a1cfd8e83492c4712976e43d353ee0b5f71cbb057f1393aca", size = 251057, upload-time = "2025-10-24T09:02:49.102Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/48cf6afbca55bc7c8c93c3d4ae926a1068bcce3f0241709db19b078d5418/pytest_codspeed-4.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dbbb2d61b85bef8fc7e2193f723f9ac2db388a48259d981bbce96319043e9830", size = 267983, upload-time = "2025-10-24T09:02:50.558Z" }, + { url = "https://files.pythonhosted.org/packages/33/86/4407341efb5dceb3e389635749ce1d670542d6ca148bd34f9d5334295faf/pytest_codspeed-4.2.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:748411c832147bfc85f805af78a1ab1684f52d08e14aabe22932bbe46c079a5f", size = 256732, upload-time = "2025-10-24T09:02:51.603Z" }, + { url = "https://files.pythonhosted.org/packages/25/0e/8cb71fd3ed4ed08c07aec1245aea7bc1b661ba55fd9c392db76f1978d453/pytest_codspeed-4.2.0-py3-none-any.whl", hash = "sha256:e81bbb45c130874ef99aca97929d72682733527a49f84239ba575b5cb843bab0", size = 113726, upload-time = "2025-10-24T09:02:54.785Z" }, ] [[package]] @@ -127,6 +322,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = 
"sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + [[package]] name = "tomli" version = "2.3.0" @@ -176,9 +384,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, ] +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + 
+[[package]] +name = "urllib3" +version = "2.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" }, +] + [[package]] name = "zensical" -version = "0.0.9" +version = "0.0.10" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -189,20 +415,20 @@ dependencies = [ { name = "pyyaml" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/87/51b2c208e727fb2875b8cd0a29ec010cd2d4ceabb8d9d09a0bcc81a30ee8/zensical-0.0.9.tar.gz", hash = "sha256:5b41e2ddd2f19694e6fa6bb021a85095a09f7bd0afb7ea4ab694e73dcdc3d892", size = 3805609, upload-time = "2025-11-20T10:18:43Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/0e/d6c17a39505cdb90004805caa3696ba28e675d396697e4dc51a7ef820bcb/zensical-0.0.10.tar.gz", hash = "sha256:9a9b60a3757fb0f4cf2ec33844bf29141f02223e99440698cfb6911bb1f49956", size = 3805670, upload-time = "2025-11-25T20:42:23.076Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/4d/4e0222bf2b86c84002c2e94c14efdb31a0e117bf6ce39b01bee861fbf83f/zensical-0.0.9-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8a61f554f7d6edd588c23d4535a41c9e354786d9dabce151aba697f48a62ef9e", size = 11876113, upload-time = "2025-11-20T10:18:29.986Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/40/d19560c71e4f07602046e298cc1060538e9500c232fc905fe4a0b0bc4ac7/zensical-0.0.9-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:4262afb1774db9569314593765174d20956a2e0648ec84c24dea14747ab5c9cf", size = 11750760, upload-time = "2025-11-20T10:18:27.487Z" }, - { url = "https://files.pythonhosted.org/packages/56/b4/8ef91e8e06cf33ae5d8a61f2aeb3c204ea212bfc8c24a5d0a5ba0ce89171/zensical-0.0.9-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0515610c0849f6491da9dcf0eb4eebbfd6fdc149c077e983b9ca89dad6afb58d", size = 12072523, upload-time = "2025-11-20T10:18:16.728Z" }, - { url = "https://files.pythonhosted.org/packages/ce/35/49f4178d6640716e404a3938692f9a96ba96aa52197530f7d8ef6b93568b/zensical-0.0.9-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:872496595c59ff49780bf2289a71997afa05629115a237e3ec50137fe61b67fb", size = 12046934, upload-time = "2025-11-20T10:18:19.728Z" }, - { url = "https://files.pythonhosted.org/packages/90/0f/aae32bc4e54d1a7978e3d97eb31efd66b97ceac8a7172e9ff9cdbaf16a71/zensical-0.0.9-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2a7d577133fd9aea6e5f78dd6aacd95fae68006e051c89006dc5767331c9656", size = 12355939, upload-time = "2025-11-20T10:18:22.576Z" }, - { url = "https://files.pythonhosted.org/packages/16/5f/a9c9bc32f5676aabb58563dd69c6bc9b351f7f1c9750c50ae5af66618245/zensical-0.0.9-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed53d626c8bcd2a5a90f06f30c72b8e156aa0d172ca949fa19b90f297a628d4f", size = 12126744, upload-time = "2025-11-20T10:18:24.887Z" }, - { url = "https://files.pythonhosted.org/packages/56/61/8150be50898cb8788bb672a17d61d7e4e82024f03d07fefd275f6529c5b5/zensical-0.0.9-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2231efd3d072a5c6362e17a9781626f0e6f83407727589487d7a02de9e8bd49b", size = 12252545, upload-time = "2025-11-20T10:18:32.242Z" }, - { url = 
"https://files.pythonhosted.org/packages/36/11/d75ce3421995e05ce65b31677f7c7af06e4a44af89f80006ebabd3b7d8b0/zensical-0.0.9-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:340cef1980808cce6285d2edb5abdb90119b6e39d5a3be8b49029402ffbb651f", size = 12312488, upload-time = "2025-11-20T10:18:34.7Z" }, - { url = "https://files.pythonhosted.org/packages/07/55/ef11f43e2288910a859d383b2c6072a424558eeffb1baa48f9ed343395e9/zensical-0.0.9-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:cdda5721b6b3a08592e370cf1d8a77bf22d20e88bb93addd31fb6e91f5825051", size = 12389592, upload-time = "2025-11-20T10:18:37.936Z" }, - { url = "https://files.pythonhosted.org/packages/bd/35/cc62c9e1031c3f1132597e351d31964e6077bf251e1762a99d49fec5261d/zensical-0.0.9-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c0505f6aa2ea5dad65bbc9257231c6691b430d4363b7e99cf3c7e73ee4014b46", size = 12330560, upload-time = "2025-11-20T10:18:40.384Z" }, - { url = "https://files.pythonhosted.org/packages/42/83/c764010c2bc6eaaf435726b79ebf0c8cee15cad52463e1348cfc9bc58da2/zensical-0.0.9-cp310-abi3-win32.whl", hash = "sha256:d95eb93b272931194fff584e327623b2c2436ed39dbddc4a289ce7177f826698", size = 11487863, upload-time = "2025-11-20T10:18:47.151Z" }, - { url = "https://files.pythonhosted.org/packages/97/2a/ed43083e8443dbcd6838e3ea6faad901488c1d052da67e9e0ee25184bc01/zensical-0.0.9-cp310-abi3-win_amd64.whl", hash = "sha256:17316ab0450a20841e2ba32ed119dd72c58986bca79956990ad6cfd8711c4df2", size = 11677131, upload-time = "2025-11-20T10:18:44.802Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b7/8bf897c220ae1475c59c1fb8bd4a4a288f28773ae485d0afeb530bcf5f44/zensical-0.0.10-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d371c9ff57cb2dc8d3b7ec630d4cf0ffa4cba3b414619c4975eb801a032105c0", size = 11875860, upload-time = "2025-11-25T20:42:08.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/22/b73c2e218a9b2753e1e869340b280f0e2c374caf900b9d6932c1b8cc011f/zensical-0.0.10-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:5bd06c9a1fb536e23cf6f5acb83af492a0aaf457708a81589731a33ab1695a1e", size = 11751105, upload-time = "2025-11-25T20:42:05.075Z" }, + { url = "https://files.pythonhosted.org/packages/49/eb/a95dbbc6939ca32231b881234c37d6b45140f3dd5461f68d59c6398e7f45/zensical-0.0.10-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6c4447aaa58e2545c3b7a5d39fa1f5a0e9062f1a2d380c3c21aac0a8979b5a", size = 12073214, upload-time = "2025-11-25T20:41:52.917Z" }, + { url = "https://files.pythonhosted.org/packages/b4/34/b8fbbbbb3a7edac44a626b2171889c3f6a670ae20bb2bf0121a22e9428ed/zensical-0.0.10-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95a91f13920975fc9744f052c028800e0b0553f52c9417bef8a6e16864459bd8", size = 12046487, upload-time = "2025-11-25T20:41:55.745Z" }, + { url = "https://files.pythonhosted.org/packages/14/af/7d9059af1727dc19b96c4a06c8e9c928b2d74a781ad9b807ab983254170d/zensical-0.0.10-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a22908517458428bd91adfbcf4813c7b63011cd1b6761956f81255993d903ae7", size = 12355490, upload-time = "2025-11-25T20:41:58.875Z" }, + { url = "https://files.pythonhosted.org/packages/9c/13/e4d912c954933b9b29abe7e9e5cdc7480b1003291ec9cb9a63a083072d14/zensical-0.0.10-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a482443e7aded812c3e72250f2ba8b4e77f09a626c1c9892766168dd2acf8dfd", size = 12126681, upload-time = "2025-11-25T20:42:01.986Z" }, + { url = "https://files.pythonhosted.org/packages/5c/a2/032bb19e87ab561c05977fa088033199747ccdec7826f73e234b5200c897/zensical-0.0.10-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:679d32bceac749a79ef11c08e5c0eb7c3e8e73f4c35732a2f6bcaa3ffaa61a70", size = 12252530, upload-time = "2025-11-25T20:42:11.192Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/34/ae021c619257103480df27f83b7be78361373467328f2698fad86dcd7870/zensical-0.0.10-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:db42271a085bf8efe2d6dc9f97325e866fcfa2fafe7e079c5bc0f0e8407275fa", size = 12313035, upload-time = "2025-11-25T20:42:14.365Z" }, + { url = "https://files.pythonhosted.org/packages/c0/e3/123d7a75abd1b3f8c8401a010b225373427abf87c53b6ef5ee9521285338/zensical-0.0.10-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:11e9acdfd6622dc43862c99e4669ebe56f4e7ab2cefe73a74005549a09ac679e", size = 12390358, upload-time = "2025-11-25T20:42:17.429Z" }, + { url = "https://files.pythonhosted.org/packages/94/46/b4c84c1a53fe8c2362b2e7c5a31f06d67ebb3850d9cb068538fa75527320/zensical-0.0.10-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3fc7e47efbd90f476276492049ade0587f32f95216062ed4ecc02857f2cbca39", size = 12330960, upload-time = "2025-11-25T20:42:20.295Z" }, + { url = "https://files.pythonhosted.org/packages/2c/de/25c2ea44a0688d20321e0436d24c1772c4c271c0eb30216c72151b758c0f/zensical-0.0.10-cp310-abi3-win32.whl", hash = "sha256:feaab818c28170a8b542f729c2941c2a766d7026d24bcf13b403f843250fa15b", size = 11488081, upload-time = "2025-11-25T20:42:28.508Z" }, + { url = "https://files.pythonhosted.org/packages/35/24/2dca7768ec98962c6c7abe1447b4929553a98fccbf8ec1e6db282025ca90/zensical-0.0.10-cp310-abi3-win_amd64.whl", hash = "sha256:64f208a3168eb616985680e84a76ca2dbf0064508c4ce8655f7fd81db10636aa", size = 11677518, upload-time = "2025-11-25T20:42:25.534Z" }, ] [[package]] @@ -210,6 +436,11 @@ name = "zizmor" source = { editable = "." 
} [package.dev-dependencies] +bench = [ + { name = "pytest" }, + { name = "pytest-codspeed" }, + { name = "urllib3" }, +] docs = [ { name = "zensical" }, ] @@ -217,4 +448,9 @@ docs = [ [package.metadata] [package.metadata.requires-dev] +bench = [ + { name = "pytest", specifier = ">=9.0.2" }, + { name = "pytest-codspeed", specifier = ">=4.2.0" }, + { name = "urllib3", specifier = ">=2.6.2" }, +] docs = [{ name = "zensical" }]