Compare commits

No commits in common. "main" and "v1.18.0-rc3" have entirely different histories.

139 changed files with 1438 additions and 3919 deletions

@@ -1,51 +0,0 @@
name: CodSpeed Benchmarks
on:
push:
branches:
- "main"
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
- labeled
# `workflow_dispatch` allows CodSpeed to trigger backtest
# performance analysis in order to generate initial data.
workflow_dispatch:
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
benchmarks:
name: Run benchmarks
# PRs only get benchmarked if they have the `run-benchmarks` label.
if: |
contains(github.event.pull_request.labels.*.name, 'run-benchmarks')
|| github.event_name == 'push'
|| github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
cache-all-crates: true
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Build zizmor (release)
run: cargo build --release
- name: Run the benchmarks
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
with:
mode: walltime
run: make bench

@@ -5,11 +5,6 @@ on:
branches:
- main
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
permissions: {}
@@ -22,14 +17,14 @@ jobs:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Format
run: cargo fmt --check
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Lint
run: cargo clippy -- --deny warnings
@@ -38,13 +33,13 @@ jobs:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Test dependencies
run: |
@@ -67,11 +62,11 @@ jobs:
name: Test site build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: Test site
run: make site

@@ -22,7 +22,7 @@ jobs:
pull-requests: write # for opening PRs
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
@@ -31,7 +31,7 @@ jobs:
make refresh-schemas
- name: create PR
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
draft: true
commit-message: "[BOT] update JSON schemas from SchemaStore"
@@ -59,18 +59,18 @@ jobs:
pull-requests: write # for opening PRs
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: try to refresh context capabilities
run: |
make webhooks-to-contexts
- name: create PR
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
draft: true
commit-message: "[BOT] update context capabilities"
@@ -97,18 +97,18 @@ jobs:
pull-requests: write # for opening PRs
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: try to refresh CodeQL injection sinks
run: |
make codeql-injection-sinks
- name: create PR
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
draft: true
commit-message: "[BOT] update CodeQL injection sinks"

@@ -27,7 +27,7 @@ jobs:
target: x86_64-pc-windows-msvc
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
@@ -60,7 +60,7 @@ jobs:
shell: bash
- name: Upload artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: artifacts-${{ matrix.target }}
path: ${{ steps.archive-release.outputs.filename }}
@@ -78,7 +78,7 @@ jobs:
steps:
- name: Download artifacts
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
pattern: artifacts-*
path: distrib/

@@ -43,7 +43,7 @@ jobs:
packages: write
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
@@ -54,7 +54,7 @@ jobs:
- name: Extract Docker metadata
id: docker-metadata
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
with:
images: "${{ env.ZIZMOR_IMAGE }}"
@@ -86,7 +86,7 @@ jobs:
shell: bash
- name: Upload digest
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: digests-${{ matrix.image.platform-pair }}
path: ${{ runner.temp }}/digests/*
@@ -107,7 +107,7 @@ jobs:
steps:
- name: Download digests
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
@@ -128,7 +128,7 @@ jobs:
- name: Extract Docker metadata
id: docker-metadata
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:

@@ -37,7 +37,7 @@ jobs:
# target: ppc64le
# manylinux: "2_28"
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Build wheels
@@ -47,7 +47,7 @@ jobs:
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
manylinux: ${{ matrix.platform.manylinux }}
- name: Upload wheels
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: wheels-linux-${{ matrix.platform.target }}
path: dist
@@ -67,7 +67,7 @@ jobs:
- runner: ubuntu-24.04
target: armv7
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Build wheels
@@ -77,7 +77,7 @@ jobs:
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
manylinux: musllinux_1_2
- name: Upload wheels
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: wheels-musllinux-${{ matrix.platform.target }}
path: dist
@@ -93,7 +93,7 @@ jobs:
- runner: windows-latest
target: x86
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Build wheels
@@ -102,7 +102,7 @@ jobs:
target: ${{ matrix.platform.target }}
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
- name: Upload wheels
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: wheels-windows-${{ matrix.platform.target }}
path: dist
@@ -118,7 +118,7 @@ jobs:
- runner: macos-15
target: aarch64
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Build wheels
@@ -127,7 +127,7 @@ jobs:
target: ${{ matrix.platform.target }}
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
- name: Upload wheels
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: wheels-macos-${{ matrix.platform.target }}
path: dist
@@ -136,7 +136,7 @@ jobs:
name: Build source distribution
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Build sdist
@@ -145,7 +145,7 @@ jobs:
command: sdist
args: --out dist --manifest-path crates/zizmor/Cargo.toml
- name: Upload sdist
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: wheels-sdist
path: dist
@@ -161,7 +161,7 @@ jobs:
permissions:
id-token: write # Trusted Publishing + PEP 740 attestations
steps:
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- name: Attest
uses: astral-sh/attest-action@2c727738cea36d6c97dd85eb133ea0e0e8fe754b # v0.0.4
with:

@@ -53,7 +53,7 @@ jobs:
permissions:
id-token: write # for trusted publishing to crates.io
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false

@@ -19,7 +19,7 @@ jobs:
permissions:
id-token: write # for trusted publishing to crates.io
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false

@@ -26,12 +26,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- name: build site
run: make site

@@ -19,18 +19,18 @@ jobs:
pull-requests: write # for 'Leave comment' step
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run zizmor
run: |
cargo run -- --format sarif . > results.sarif
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
with:
sarif_file: results.sarif
category: zizmor-test-sarif-presentation
@@ -52,11 +52,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run zizmor
run: |
@@ -67,27 +67,3 @@ jobs:
--no-exit-codes \
--format github \
crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml
test-plain-presentation:
name: Test plain text presentation
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'test-plain-presentation')
permissions: {}
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: Run zizmor
run: |
# Normally we'd want a workflow to fail if the audit fails,
# but we're only testing presentation here.
cargo run \
-- \
--no-exit-codes \
--format plain \
crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml

@@ -19,7 +19,7 @@ jobs:
issues: write # to create an issue if a new version is found
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
sparse-checkout: support/

@@ -5,11 +5,6 @@ on:
branches: ["main"]
pull_request:
branches: ["**"]
types:
- opened
- synchronize
- reopened
- ready_for_review
permissions: {}
@@ -21,12 +16,12 @@ jobs:
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: Run zizmor 🌈
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0
with:
# intentionally not scanning the entire repository,
# since it contains integration tests.

.gitignore

@@ -10,6 +10,3 @@
# pending snapshots
.*.pending-snap
# benchmarks
.codspeed/

Cargo.lock

@@ -58,9 +58,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "annotate-snippets"
version = "0.12.10"
version = "0.12.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15580ece6ea97cbf832d60ba19c021113469480852c6a2a6beb0db28f097bf1f"
checksum = "a44baf24dd94e781f74dfe67ffee75a09a57971ddf0f615a178b4f6d404b48ff"
dependencies = [
"anstyle",
"unicode-width 0.2.2",
@@ -228,6 +228,12 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.10.0"
@@ -307,18 +313,18 @@ dependencies = [
[[package]]
name = "camino"
version = "1.2.2"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609"
dependencies = [
"serde_core",
]
[[package]]
name = "cc"
version = "1.2.49"
version = "1.2.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215"
checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7"
dependencies = [
"find-msvc-tools",
"shlex",
@@ -338,9 +344,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "clap"
version = "4.5.53"
version = "4.5.51"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
dependencies = [
"clap_builder",
"clap_derive",
@@ -358,9 +364,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.53"
version = "4.5.51"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
dependencies = [
"anstream",
"anstyle",
@@ -370,9 +376,9 @@ dependencies = [
[[package]]
name = "clap_complete"
version = "4.5.61"
version = "4.5.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992"
checksum = "8e602857739c5a4291dfa33b5a298aeac9006185229a700e5810a3ef7272d971"
dependencies = [
"clap",
]
@@ -558,12 +564,6 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "data-encoding"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
[[package]]
name = "deranged"
version = "0.5.4"
@@ -705,9 +705,9 @@ dependencies = [
[[package]]
name = "find-msvc-tools"
version = "0.1.5"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
[[package]]
name = "flate2"
@@ -721,12 +721,11 @@ dependencies = [
[[package]]
name = "fluent-uri"
version = "0.3.2"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d"
dependencies = [
"borrow-or-share",
"ref-cast",
"bitflags 1.3.2",
]
[[package]]
@@ -924,11 +923,9 @@ dependencies = [
[[package]]
name = "github-actions-models"
version = "0.42.0"
version = "0.39.0"
dependencies = [
"indexmap",
"insta",
"self_cell",
"serde",
"serde_yaml",
"tracing",
@@ -955,9 +952,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]]
name = "hashbrown"
version = "0.16.1"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
dependencies = [
"allocator-api2",
"equivalent",
@@ -978,11 +975,12 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "1.4.0"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
dependencies = [
"bytes",
"fnv",
"itoa",
]
@@ -1291,12 +1289,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.12.1"
version = "2.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
dependencies = [
"equivalent",
"hashbrown 0.16.1",
"hashbrown 0.16.0",
"serde",
"serde_core",
]
@@ -1317,9 +1315,9 @@ dependencies = [
[[package]]
name = "insta"
version = "1.44.3"
version = "1.43.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5c943d4415edd8153251b6f197de5eb1640e56d84e8d9159bea190421c73698"
checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
dependencies = [
"console 0.15.11",
"once_cell",
@@ -1384,13 +1382,13 @@ dependencies = [
[[package]]
name = "jsonschema"
version = "0.37.4"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73c9ffb2b5c56d58030e1b532d8e8389da94590515f118cf35b5cb68e4764a7e"
checksum = "0303b14f91cbac17c64aaf2ef60ab71fe5f34c3867cedcbca72c9dd15f5040fe"
dependencies = [
"ahash",
"base64 0.22.1",
"bytecount",
"data-encoding",
"email_address",
"fancy-regex",
"fraction",
@@ -1428,7 +1426,7 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
dependencies = [
"bitflags",
"bitflags 2.10.0",
"libc",
"redox_syscall",
]
@@ -1477,16 +1475,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "ls-types"
version = "0.0.2"
name = "lsp-types"
version = "0.97.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a7deb98ef9daaa7500324351a5bab7c80c644cfb86b4be0c4433b582af93510"
checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071"
dependencies = [
"bitflags",
"fluent-uri 0.3.2",
"percent-encoding",
"bitflags 1.3.2",
"fluent-uri 0.1.4",
"serde",
"serde_json",
"serde_repr",
]
[[package]]
@@ -1782,9 +1780,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "pest"
version = "2.8.4"
version = "2.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22"
checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4"
dependencies = [
"memchr",
"ucd-trie",
@ -1792,9 +1790,9 @@ dependencies = [
[[package]]
name = "pest_derive"
version = "2.8.4"
version = "2.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f"
checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de"
dependencies = [
"pest",
"pest_generator",
@ -1802,9 +1800,9 @@ dependencies = [
[[package]]
name = "pest_generator"
version = "2.8.4"
version = "2.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625"
checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843"
dependencies = [
"pest",
"pest_meta",
@ -1815,9 +1813,9 @@ dependencies = [
[[package]]
name = "pest_meta"
version = "2.8.4"
version = "2.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82"
checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a"
dependencies = [
"pest",
"sha2",
@@ -2048,7 +2046,7 @@ version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
dependencies = [
"bitflags",
"bitflags 2.10.0",
]
[[package]]
@@ -2073,14 +2071,14 @@ dependencies = [
[[package]]
name = "referencing"
version = "0.37.4"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4283168a506f0dcbdce31c9f9cce3129c924da4c6bca46e46707fcb746d2d70c"
checksum = "22d0d0665043906aacf1d83bea9d61e5134f8f437815b84320e7facf8ff4e9c2"
dependencies = [
"ahash",
"fluent-uri 0.4.1",
"getrandom 0.3.4",
"hashbrown 0.16.1",
"hashbrown 0.16.0",
"parking_lot",
"percent-encoding",
"serde_json",
@@ -2129,9 +2127,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "reqwest"
version = "0.12.26"
version = "0.12.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
dependencies = [
"base64 0.22.1",
"bytes",
@@ -2224,7 +2222,7 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
dependencies = [
"bitflags",
"bitflags 2.10.0",
"errno",
"libc",
"linux-raw-sys",
@@ -2347,7 +2345,7 @@ version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c"
dependencies = [
"bitflags",
"bitflags 2.10.0",
"core-foundation",
"core-foundation-sys",
"libc",
@@ -2490,6 +2488,17 @@ dependencies = [
"syn 2.0.108",
]
[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
]
[[package]]
name = "serde_spanned"
version = "1.0.3"
@@ -2993,11 +3002,11 @@ dependencies = [
[[package]]
name = "tower-http"
version = "0.6.8"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
"bitflags",
"bitflags 2.10.0",
"bytes",
"futures-util",
"http",
@@ -3017,16 +3026,17 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-lsp-server"
version = "0.23.0"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f0e711655c89181a6bc6a2cc348131fcd9680085f5b06b6af13427a393a6e72"
checksum = "88f3f8ec0dcfdda4d908bad2882fe0f89cf2b606e78d16491323e918dfa95765"
dependencies = [
"bytes",
"dashmap",
"futures",
"httparse",
"ls-types",
"lsp-types",
"memchr",
"percent-encoding",
"serde",
"serde_json",
"tokio",
@@ -3043,9 +3053,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
version = "0.1.43"
version = "0.1.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
dependencies = [
"pin-project-lite",
"tracing-attributes",
@@ -3054,9 +3064,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.31"
version = "0.1.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
dependencies = [
"proc-macro2",
"quote",
@@ -3065,9 +3075,9 @@ dependencies = [
[[package]]
name = "tracing-core"
version = "0.1.35"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
dependencies = [
"once_cell",
"valuable",
@@ -3075,9 +3085,9 @@ dependencies = [
[[package]]
name = "tracing-indicatif"
version = "0.3.14"
version = "0.3.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1ef6990e0438749f0080573248e96631171a0b5ddfddde119aa5ba8c3a9c47e"
checksum = "04d4e11e0e27acef25a47f27e9435355fecdc488867fa2bc90e75b0700d2823d"
dependencies = [
"indicatif",
"tracing",
@@ -3098,9 +3108,9 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
version = "0.3.22"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
dependencies = [
"matchers",
"nu-ansi-term",
@@ -3116,9 +3126,9 @@ dependencies = [
[[package]]
name = "tree-sitter"
version = "0.26.3"
version = "0.25.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "974d205cc395652cfa8b37daa053fe56eebd429acf8dc055503fee648dae981e"
checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87"
dependencies = [
"cc",
"regex",
@@ -3130,9 +3140,9 @@ dependencies = [
[[package]]
name = "tree-sitter-bash"
version = "0.25.1"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5ec769279cc91b561d3df0d8a5deb26b0ad40d183127f409494d6d8fc53062"
checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6"
dependencies = [
"cc",
"tree-sitter-language",
@@ -3140,7 +3150,7 @@ dependencies = [
[[package]]
name = "tree-sitter-iter"
version = "0.0.3"
version = "0.0.2"
dependencies = [
"tree-sitter",
"tree-sitter-yaml",
@@ -3154,9 +3164,9 @@ checksum = "c4013970217383f67b18aef68f6fb2e8d409bc5755227092d32efb0422ba24b8"
[[package]]
name = "tree-sitter-powershell"
version = "0.25.10"
version = "0.25.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "415ec6251d133d26b4f62c60721149fe36c315334f47812450187d6ea59cffdf"
checksum = "ae0e37101b110badaf99aa40460915a8797ceba15fc0ed22773280377a8dffb6"
dependencies = [
"cc",
"tree-sitter-language",
@@ -3804,7 +3814,7 @@ checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3"
[[package]]
name = "yamlpatch"
version = "0.8.0"
version = "0.7.0"
dependencies = [
"indexmap",
"insta",
@@ -3820,7 +3830,7 @@ dependencies = [
[[package]]
name = "yamlpath"
version = "0.31.0"
version = "0.29.0"
dependencies = [
"line-index",
"self_cell",
@@ -3944,7 +3954,7 @@ dependencies = [
[[package]]
name = "zizmor"
version = "1.19.0"
version = "1.18.0-rc3"
dependencies = [
"annotate-snippets",
"anstream",

@@ -21,38 +21,38 @@ rust-version = "1.88.0"
[workspace.dependencies]
anyhow = "1.0.100"
github-actions-expressions = { path = "crates/github-actions-expressions", version = "0.0.11" }
github-actions-models = { path = "crates/github-actions-models", version = "0.42.0" }
github-actions-models = { path = "crates/github-actions-models", version = "0.39.0" }
itertools = "0.14.0"
pest = "2.8.4"
pest_derive = "2.8.4"
pest = "2.8.3"
pest_derive = "2.8.3"
pretty_assertions = "1.4.1"
annotate-snippets = "0.12.10"
annotate-snippets = "0.12.9"
anstream = "0.6.21"
assert_cmd = "2.1.1"
async-trait = "0.1.89"
camino = "1.2.2"
clap = "4.5.53"
camino = "1.2.1"
clap = "4.5.51"
clap-verbosity-flag = { version = "3.0.4", default-features = false }
clap_complete = "4.5.61"
clap_complete = "4.5.60"
clap_complete_nushell = "4.5.10"
csv = "1.3.1"
etcetera = "0.11.0"
flate2 = "1.1.5"
fst = "0.4.7"
futures = "0.3"
http = "1.4.0"
http = "1.3.1"
http-cache-reqwest = { version = "1.0.0-alpha.2", features = ["manager-moka"] }
human-panic = "2.0.4"
ignore = "0.4.25"
indexmap = { version = "2.12.1", features = ["serde"] }
indexmap = { version = "2.11.4", features = ["serde"] }
indicatif = "0.18"
insta = "1.44.3"
jsonschema = "0.37.4"
insta = "1.43.2"
jsonschema = "0.35.0"
line-index = "0.1.2"
memchr = "2.7.6"
owo-colors = "4.2.3"
regex = "1.12.1"
reqwest = { version = "0.12.25", default-features = false }
reqwest = { version = "0.12.23", default-features = false }
reqwest-middleware = "0.4.2"
self_cell = "1"
serde = { version = "1.0.228", features = ["derive"] }
@@ -65,18 +65,16 @@ tar = "0.4.44"
terminal-link = "0.1.0"
thiserror = "2.0.17"
tokio = { version = "1.47.1", features = ["rt-multi-thread", "io-std"] }
tower-lsp-server = "0.23"
tracing = "0.1.43"
tracing-indicatif = "0.3.14"
tower-lsp-server = "0.22"
tracing = "0.1.41"
tracing-indicatif = "0.3.13"
tracing-subscriber = "0.3.20"
tree-sitter = "0.26.3"
tree-sitter-bash = "0.25.1"
tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.3" }
# Exact version since the upstream performed a breaking change outside of semver.
# See: https://github.com/zizmorcore/zizmor/pull/1427
tree-sitter-powershell = "=0.25.10"
yamlpath = { path = "crates/yamlpath", version = "0.31.0" }
yamlpatch = { path = "crates/yamlpatch", version = "0.8.0" }
tree-sitter = "0.25.10"
tree-sitter-bash = "0.25.0"
tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.2" }
tree-sitter-powershell = "0.25.9"
yamlpath = { path = "crates/yamlpath", version = "0.29.0" }
yamlpatch = { path = "crates/yamlpatch", version = "0.7.0" }
tree-sitter-yaml = "0.7.2"
tikv-jemallocator = "0.6"

@@ -4,7 +4,7 @@ all:
.PHONY: site
site:
uv run --only-group docs zensical build --clean
uv run --only-group docs zensical build
.PHONY: site-live
site-live:
@@ -42,10 +42,6 @@ codeql-injection-sinks: crates/zizmor/data/codeql-injection-sinks.json
crates/zizmor/data/codeql-injection-sinks.json: support/codeql-injection-sinks.py
$< > $@
.PHONY: archived-repos
archived-repos:
support/archived-repos.py
.PHONY: pinact
pinact:
pinact run --update --verify
@@ -53,4 +49,4 @@ pinact:
.PHONY: bench
bench:
uv run --only-group=bench pytest bench/ --codspeed
uv run bench/benchmark.py --offline
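Usage note: the rewritten bench target above delegates to the new hyperfine-based runner added below (bench/benchmark.py). Going by the runner's own argparse setup, a hedged example of a direct invocation that only prints the planned hyperfine commands, without executing them, would be:

uv run bench/benchmark.py --offline --dry-run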

bench/.gitignore

@@ -0,0 +1 @@
results/

bench/benchmark.py

@@ -0,0 +1,242 @@
# /// script
# requires-python = ">=3.12"
# ///
import argparse
import hashlib
import json
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, NoReturn, TypedDict
_DEPS = ["hyperfine", "curl", "unzip"]
_HERE = Path(__file__).parent
_PROJECT_ROOT = _HERE.parent
_ZIZMOR = _PROJECT_ROOT / "target" / "release" / "zizmor"
assert (_PROJECT_ROOT / "Cargo.toml").is_file(), "Missing project root?"
_BENCHMARKS = _HERE / "benchmarks.json"
_RESULTS = _HERE / "results"
assert _BENCHMARKS.is_file(), f"Benchmarks file not found: {_BENCHMARKS}"
_RESULTS.mkdir(exist_ok=True)
_CACHE_DIR = Path(tempfile.gettempdir()) / "zizmor-benchmark-cache"
_CACHE_DIR.mkdir(exist_ok=True)
_GH_TOKEN = os.getenv("GH_TOKEN")
class Log:
def __init__(self, scope: str | None) -> None:
self.scopes = [scope] if scope else []
def info(self, message: str) -> None:
scopes = " ".join(f"[{s}]" for s in self.scopes)
print(f"[+] {scopes} {message}", file=sys.stderr)
def warn(self, message: str) -> None:
scopes = " ".join(f"[{s}]" for s in self.scopes)
print(f"[!] {scopes} {message}", file=sys.stderr)
def error(self, message: str) -> NoReturn:
self.warn(message)
sys.exit(1)
@contextmanager
def scope(self, new_scope: str) -> Iterator[None]:
"""Create a new logging scope."""
self.scopes.append(new_scope)
try:
yield None
finally:
self.scopes.pop()
LOG = Log("benchmarks")
def _curl(url: str, expected_sha256: str) -> Path:
"""Download a URL and cache it using content addressing with SHA256."""
cached_file = _CACHE_DIR / expected_sha256
if cached_file.exists():
LOG.info("Using cached file")
return cached_file
result = subprocess.run(
["curl", "-fsSL", url],
capture_output=True,
check=True,
)
content = result.stdout
content_hash = hashlib.sha256(content).hexdigest()
if content_hash != expected_sha256:
LOG.error(f"Hash mismatch: {expected_sha256} != {content_hash}")
cached_file.write_bytes(content)
return cached_file
def _unzip(archive_path: Path, extract_name: str) -> Path:
"""Extract an archive to a directory in the cache."""
extract_dir = _CACHE_DIR / extract_name
if extract_dir.exists():
LOG.info("Using cached extraction")
return extract_dir
extract_dir.mkdir(exist_ok=True)
subprocess.run(
["unzip", "-q", str(archive_path), "-d", str(extract_dir)],
check=True,
)
LOG.info(f"Extracted {archive_path.name} to {extract_dir}")
return extract_dir
class Benchmark(TypedDict):
name: str
source_type: str
source: str
source_sha256: str
stencil: str
online: bool | None
Plan = list[str]
class Bench:
def __init__(self, benchmark: Benchmark) -> None:
self.benchmark = benchmark
def plan(self) -> Plan:
match self.benchmark["source_type"]:
case "archive-url":
url = self.benchmark["source"]
sha256 = self.benchmark["source_sha256"]
archive = _curl(url, sha256)
inputs = [str(_unzip(archive, self.benchmark["name"]))]
case _:
LOG.error(f"Unknown source type: {self.benchmark['source_type']}")
if self.benchmark.get("online", False):
if not _GH_TOKEN:
LOG.error("Benchmark requires online access but GH_TOKEN is not set")
stencil = self.benchmark["stencil"]
command = stencil.replace("$ZIZMOR", str(_ZIZMOR)).replace(
"$INPUTS", " ".join(inputs)
)
return shlex.split(command)
def run(self, plan: Plan, *, dry_run: bool) -> None:
command = shlex.join(plan)
result_file = _RESULTS / f"{self.benchmark['name']}.json"
if result_file.exists() and not dry_run:
LOG.warn("clobbering existing result file")
hyperfine_command = [
"hyperfine",
"--warmup",
"3",
# NOTE: not needed because we use --no-exit-codes in the stencil
# "--ignore-failure",
"--export-json",
str(result_file),
command,
]
if dry_run:
LOG.warn(f"would have run: {shlex.join(hyperfine_command)}")
return
try:
subprocess.run(
hyperfine_command,
check=True,
)
except subprocess.CalledProcessError:
LOG.error("run failed, see above for details")
# Stupid hack: fixup each result file's results[0].command
# to be a more useful benchmark identifier, since bencher
# apparently keys on these.
result_json = json.loads(result_file.read_bytes())
result_json["results"][0]["command"] = f"zizmor::{self.benchmark['name']}"
result_file.write_text(json.dumps(result_json))
LOG.info(f"run written to {result_file}")
def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument(
"--dry-run", action="store_true", help="Show plans without running them"
)
parser.add_argument(
"--offline", action="store_true", help="Run only offline benchmarks"
)
args = parser.parse_args()
missing = []
for dep in _DEPS:
if not shutil.which(dep):
missing.append(dep)
if missing:
LOG.error(
f"Missing dependencies: {', '.join(missing)}. "
"Please install them before running benchmarks."
)
LOG.info("ensuring we have a benchable zizmor build")
subprocess.run(
["cargo", "build", "--release", "-p", "zizmor"],
check=True,
cwd=_PROJECT_ROOT,
)
if not _ZIZMOR.is_file():
LOG.error("zizmor build presumably failed, see above for details")
LOG.info(f"using cache dir: {_CACHE_DIR}")
benchmarks: list[Benchmark] = json.loads(_BENCHMARKS.read_text(encoding="utf-8"))
LOG.info(f"found {len(benchmarks)} benchmarks in {_BENCHMARKS.name}")
if args.offline:
benchmarks = [b for b in benchmarks if not b.get("online", False)]
LOG.info(f"filtered to {len(benchmarks)} offline benchmarks")
benches = [Bench(benchmark) for benchmark in benchmarks]
plans = []
with LOG.scope("plan"):
for bench in benches:
with LOG.scope(bench.benchmark["name"]):
LOG.info("beginning plan")
plans.append(bench.plan())
with LOG.scope("run"):
for bench, plan in zip(benches, plans):
with LOG.scope(bench.benchmark["name"]):
bench.run(plan, dry_run=args.dry_run)
if __name__ == "__main__":
main()

bench/benchmarks.json

@@ -0,0 +1,24 @@
[
{
"name": "grafana-9f212d11d0ac",
"source_type": "archive-url",
"source": "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip",
"source_sha256": "c6d42b52c8d912db2698d8b06f227de46f0c2d04cc757841792ed6567f0c56c7",
"stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
},
{
"name": "cpython-48f88310044c",
"source_type": "archive-url",
"source": "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip",
"source_sha256": "a52a67f1dd9cfa67c7d1305d5b9639629abe247b2c32f01b77f790ddf8b49503",
"stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
},
{
"name": "gha-hazmat-da3c3cd-online",
"source_type": "archive-url",
"source": "https://github.com/woodruffw/gha-hazmat/archive/da3c3cd.zip",
"source_sha256": "f0aa224c5203218ad26e9f104d8dc3eaf8b322c97056add04d79f4a0d53c8f1f",
"stencil": "$ZIZMOR --format=plain --no-exit-codes --no-config $INPUTS",
"online": true
}
]
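To make the stencil mechanism concrete, here is a minimal sketch of how the first entry above expands into the hyperfine invocation that Bench.plan() and Bench.run() produce. The binary path and cache location are assumptions for illustration (they mirror _ZIZMOR and _CACHE_DIR in bench/benchmark.py on a typical Linux host):

import shlex

# Assumed for illustration: the release binary and the unzip destination.
stencil = "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
zizmor = "target/release/zizmor"
inputs = ["/tmp/zizmor-benchmark-cache/grafana-9f212d11d0ac"]

# Bench.plan(): substitute the placeholders, then shell-split the command.
command = stencil.replace("$ZIZMOR", zizmor).replace("$INPUTS", " ".join(inputs))
plan = shlex.split(command)

# Bench.run(): wrap the planned command in hyperfine with a warmup and a
# JSON export; results[0].command is later rewritten to "zizmor::<name>".
hyperfine_command = [
    "hyperfine",
    "--warmup", "3",
    "--export-json", "bench/results/grafana-9f212d11d0ac.json",
    shlex.join(plan),
]
print(shlex.join(hyperfine_command))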

@@ -1,12 +0,0 @@
import subprocess
from pathlib import Path
_HERE = Path(__file__).parent
_ZIZMOR = _HERE.parent / "target" / "release" / "zizmor"
def zizmor(args: list[str], *, check: bool = False) -> None:
assert _ZIZMOR.is_file(), (
f"zizmor binary not found at {_ZIZMOR}, run prepare() first"
)
subprocess.run([str(_ZIZMOR), *args], check=check)

@@ -1,13 +0,0 @@
import pytest
from .common import zizmor
@pytest.mark.benchmark
def test_zizmor_startup():
zizmor(["--version"])
@pytest.mark.benchmark
def test_zizmor_help():
zizmor(["--help"])

@@ -1,68 +0,0 @@
import io
import zipfile
from pathlib import Path
import pytest
import urllib3
from bench.common import zizmor
@pytest.fixture(scope="session")
def grafana(tmp_path_factory) -> Path:
archive = "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip"
raw_zip = urllib3.PoolManager().request("GET", archive).data
path = tmp_path_factory.mktemp("grafana")
zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path)
return path
@pytest.fixture(scope="session")
def cpython(tmp_path_factory) -> Path:
archive = "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip"
raw_zip = urllib3.PoolManager().request("GET", archive).data
path = tmp_path_factory.mktemp("cpython")
zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path)
return path
@pytest.mark.benchmark
def test_zizmor_offline_grafana_9f212d11d0(grafana: Path):
"""
Runs `zizmor --offline --format=plain --no-exit-codes --no-config <path-to-grafana-source>`
"""
zizmor(
[
"--offline",
"--format=plain",
"--no-exit-codes",
"--no-config",
str(grafana),
],
check=True,
)
@pytest.mark.benchmark
def test_zizmor_offline_cpython_48f88310044c(cpython: Path):
"""
Runs `zizmor --offline --format=plain --no-exit-codes --no-config <path-to-cpython-source>`
"""
zizmor(
[
"--offline",
"--format=plain",
"--no-exit-codes",
"--no-config",
str(cpython),
],
check=True,
)

@@ -1,47 +0,0 @@
import os
import pytest
from bench.common import zizmor
@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set")
def test_zizmor_online_gha_hazmat_da3c3cd(benchmark):
"""
Runs `zizmor --format=plain --no-exit-codes --no-config woodruffw/gha-hazmat@da3c3cd`
"""
benchmark.pedantic(
zizmor,
args=(
[
"--format=plain",
"--no-exit-codes",
"--no-config",
"woodruffw/gha-hazmat@da3c3cd",
],
),
warmup_rounds=2,
iterations=10,
)
@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set")
def test_zizmor_online_cpython_48f88310044c(benchmark):
"""
Runs `zizmor --format=plain --no-exit-codes --no-config python/cpython@48f88310044c`
"""
benchmark.pedantic(
zizmor,
args=(
[
"--format=plain",
"--no-exit-codes",
"--no-config",
"python/cpython@48f88310044c",
],
),
warmup_rounds=2,
iterations=10,
)

@@ -1,6 +1,6 @@
[package]
name = "github-actions-models"
version = "0.42.0"
version = "0.39.0"
description = "Unofficial, high-quality data models for GitHub Actions workflows, actions, and related components"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/github-actions-models"
keywords = ["github", "ci"]
@@ -17,10 +17,8 @@ workspace = true
[dependencies]
indexmap.workspace = true
self_cell.workspace = true
serde.workspace = true
serde_yaml.workspace = true
tracing.workspace = true
[dev-dependencies]
insta.workspace = true

@@ -1,9 +1,11 @@
//! Shared models and utilities.
use std::fmt::{self, Display};
use std::{
fmt::{self, Display},
str::FromStr,
};
use indexmap::IndexMap;
use self_cell::self_cell;
use serde::{Deserialize, Deserializer, Serialize, de};
pub mod expr;
@@ -199,59 +201,57 @@ pub enum Uses {
Docker(DockerUses),
}
impl Uses {
/// Parse a `uses:` clause into its appropriate variant.
pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
let uses = uses.into();
impl FromStr for Uses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
if uses.starts_with("./") {
Ok(Self::Local(LocalUses::new(uses)))
LocalUses::from_str(uses).map(Self::Local)
} else if let Some(image) = uses.strip_prefix("docker://") {
DockerUses::parse(image).map(Self::Docker)
DockerUses::from_str(image).map(Self::Docker)
} else {
RepositoryUses::parse(uses).map(Self::Repository)
}
}
/// Returns the original raw `uses:` clause.
pub fn raw(&self) -> &str {
match self {
Uses::Local(local) => &local.path,
Uses::Repository(repo) => repo.raw(),
Uses::Docker(docker) => docker.raw(),
RepositoryUses::from_str(uses).map(Self::Repository)
}
}
}
/// A `uses: ./some/path` clause.
#[derive(Debug, PartialEq)]
#[non_exhaustive]
pub struct LocalUses {
pub path: String,
}
impl LocalUses {
fn new(path: String) -> Self {
LocalUses { path }
impl FromStr for LocalUses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
Ok(LocalUses { path: uses.into() })
}
}
/// A `uses: some/repo` clause.
#[derive(Debug, PartialEq)]
struct RepositoryUsesInner<'a> {
pub struct RepositoryUses {
/// The repo user or org.
owner: &'a str,
pub owner: String,
/// The repo name.
repo: &'a str,
/// The owner/repo slug.
slug: &'a str,
pub repo: String,
/// The subpath to the action or reusable workflow, if present.
subpath: Option<&'a str>,
pub subpath: Option<String>,
/// The `@<ref>` that the `uses:` is pinned to.
git_ref: &'a str,
pub git_ref: String,
}
impl<'a> RepositoryUsesInner<'a> {
fn from_str(uses: &'a str) -> Result<Self, UsesError> {
impl FromStr for RepositoryUses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
// NOTE: FromStr is slightly sub-optimal, since it takes a borrowed
// &str and results in a bunch of allocs for a fully owned type.
//
// In theory we could do `From<String>` instead, but
// `&mut str::split_mut` and similar don't exist yet.
// NOTE: Both git refs and paths can contain `@`, but in practice
// GHA refuses to run a `uses:` clause with more than one `@` in it.
let (path, git_ref) = match uses.rsplit_once('@') {
@@ -259,110 +259,44 @@ impl<'a> RepositoryUsesInner<'a> {
None => return Err(UsesError(format!("missing `@<ref>` in {uses}"))),
};
let mut components = path.splitn(3, '/');
if let Some(owner) = components.next()
&& let Some(repo) = components.next()
{
let subpath = components.next();
let slug = if subpath.is_none() {
path
} else {
&path[..owner.len() + 1 + repo.len()]
};
Ok(RepositoryUsesInner {
owner,
repo,
slug,
subpath,
git_ref,
})
} else {
Err(UsesError(format!("owner/repo slug is too short: {uses}")))
let components = path.splitn(3, '/').collect::<Vec<_>>();
if components.len() < 2 {
return Err(UsesError(format!("owner/repo slug is too short: {uses}")));
}
}
}
self_cell!(
/// A `uses: some/repo` clause.
pub struct RepositoryUses {
owner: String,
#[covariant]
dependent: RepositoryUsesInner,
}
impl {Debug, PartialEq}
);
impl Display for RepositoryUses {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.raw())
}
}
impl RepositoryUses {
/// Parse a `uses: some/repo` clause.
pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
RepositoryUses::try_new(uses.into(), |s| {
let inner = RepositoryUsesInner::from_str(s)?;
Ok(inner)
Ok(RepositoryUses {
owner: components[0].into(),
repo: components[1].into(),
subpath: components.get(2).map(ToString::to_string),
git_ref: git_ref.into(),
})
}
/// Get the raw `uses:` string.
pub fn raw(&self) -> &str {
self.borrow_owner()
}
/// Get the owner (user or org) of this repository `uses:` clause.
pub fn owner(&self) -> &str {
self.borrow_dependent().owner
}
/// Get the repository name of this repository `uses:` clause.
pub fn repo(&self) -> &str {
self.borrow_dependent().repo
}
/// Get the owner/repo slug of this repository `uses:` clause.
pub fn slug(&self) -> &str {
self.borrow_dependent().slug
}
/// Get the optional subpath of this repository `uses:` clause.
pub fn subpath(&self) -> Option<&str> {
self.borrow_dependent().subpath
}
/// Get the git ref (branch, tag, or SHA) of this repository `uses:` clause.
pub fn git_ref(&self) -> &str {
self.borrow_dependent().git_ref
}
}
/// A `uses: docker://some-image` clause.
#[derive(Debug, PartialEq)]
#[non_exhaustive]
pub struct DockerUsesInner<'a> {
pub struct DockerUses {
/// The registry this image is on, if present.
registry: Option<&'a str>,
pub registry: Option<String>,
/// The name of the Docker image.
image: &'a str,
pub image: String,
/// An optional tag for the image.
tag: Option<&'a str>,
pub tag: Option<String>,
/// An optional integrity hash for the image.
hash: Option<&'a str>,
pub hash: Option<String>,
}
impl<'a> DockerUsesInner<'a> {
impl DockerUses {
fn is_registry(registry: &str) -> bool {
// https://stackoverflow.com/a/42116190
registry == "localhost" || registry.contains('.') || registry.contains(':')
}
}
fn from_str(uses: &'a str) -> Result<Self, UsesError> {
impl FromStr for DockerUses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
let (registry, image) = match uses.split_once('/') {
Some((registry, image)) if Self::is_registry(registry) => (Some(registry), image),
_ => (None, uses),
@@ -380,11 +314,11 @@ impl<'a> DockerUsesInner<'a> {
Some(&hash[1..])
};
Ok(DockerUsesInner {
registry,
image,
Ok(DockerUses {
registry: registry.map(Into::into),
image: image.into(),
tag: None,
hash,
hash: hash.map(Into::into),
})
} else {
let (image, tag) = match image.split_once(':') {
@@ -393,63 +327,16 @@ impl<'a> DockerUsesInner<'a> {
_ => (image, None),
};
Ok(DockerUsesInner {
registry,
image,
tag,
Ok(DockerUses {
registry: registry.map(Into::into),
image: image.into(),
tag: tag.map(Into::into),
hash: None,
})
}
}
}
self_cell!(
/// A `uses: docker://some-image` clause.
pub struct DockerUses {
owner: String,
#[covariant]
dependent: DockerUsesInner,
}
impl {Debug, PartialEq}
);
impl DockerUses {
/// Parse a `uses: docker://some-image` clause.
pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
DockerUses::try_new(uses.into(), |s| {
let inner = DockerUsesInner::from_str(s)?;
Ok(inner)
})
}
/// Get the raw uses clause. This does not include the `docker://` prefix.
pub fn raw(&self) -> &str {
self.borrow_owner()
}
/// Get the optional registry of this Docker image.
pub fn registry(&self) -> Option<&str> {
self.borrow_dependent().registry
}
/// Get the image name of this Docker image.
pub fn image(&self) -> &str {
self.borrow_dependent().image
}
/// Get the optional tag of this Docker image.
pub fn tag(&self) -> Option<&str> {
self.borrow_dependent().tag
}
/// Get the optional hash of this Docker image.
pub fn hash(&self) -> Option<&str> {
self.borrow_dependent().hash
}
}
/// Wraps a `de::Error::custom` call to log the same error as
/// a `tracing::error!` event.
///
@@ -464,22 +351,13 @@
de::Error::custom(msg)
}
/// Deserialize a `DockerUses`.
pub(crate) fn docker_uses<'de, D>(de: D) -> Result<DockerUses, D::Error>
where
D: Deserializer<'de>,
{
let uses = <String>::deserialize(de)?;
DockerUses::parse(uses).map_err(custom_error::<D>)
}
/// Deserialize an ordinary step `uses:`.
pub(crate) fn step_uses<'de, D>(de: D) -> Result<Uses, D::Error>
where
D: Deserializer<'de>,
{
let uses = <String>::deserialize(de)?;
Uses::parse(uses).map_err(custom_error::<D>)
let uses = <&str>::deserialize(de)?;
Uses::from_str(uses).map_err(custom_error::<D>)
}
/// Deserialize a reusable workflow step `uses:`
@@ -518,7 +396,9 @@ mod tests {
use crate::common::{BasePermission, Env, EnvValue, Permission};
use super::{Permissions, Uses, reusable_step_uses};
use super::{
DockerUses, LocalUses, Permissions, RepositoryUses, Uses, UsesError, reusable_step_uses,
};
#[test]
fn test_permissions() {
@@ -573,418 +453,230 @@ mod tests {
#[test]
fn test_uses_parses() {
// Fully pinned.
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "checkout",
slug: "actions/checkout",
let vectors = [
(
// Valid: fully pinned.
"actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "checkout".to_owned(),
subpath: None,
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3",
},
},
)
"#,
);
// Fully pinned, subpath.
insta::assert_debug_snapshot!(
Uses::parse("actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "aws",
slug: "actions/aws",
subpath: Some(
"ec2",
),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3",
},
},
)
"#
);
// Fully pinned, complex subpath.
insta::assert_debug_snapshot!(
Uses::parse("example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
dependent: RepositoryUsesInner {
owner: "example",
repo: "foo",
slug: "example/foo",
subpath: Some(
"bar/baz/quux",
),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3",
},
},
)
"#
);
// Pinned with branch/tag.
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout@v4").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/checkout@v4",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "checkout",
slug: "actions/checkout",
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(),
})),
),
(
// Valid: fully pinned, subpath
"actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "aws".to_owned(),
subpath: Some("ec2".to_owned()),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(),
})),
),
(
// Valid: fully pinned, complex subpath
"example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Ok(Uses::Repository(RepositoryUses {
owner: "example".to_owned(),
repo: "foo".to_owned(),
subpath: Some("bar/baz/quux".to_owned()),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(),
})),
),
(
// Valid: pinned with branch/tag
"actions/checkout@v4",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "checkout".to_owned(),
subpath: None,
git_ref: "v4",
},
},
)
"#
);
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout@abcd").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/checkout@abcd",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "checkout",
slug: "actions/checkout",
git_ref: "v4".to_owned(),
})),
),
(
"actions/checkout@abcd",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "checkout".to_owned(),
subpath: None,
git_ref: "abcd",
},
},
)
"#
);
// Invalid: unpinned.
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout").unwrap_err(),
@r#"
UsesError(
"missing `@<ref>` in actions/checkout",
)
"#
);
// Valid: Docker ref, implicit registry.
insta::assert_debug_snapshot!(
Uses::parse("docker://alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "alpine:3.8",
dependent: DockerUsesInner {
git_ref: "abcd".to_owned(),
})),
),
(
// Invalid: unpinned
"actions/checkout",
Err(UsesError(
"missing `@<ref>` in actions/checkout".to_owned(),
)),
),
(
// Valid: Docker ref, implicit registry
"docker://alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: None,
image: "alpine",
tag: Some(
"3.8",
),
image: "alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
},
},
)
"#
);
// Valid: Docker ref, localhost.
insta::assert_debug_snapshot!(
Uses::parse("docker://localhost/alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "localhost/alpine:3.8",
dependent: DockerUsesInner {
registry: Some(
"localhost",
),
image: "alpine",
tag: Some(
"3.8",
),
})),
),
(
// Valid: Docker ref, localhost
"docker://localhost/alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: Some("localhost".to_owned()),
image: "alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
},
},
)
"#
);
// Valid: Docker ref, localhost with port.
insta::assert_debug_snapshot!(
Uses::parse("docker://localhost:1337/alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "localhost:1337/alpine:3.8",
dependent: DockerUsesInner {
registry: Some(
"localhost:1337",
),
image: "alpine",
tag: Some(
"3.8",
),
hash: None,
},
},
)
"#
);
(
// Valid: Docker ref, localhost w/ port
"docker://localhost:1337/alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: Some("localhost:1337".to_owned()),
image: "alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
})),
),
// Valid: Docker ref, custom registry.
insta::assert_debug_snapshot!(
Uses::parse("docker://ghcr.io/foo/alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "ghcr.io/foo/alpine:3.8",
dependent: DockerUsesInner {
registry: Some(
"ghcr.io",
),
image: "foo/alpine",
tag: Some(
"3.8",
),
hash: None,
},
},
)
"#
);
(
// Valid: Docker ref, custom registry
"docker://ghcr.io/foo/alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: Some("ghcr.io".to_owned()),
image: "foo/alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
})),
),
// Valid: Docker ref, missing tag.
insta::assert_debug_snapshot!(
Uses::parse("docker://ghcr.io/foo/alpine").unwrap(),
@r#"
Docker(
DockerUses {
owner: "ghcr.io/foo/alpine",
dependent: DockerUsesInner {
registry: Some(
"ghcr.io",
),
image: "foo/alpine",
tag: None,
hash: None,
},
},
)
"#
);
(
// Valid: Docker ref, missing tag
"docker://ghcr.io/foo/alpine",
Ok(Uses::Docker(DockerUses {
registry: Some("ghcr.io".to_owned()),
image: "foo/alpine".to_owned(),
tag: None,
hash: None,
})),
),
// Invalid, but allowed: Docker ref, empty tag
insta::assert_debug_snapshot!(
Uses::parse("docker://ghcr.io/foo/alpine:").unwrap(),
@r#"
Docker(
DockerUses {
owner: "ghcr.io/foo/alpine:",
dependent: DockerUsesInner {
registry: Some(
"ghcr.io",
),
image: "foo/alpine",
tag: None,
hash: None,
},
},
)
"#
);
(
// Invalid, but allowed: Docker ref, empty tag
"docker://ghcr.io/foo/alpine:",
Ok(Uses::Docker(DockerUses {
registry: Some("ghcr.io".to_owned()),
image: "foo/alpine".to_owned(),
tag: None,
hash: None,
})),
),
// Valid: Docker ref, bare.
insta::assert_debug_snapshot!(
Uses::parse("docker://alpine").unwrap(),
@r#"
Docker(
DockerUses {
owner: "alpine",
dependent: DockerUsesInner {
registry: None,
image: "alpine",
tag: None,
hash: None,
},
},
)
"#
);
(
// Valid: Docker ref, bare
"docker://alpine",
Ok(Uses::Docker(DockerUses {
registry: None,
image: "alpine".to_owned(),
tag: None,
hash: None,
})),
),
// Valid: Docker ref, with hash.
insta::assert_debug_snapshot!(
Uses::parse("docker://alpine@hash").unwrap(),
@r#"
Docker(
DockerUses {
owner: "alpine@hash",
dependent: DockerUsesInner {
registry: None,
image: "alpine",
tag: None,
hash: Some(
"hash",
),
},
},
)
"#
);
(
// Valid: Docker ref, hash
"docker://alpine@hash",
Ok(Uses::Docker(DockerUses {
registry: None,
image: "alpine".to_owned(),
tag: None,
hash: Some("hash".to_owned()),
})),
),
(
// Valid: Local action "ref", actually part of the path
"./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89",
Ok(Uses::Local(LocalUses {
path: "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89".to_owned(),
})),
),
(
// Valid: Local action ref, unpinned
"./.github/actions/hello-world-action",
Ok(Uses::Local(LocalUses {
path: "./.github/actions/hello-world-action".to_owned(),
})),
),
// Invalid: missing user/repo
(
"checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Err(UsesError(
"owner/repo slug is too short: checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned()
)),
),
];
// Valid: Local action "ref", actually part of the path
insta::assert_debug_snapshot!(
Uses::parse("./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89").unwrap(),
@r#"
Local(
LocalUses {
path: "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89",
},
)
"#
);
// Valid: Local action ref, unpinned.
insta::assert_debug_snapshot!(
Uses::parse("./.github/actions/hello-world-action").unwrap(),
@r#"
Local(
LocalUses {
path: "./.github/actions/hello-world-action",
},
)
"#
);
// Invalid: missing user/repo
insta::assert_debug_snapshot!(
Uses::parse("checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap_err(),
@r#"
UsesError(
"owner/repo slug is too short: checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
)
"#
);
for (input, expected) in vectors {
assert_eq!(input.parse(), expected);
}
}
#[test]
fn test_uses_deser_reusable() {
let vectors = [
// Valid, as expected.
(
"octo-org/this-repo/.github/workflows/workflow-1.yml@\
172239021f7ba04fe7327647b213799853a9eb89",
Some(Uses::Repository(RepositoryUses {
owner: "octo-org".to_owned(),
repo: "this-repo".to_owned(),
subpath: Some(".github/workflows/workflow-1.yml".to_owned()),
git_ref: "172239021f7ba04fe7327647b213799853a9eb89".to_owned(),
})),
),
(
"octo-org/this-repo/.github/workflows/workflow-1.yml@notahash",
Some(Uses::Repository(RepositoryUses {
owner: "octo-org".to_owned(),
repo: "this-repo".to_owned(),
subpath: Some(".github/workflows/workflow-1.yml".to_owned()),
git_ref: "notahash".to_owned(),
})),
),
(
"octo-org/this-repo/.github/workflows/workflow-1.yml@abcd",
Some(Uses::Repository(RepositoryUses {
owner: "octo-org".to_owned(),
repo: "this-repo".to_owned(),
subpath: Some(".github/workflows/workflow-1.yml".to_owned()),
git_ref: "abcd".to_owned(),
})),
),
// Invalid: remote reusable workflow without ref
("octo-org/this-repo/.github/workflows/workflow-1.yml", None),
// Invalid: local reusable workflow with ref
(
"./.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89",
None,
),
// Invalid: no ref at all
("octo-org/this-repo/.github/workflows/workflow-1.yml", None),
(".github/workflows/workflow-1.yml", None),
// Invalid: missing user/repo
(
"workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89",
None,
),
];
// Dummy type for testing deser of `Uses`.
#[derive(Deserialize)]
#[serde(transparent)]
struct Dummy(#[serde(deserialize_with = "reusable_step_uses")] Uses);
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89"
)
.map(|d| d.0)
.unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89",
dependent: RepositoryUsesInner {
owner: "octo-org",
repo: "this-repo",
slug: "octo-org/this-repo",
subpath: Some(
".github/workflows/workflow-1.yml",
),
git_ref: "172239021f7ba04fe7327647b213799853a9eb89",
},
},
)
"#
);
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml@notahash"
).map(|d| d.0).unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@notahash",
dependent: RepositoryUsesInner {
owner: "octo-org",
repo: "this-repo",
slug: "octo-org/this-repo",
subpath: Some(
".github/workflows/workflow-1.yml",
),
git_ref: "notahash",
},
},
)
"#
);
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml@abcd"
).map(|d| d.0).unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@abcd",
dependent: RepositoryUsesInner {
owner: "octo-org",
repo: "this-repo",
slug: "octo-org/this-repo",
subpath: Some(
".github/workflows/workflow-1.yml",
),
git_ref: "abcd",
},
},
)
"#
);
// Invalid: remote reusable workflow without ref
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml"
).map(|d| d.0).unwrap_err(),
@r#"Error("malformed `uses` ref: missing `@<ref>` in octo-org/this-repo/.github/workflows/workflow-1.yml")"#
);
// Invalid: local reusable workflow with ref
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"./.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89"
).map(|d| d.0).unwrap_err(),
@r#"Error("local reusable workflow reference can't specify `@<ref>`")"#
);
// Invalid: no ref at all
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
".github/workflows/workflow-1.yml"
).map(|d| d.0).unwrap_err(),
@r#"Error("malformed `uses` ref: missing `@<ref>` in .github/workflows/workflow-1.yml")"#
);
// Invalid: missing user/repo
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89"
).map(|d| d.0).unwrap_err(),
@r#"Error("malformed `uses` ref: owner/repo slug is too short: workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89")"#
);
for (input, expected) in vectors {
assert_eq!(
serde_yaml::from_str::<Dummy>(input).map(|d| d.0).ok(),
expected
);
}
}
}

View file

@ -349,8 +349,6 @@ pub enum AllowDeny {
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum PackageEcosystem {
/// `bazel`
Bazel,
/// `bun`
Bun,
/// `bundler`
@ -371,8 +369,6 @@ pub enum PackageEcosystem {
DotnetSdk,
/// `helm`
Helm,
/// `julia`
Julia,
/// `elm`
Elm,
/// `gitsubmodule`
@ -391,8 +387,6 @@ pub enum PackageEcosystem {
Npm,
/// `nuget`
Nuget,
/// `opentofu`
Opentofu,
/// `pip`
Pip,
/// `pub`

View file

@ -5,7 +5,7 @@ use serde::Deserialize;
use serde_yaml::Value;
use crate::common::expr::{BoE, LoE};
use crate::common::{DockerUses, Env, If, Permissions, Uses, custom_error};
use crate::common::{Env, If, Permissions, Uses, custom_error};
use super::{Concurrency, Defaults};
@ -162,8 +162,7 @@ pub struct Matrix {
pub enum Container {
Name(String),
Container {
#[serde(deserialize_with = "crate::common::docker_uses")]
image: DockerUses,
image: String,
credentials: Option<DockerCredentials>,
#[serde(default)]
env: LoE<Env>,

View file

@ -1,23 +0,0 @@
# https://github.com/zizmorcore/zizmor/issues/1451
version: 2
enable-beta-ecosystems: true
updates:
- package-ecosystem: "opentofu"
directories:
- "/stack"
- "/modules/default-branch-protection"
schedule:
interval: "cron"
cronjob: "30 7 * * *"
timezone: "Europe/London"
target-branch: "main"
groups:
terraform:
applies-to: "version-updates"
patterns:
- "*"
update-types:
- "patch"
- "minor"

View file

@ -1,4 +1,4 @@
use std::{env, path::Path};
use std::{env, path::Path, str::FromStr};
use github_actions_models::{
common::{
@ -56,13 +56,13 @@ fn test_pip_audit_ci() {
let StepBody::Uses { uses, with } = &test_job.steps[0].body else {
panic!("expected uses step");
};
assert_eq!(uses, &Uses::parse("actions/checkout@v4.1.1").unwrap());
assert_eq!(uses, &Uses::from_str("actions/checkout@v4.1.1").unwrap());
assert!(with.is_empty());
let StepBody::Uses { uses, with } = &test_job.steps[1].body else {
panic!("expected uses step");
};
assert_eq!(uses, &Uses::parse("actions/setup-python@v5").unwrap());
assert_eq!(uses, &Uses::from_str("actions/setup-python@v5").unwrap());
assert_eq!(with["python-version"].to_string(), "${{ matrix.python }}");
assert_eq!(with["cache"].to_string(), "pip");
assert_eq!(with["cache-dependency-path"].to_string(), "pyproject.toml");

View file

@ -1,7 +1,7 @@
[package]
name = "tree-sitter-iter"
description = "A very simple pre-order iterator for tree-sitter CSTs"
version = "0.0.3"
version = "0.0.2"
authors.workspace = true
homepage.workspace = true
edition.workspace = true

View file

@ -1,6 +1,6 @@
[package]
name = "yamlpatch"
version = "0.8.0"
version = "0.7.0"
description = "Comment and format-preserving YAML patch operations"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpatch"
keywords = ["yaml", "patch"]

View file

@ -41,17 +41,16 @@ human review.
## Operations
`yamlpatch` supports the following patch operations:
`yamlpatch` supports several types of patch operations:
- **Replace**: Replace a value at a specific path
- **Add**: Add new key-value pairs to mappings
- **Remove**: Remove keys or elements
- **MergeInto**: Merge values into existing mappings
- **Append**: Append items to block sequences
- **ReplaceComment**: Replace comments associated with features
- **RewriteFragment**: Rewrite portions of string values (useful for templating)
Each operation preserves the document's formatting and structure (as best-effort).
Each operation is designed to work with the existing document structure
and formatting, making minimal changes while achieving the desired result.
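
As a rough sketch (illustrative only; the exact operation syntax is defined by the crate and not shown here), a **Replace** at the path `jobs.test.steps.0.uses` should rewrite just that scalar while the neighboring comment and layout survive:

```yaml
# before
jobs:
  test:
    steps:
      - uses: actions/checkout@v4 # pinned by tag

# after a Replace at `jobs.test.steps.0.uses`
jobs:
  test:
    steps:
      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # pinned by tag
```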
## License

View file

@ -903,21 +903,21 @@ normal:
let end = find_content_end(&feature, &doc);
insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r"
bar: baz
abc: def # comment
");
bar: baz
abc: def # comment
");
let feature = route_to_feature_exact(&route!("interior-spaces"), &doc)
.unwrap()
.unwrap();
let end = find_content_end(&feature, &doc);
insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r"
- foo
- foo
- bar
# hello
- baz # hello
");
- bar
# hello
- baz # hello
");
let feature = route_to_feature_exact(&route!("normal"), &doc)
.unwrap()
@ -2775,12 +2775,11 @@ items:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
items:
- first
- second
- third
");
items:
- first
- second
- third
");
}
#[test]
@ -2823,20 +2822,19 @@ databases:
let result =
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
databases:
- name: primary
host: db1.example.com
port: 5432
max_connections: 100
ssl: true
readonly: false
- name: analytics
host: db2.example.com
port: 5433
readonly: true
");
insta::assert_snapshot!(result.source(), @r#"
databases:
- name: primary
host: db1.example.com
port: 5432
max_connections: 100
ssl: true
readonly: false
- name: analytics
host: db2.example.com
port: 5433
readonly: true
"#);
}
#[test]
@ -2872,17 +2870,16 @@ jobs:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r#"
jobs:
test:
steps:
- name: First step
run: echo "first"
- name: Second step
run: echo "second"
- name: Third step
run: echo "third"
"#);
jobs:
test:
steps:
- name: First step
run: echo "first"
- name: Second step
run: echo "second"
- name: Third step
run: echo "third"
"#);
}
#[test]
@ -2927,21 +2924,20 @@ servers:
assert!(result.source().contains("# Staging server"));
assert!(result.source().contains("# internal only"));
insta::assert_snapshot!(result.source(), @r"
servers:
# Production server
- name: prod
host: prod.example.com
port: 443
# Staging server
- name: staging
host: staging.example.com # internal only
port: 8443
- name: dev
host: localhost
port: 8080
");
insta::assert_snapshot!(result.source(), @r#"
servers:
# Production server
- name: prod
host: prod.example.com
port: 443
# Staging server
- name: staging
host: staging.example.com # internal only
port: 8443
- name: dev
host: localhost
port: 8080
"#);
}
#[test]
@ -2963,12 +2959,11 @@ ports:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
ports:
- 8080
- 8081
- 8082
");
ports:
- 8080
- 8081
- 8082
");
}
#[test]
@ -2990,12 +2985,11 @@ configs:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
configs:
- name: config1
value: 123
- {}
");
configs:
- name: config1
value: 123
- {}
");
}
#[test]
@ -3037,16 +3031,15 @@ services:
let result =
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
services:
- name: api
port: 8080
- name: worker
port: 9090
config:
replicas: 3
");
insta::assert_snapshot!(result.source(), @r#"
services:
- name: api
port: 8080
- name: worker
port: 9090
config:
replicas: 3
"#);
}
#[test]
@ -3103,12 +3096,11 @@ tasks:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
tasks:
- task1
- task2
- task3
");
tasks:
- task1
- task2
- task3
");
}
#[test]
@ -3146,21 +3138,20 @@ jobs:
let result =
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
name: CI
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run tests
run: npm test
- name: Upload coverage
uses: codecov/codecov-action@v3
");
insta::assert_snapshot!(result.source(), @r#"
name: CI
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run tests
run: npm test
- name: Upload coverage
uses: codecov/codecov-action@v3
"#);
}
#[test]
@ -3185,10 +3176,9 @@ foo:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
foo:
- abc
- - def
- ghi
");
foo:
- abc
- - def
- ghi
");
}

View file

@ -1,6 +1,6 @@
[package]
name = "yamlpath"
version = "0.31.0"
version = "0.29.0"
description = "Format-preserving YAML feature extraction"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpath"
readme = "README.md"

View file

@ -1,7 +1,7 @@
[package]
name = "zizmor"
description = "Static analysis for GitHub Actions"
version = "1.19.0"
version = "1.18.0-rc3"
repository = "https://github.com/zizmorcore/zizmor"
documentation = "https://docs.zizmor.sh"
keywords = ["cli", "github-actions", "static-analysis", "security"]

View file

@ -4,7 +4,7 @@ use std::fs::{self, File};
use std::path::Path;
use std::{env, io};
use fst::{MapBuilder, SetBuilder};
use fst::MapBuilder;
fn do_context_capabilities() {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
@ -55,29 +55,7 @@ fn do_codeql_injection_sinks() {
fs::copy(source, target).unwrap();
}
fn do_archived_action_repos() {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
let source = Path::new(&manifest_dir).join("data/archived-repos.txt");
let target = Path::new(&env::var("OUT_DIR").unwrap()).join("archived-repos.fst");
print!(
"cargo::rerun-if-changed={source}",
source = source.display()
);
let out = io::BufWriter::new(File::create(target).unwrap());
let mut build = SetBuilder::new(out).unwrap();
let contents = fs::read_to_string(source).unwrap();
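// NB: fst's SetBuilder requires keys to be inserted in lexicographic
// order, so this assumes data/archived-repos.txt stays sorted;
// insert() errors on out-of-order keys.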
for line in contents.lines() {
build.insert(line).unwrap();
}
build.finish().unwrap();
}
fn main() {
do_context_capabilities();
do_codeql_injection_sinks();
do_archived_action_repos();
}

View file

@ -1,87 +0,0 @@
8398a7/action-slack
actions-rs/audit-check
actions-rs/cargo
actions-rs/clippy-check
actions-rs/components-nightly
actions-rs/grcov
actions-rs/install
actions-rs/tarpaulin
actions-rs/toolchain
actions/create-release
actions/setup-elixir
actions/setup-haskell
actions/setup-ruby
actions/upload-release-asset
andrewmcodes-archive/rubocop-linter-action
artichoke/setup-rust
aslafy-z/conventional-pr-title-action
azure/appconfiguration-sync
azure/appservice-actions
azure/azure-resource-login-action
azure/container-actions
azure/container-scan
azure/data-factory-deploy-action
azure/data-factory-export-action
azure/data-factory-validate-action
azure/get-keyvault-secrets
azure/k8s-actions
azure/manage-azure-policy
azure/publish-security-assessments
azure/run-sqlpackage-action
azure/spring-cloud-deploy
azure/webapps-container-deploy
cedrickring/golang-action
cirrus-actions/rebase
crazy-max/ghaction-docker-buildx
decathlon/pull-request-labeler-action
delaguardo/setup-graalvm
dulvui/godot-android-export
expo/expo-preview-action
fabasoad/setup-zizmor-action
facebook/pysa-action
fregante/release-with-changelog
google/mirror-branch-action
google/skywater-pdk-actions
gradle/gradle-build-action
grafana/k6-action
helaili/github-graphql-action
helaili/jekyll-action
ilshidur/action-slack
jakejarvis/backblaze-b2-action
jakejarvis/cloudflare-purge-action
jakejarvis/firebase-deploy-action
jakejarvis/hugo-build-action
jakejarvis/lighthouse-action
jakejarvis/s3-sync-action
justinribeiro/lighthouse-action
kanadgupta/glitch-sync
kxxt/chatgpt-action
machine-learning-apps/wandb-action
mansagroup/gcs-cache-action
marvinpinto/action-automatic-releases
marvinpinto/actions
maxheld83/ghpages
micnncim/action-lgtm-reaction
mikepenz/gradle-dependency-submission
orf/cargo-bloat-action
paambaati/codeclimate-action
primer/figma-action
repo-sync/pull-request
repo-sync/repo-sync
sagebind/docker-swarm-deploy-action
scottbrenner/generate-changelog-action
secrethub/actions
semgrep/semgrep-action
shaunlwm/action-release-debugapk
sonarsource/sonarcloud-github-action
stefanprodan/kube-tools
swiftdocorg/github-wiki-publish-action
tachiyomiorg/issue-moderator-action
technote-space/auto-cancel-redundant-workflow
technote-space/get-diff-action
tencentcloudbase/cloudbase-action
trmcnvn/chrome-addon
whelk-io/maven-settings-xml-action
yeslayla/build-godot-action
youyo/aws-cdk-github-actions
z0al/dependent-issues

View file

@ -272,17 +272,6 @@ github.event.changes.new_issue.draft,fixed
github.event.changes.new_issue.events_url,structured
github.event.changes.new_issue.html_url,structured
github.event.changes.new_issue.id,fixed
github.event.changes.new_issue.issue_dependencies_summary.blocked_by,fixed
github.event.changes.new_issue.issue_dependencies_summary.blocking,fixed
github.event.changes.new_issue.issue_dependencies_summary.total_blocked_by,fixed
github.event.changes.new_issue.issue_dependencies_summary.total_blocking,fixed
github.event.changes.new_issue.issue_field_values.*.data_type,fixed
github.event.changes.new_issue.issue_field_values.*.issue_field_id,fixed
github.event.changes.new_issue.issue_field_values.*.node_id,arbitrary
github.event.changes.new_issue.issue_field_values.*.single_select_option.color,arbitrary
github.event.changes.new_issue.issue_field_values.*.single_select_option.id,fixed
github.event.changes.new_issue.issue_field_values.*.single_select_option.name,arbitrary
github.event.changes.new_issue.issue_field_values.*.value,arbitrary
github.event.changes.new_issue.labels.*.color,arbitrary
github.event.changes.new_issue.labels.*.default,fixed
github.event.changes.new_issue.labels.*.description,arbitrary
@ -773,17 +762,6 @@ github.event.changes.old_issue.draft,fixed
github.event.changes.old_issue.events_url,structured
github.event.changes.old_issue.html_url,structured
github.event.changes.old_issue.id,fixed
github.event.changes.old_issue.issue_dependencies_summary.blocked_by,fixed
github.event.changes.old_issue.issue_dependencies_summary.blocking,fixed
github.event.changes.old_issue.issue_dependencies_summary.total_blocked_by,fixed
github.event.changes.old_issue.issue_dependencies_summary.total_blocking,fixed
github.event.changes.old_issue.issue_field_values.*.data_type,fixed
github.event.changes.old_issue.issue_field_values.*.issue_field_id,fixed
github.event.changes.old_issue.issue_field_values.*.node_id,arbitrary
github.event.changes.old_issue.issue_field_values.*.single_select_option.color,arbitrary
github.event.changes.old_issue.issue_field_values.*.single_select_option.id,fixed
github.event.changes.old_issue.issue_field_values.*.single_select_option.name,arbitrary
github.event.changes.old_issue.issue_field_values.*.value,arbitrary
github.event.changes.old_issue.labels.*.color,arbitrary
github.event.changes.old_issue.labels.*.default,fixed
github.event.changes.old_issue.labels.*.description,arbitrary
@ -1082,6 +1060,7 @@ github.event.changes.required_status_checks.from.*,arbitrary
github.event.changes.required_status_checks_enforcement_level.from,fixed
github.event.changes.tag_name.from,arbitrary
github.event.changes.title.from,arbitrary
github.event.check_run.app,fixed
github.event.check_run.app.client_id,arbitrary
github.event.check_run.app.created_at,fixed
github.event.check_run.app.description,arbitrary
@ -1212,8 +1191,6 @@ github.event.check_run.check_suite.repository.compare_url,arbitrary
github.event.check_run.check_suite.repository.contents_url,arbitrary
github.event.check_run.check_suite.repository.contributors_url,structured
github.event.check_run.check_suite.repository.created_at,fixed
github.event.check_run.check_suite.repository.custom_properties,arbitrary
github.event.check_run.check_suite.repository.custom_properties.*,arbitrary
github.event.check_run.check_suite.repository.default_branch,arbitrary
github.event.check_run.check_suite.repository.delete_branch_on_merge,fixed
github.event.check_run.check_suite.repository.deployments_url,structured
@ -2230,17 +2207,6 @@ github.event.issue.draft,fixed
github.event.issue.events_url,arbitrary
github.event.issue.html_url,arbitrary
github.event.issue.id,fixed
github.event.issue.issue_dependencies_summary.blocked_by,fixed
github.event.issue.issue_dependencies_summary.blocking,fixed
github.event.issue.issue_dependencies_summary.total_blocked_by,fixed
github.event.issue.issue_dependencies_summary.total_blocking,fixed
github.event.issue.issue_field_values.*.data_type,fixed
github.event.issue.issue_field_values.*.issue_field_id,fixed
github.event.issue.issue_field_values.*.node_id,arbitrary
github.event.issue.issue_field_values.*.single_select_option.color,arbitrary
github.event.issue.issue_field_values.*.single_select_option.id,fixed
github.event.issue.issue_field_values.*.single_select_option.name,arbitrary
github.event.issue.issue_field_values.*.value,arbitrary
github.event.issue.labels.*,arbitrary
github.event.issue.labels.*.color,arbitrary
github.event.issue.labels.*.default,fixed
@ -3081,7 +3047,6 @@ github.event.pull_request.requested_reviewers.*.url,structured
github.event.pull_request.requested_reviewers.*.user_view_type,arbitrary
github.event.pull_request.requested_teams.*.deleted,fixed
github.event.pull_request.requested_teams.*.description,arbitrary
github.event.pull_request.requested_teams.*.enterprise_id,fixed
github.event.pull_request.requested_teams.*.html_url,structured
github.event.pull_request.requested_teams.*.id,fixed
github.event.pull_request.requested_teams.*.ldap_dn,arbitrary
@ -3089,7 +3054,6 @@ github.event.pull_request.requested_teams.*.members_url,arbitrary
github.event.pull_request.requested_teams.*.name,arbitrary
github.event.pull_request.requested_teams.*.node_id,arbitrary
github.event.pull_request.requested_teams.*.notification_setting,arbitrary
github.event.pull_request.requested_teams.*.organization_id,fixed
github.event.pull_request.requested_teams.*.parent.description,arbitrary
github.event.pull_request.requested_teams.*.parent.html_url,structured
github.event.pull_request.requested_teams.*.parent.id,fixed
@ -3105,7 +3069,6 @@ github.event.pull_request.requested_teams.*.permission,arbitrary
github.event.pull_request.requested_teams.*.privacy,arbitrary
github.event.pull_request.requested_teams.*.repositories_url,structured
github.event.pull_request.requested_teams.*.slug,arbitrary
github.event.pull_request.requested_teams.*.type,fixed
github.event.pull_request.requested_teams.*.url,structured
github.event.pull_request.review_comment_url,arbitrary
github.event.pull_request.review_comments,fixed
@ -3386,7 +3349,6 @@ github.event.release.discussion_url,structured
github.event.release.draft,fixed
github.event.release.html_url,structured
github.event.release.id,fixed
github.event.release.immutable,fixed
github.event.release.name,arbitrary
github.event.release.node_id,arbitrary
github.event.release.prerelease,fixed
@ -3404,7 +3366,6 @@ github.event.release.reactions.url,structured
github.event.release.tag_name,arbitrary
github.event.release.tarball_url,structured
github.event.release.target_commitish,arbitrary
github.event.release.updated_at,fixed
github.event.release.upload_url,structured
github.event.release.url,structured
github.event.release.zipball_url,structured
@ -3730,7 +3691,6 @@ github.event.review.node_id,arbitrary
github.event.review.pull_request_url,structured
github.event.review.state,arbitrary
github.event.review.submitted_at,fixed
github.event.review.updated_at,fixed
github.event.review.user.avatar_url,structured
github.event.review.user.deleted,fixed
github.event.review.user.email,arbitrary


View file

@ -39,7 +39,6 @@ impl Audit for AnonymousDefinition {
.confidence(Confidence::High)
.persona(Persona::Pedantic)
.add_location(workflow.location().primary())
.tip("use 'name: ...' to give this workflow a name")
.build(workflow)?,
);
}
@ -48,13 +47,14 @@ impl Audit for AnonymousDefinition {
match job {
Job::NormalJob(normal) => {
if normal.name.is_none() {
let location = normal.location().primary();
findings.push(
Self::finding()
.severity(ANONYMOUS_DEFINITION_JOB_SEVERITY)
.confidence(Confidence::High)
.persona(Persona::Pedantic)
.add_location(normal.location_with_grip().primary())
.tip("use 'name: ...' to give this job a name")
.add_location(location)
.build(workflow)?,
);
}

View file

@ -1,140 +0,0 @@
use std::sync::LazyLock;
use fst::Set;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;
use crate::{
audit::{Audit, AuditError, AuditLoadError, audit_meta},
config::Config,
finding::{Confidence, Finding, FindingBuilder, Persona, Severity, location::Locatable},
models::{
StepCommon as _,
action::CompositeStep,
workflow::{ReusableWorkflowCallJob, Step},
},
state::AuditState,
};
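// Archived `owner/repo` slugs, baked in at compile time: build.rs
// compiles data/archived-repos.txt into this FST (see
// do_archived_action_repos).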
static ARCHIVED_REPOS_FST: LazyLock<Set<&[u8]>> = LazyLock::new(|| {
fst::Set::new(include_bytes!(concat!(env!("OUT_DIR"), "/archived-repos.fst")).as_slice())
.expect("couldn't initialize archived repos FST")
});
pub(crate) struct ArchivedUses;
audit_meta!(
ArchivedUses,
"archived-uses",
"action or reusable workflow from archived repository"
);
impl ArchivedUses {
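// Returns a partially-built finding if `uses` points at a known-archived
// repository; callers attach locations and finish the build.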
pub(crate) fn uses_is_archived<'doc>(uses: &RepositoryUses) -> Option<FindingBuilder<'doc>> {
// TODO: Annoying that we need to allocate for case normalization here; can we use an
// automaton to search the FST case-insensitively?
let normalized = format!(
"{owner}/{repo}",
owner = uses.owner().to_lowercase(),
repo = uses.repo().to_lowercase()
);
ARCHIVED_REPOS_FST.contains(normalized.as_bytes()).then(|| {
Self::finding()
.confidence(Confidence::High)
.severity(Severity::Medium)
.persona(Persona::Regular)
})
}
}
#[async_trait::async_trait]
impl Audit for ArchivedUses {
fn new(_state: &AuditState) -> Result<Self, AuditLoadError>
where
Self: Sized,
{
Ok(Self)
}
async fn audit_step<'doc>(
&self,
step: &Step<'doc>,
_config: &Config,
) -> Result<Vec<Finding<'doc>>, AuditError> {
let mut findings = vec![];
if let Some(Uses::Repository(uses)) = step.uses()
&& let Some(finding) = Self::uses_is_archived(uses)
{
findings.push(
finding
.add_location(step.location_with_grip())
.add_location(
step.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.slug()))
.annotated("repository is archived")
.primary(),
)
.build(step)?,
)
}
Ok(findings)
}
async fn audit_composite_step<'doc>(
&self,
step: &CompositeStep<'doc>,
_config: &Config,
) -> Result<Vec<Finding<'doc>>, AuditError> {
let mut findings = vec![];
if let Some(Uses::Repository(uses)) = step.uses()
&& let Some(finding) = Self::uses_is_archived(uses)
{
findings.push(
finding
.add_location(step.location_with_grip())
.add_location(
step.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.slug()))
.annotated("repository is archived")
.primary(),
)
.build(step)?,
)
}
Ok(findings)
}
async fn audit_reusable_job<'doc>(
&self,
job: &ReusableWorkflowCallJob<'doc>,
_config: &Config,
) -> Result<Vec<Finding<'doc>>, AuditError> {
let mut findings = vec![];
if let Uses::Repository(uses) = &job.uses
&& let Some(finding) = Self::uses_is_archived(uses)
{
findings.push(
finding
.add_location(job.location_with_grip())
.add_location(
job.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.slug()))
.annotated("repository is archived")
.primary(),
)
.build(job)?,
)
}
Ok(findings)
}
}

View file

@ -42,12 +42,12 @@ impl Artipacked {
uses: &github_actions_models::common::RepositoryUses,
) -> Result<Option<bool>, ClientError> {
let version = if !uses.ref_is_commit() {
uses.git_ref().to_string()
uses.git_ref.clone()
} else {
match self.client {
Some(ref client) => {
let tag = client
.longest_tag_for_commit(uses.owner(), uses.repo(), uses.git_ref())
.longest_tag_for_commit(&uses.owner, &uses.repo, &uses.git_ref)
.await?;
match tag {
@ -271,6 +271,8 @@ impl Audit for Artipacked {
#[cfg(test)]
mod tests {
use std::str::FromStr;
use github_actions_models::common::RepositoryUses;
use super::*;
@ -323,11 +325,11 @@ mod tests {
#[tokio::test]
async fn test_is_checkout_v6_or_higher_offline() {
// Test v6 and higher versions
let v6 = RepositoryUses::parse("actions/checkout@v6").unwrap();
let v6_0 = RepositoryUses::parse("actions/checkout@v6.0").unwrap();
let v6_1_0 = RepositoryUses::parse("actions/checkout@v6.1.0").unwrap();
let v7 = RepositoryUses::parse("actions/checkout@v7").unwrap();
let v10 = RepositoryUses::parse("actions/checkout@v10").unwrap();
let v6 = RepositoryUses::from_str("actions/checkout@v6").unwrap();
let v6_0 = RepositoryUses::from_str("actions/checkout@v6.0").unwrap();
let v6_1_0 = RepositoryUses::from_str("actions/checkout@v6.1.0").unwrap();
let v7 = RepositoryUses::from_str("actions/checkout@v7").unwrap();
let v10 = RepositoryUses::from_str("actions/checkout@v10").unwrap();
let artipacked = Artipacked { client: None };
@ -353,9 +355,9 @@ mod tests {
);
// Test versions below v6
let v4 = RepositoryUses::parse("actions/checkout@v4").unwrap();
let v5 = RepositoryUses::parse("actions/checkout@v5").unwrap();
let v5_9 = RepositoryUses::parse("actions/checkout@v5.9").unwrap();
let v4 = RepositoryUses::from_str("actions/checkout@v4").unwrap();
let v5 = RepositoryUses::from_str("actions/checkout@v5").unwrap();
let v5_9 = RepositoryUses::from_str("actions/checkout@v5.9").unwrap();
assert_eq!(
artipacked.is_checkout_v6_or_higher(&v4).await.unwrap(),
@ -372,7 +374,7 @@ mod tests {
// Test commit SHA (should return None when offline)
let commit_sha =
RepositoryUses::parse("actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683")
RepositoryUses::from_str("actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683")
.unwrap();
assert_eq!(
artipacked
@ -383,7 +385,7 @@ mod tests {
);
// Test invalid/unparseable refs (should return None)
let invalid = RepositoryUses::parse("actions/checkout@main").unwrap();
let invalid = RepositoryUses::from_str("actions/checkout@main").unwrap();
assert_eq!(
artipacked.is_checkout_v6_or_higher(&invalid).await.unwrap(),
None
@ -408,7 +410,7 @@ mod tests {
// Points to v6.0.0.
let commit_sha_v6 =
RepositoryUses::parse("actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3")
RepositoryUses::from_str("actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3")
.unwrap();
assert_eq!(
@ -421,7 +423,7 @@ mod tests {
// Points to v5.0.1.
let commit_sha_v5 =
RepositoryUses::parse("actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd")
RepositoryUses::from_str("actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd")
.unwrap();
assert_eq!(
@ -524,7 +526,6 @@ jobs:
|workflow: &Workflow, findings| {
let fixed = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed.source(), @r"
name: Test Workflow
on: push
jobs:
@ -572,7 +573,6 @@ jobs:
|workflow: &Workflow, findings| {
let fixed = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed.source(), @r"
name: Test Workflow
on: push
jobs:

View file

@ -15,7 +15,7 @@ use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::{
audit::AuditError,
finding::{Confidence, Fix, FixDisposition, Severity, location::Locatable as _},
models::workflow::{JobCommon, Workflow},
models::workflow::{JobExt, Workflow},
utils::{self, ExtractedExpr},
};
use subfeature::Subfeature;
@ -81,7 +81,7 @@ impl Audit for BotConditions {
if let Some(If::Expr(expr)) = &job.r#if {
conds.push((
expr,
job.location_with_grip(),
job.location_with_name(),
job.location().with_keys(["if".into()]),
));
}
@ -91,7 +91,7 @@ impl Audit for BotConditions {
if let Some(If::Expr(expr)) = &step.r#if {
conds.push((
expr,
step.location_with_grip(),
step.location_with_name(),
step.location().with_keys(["if".into()]),
));
}
@ -538,7 +538,6 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Workflow
on:
pull_request_target:
@ -589,7 +588,6 @@ jobs:
}
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Workflow
on:
pull_request_target:
@ -643,7 +641,6 @@ jobs:
// Verify it suggests comment.user.login for issue_comment events
insta::assert_snapshot!(document.source(), @r#"
name: Test Issue Comment
on: issue_comment
@ -693,7 +690,6 @@ jobs:
// Verify it suggests review.user.login for pull_request_review events
insta::assert_snapshot!(document.source(), @r#"
name: Test PR Review
on: pull_request_review
@ -743,7 +739,6 @@ jobs:
// Verify it suggests issue.user.login for issues events
insta::assert_snapshot!(document.source(), @r#"
name: Test Issues
on: issues
@ -793,7 +788,6 @@ jobs:
// Verify it suggests release.author.login for release events
insta::assert_snapshot!(document.source(), @r#"
name: Test Release
on: release
@ -842,7 +836,6 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Create
on: create
@ -892,7 +885,6 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Workflow
on:
pull_request_target:

View file

@ -9,7 +9,7 @@ use crate::finding::location::{Locatable as _, Routable};
use crate::finding::{Confidence, Finding, Fix, FixDisposition, Severity};
use crate::models::StepCommon;
use crate::models::coordinate::{ActionCoordinate, ControlExpr, ControlFieldType, Toggle, Usage};
use crate::models::workflow::{JobCommon as _, NormalJob, Step, Steps};
use crate::models::workflow::{JobExt as _, NormalJob, Step, Steps};
use crate::state::AuditState;
use indexmap::IndexMap;
@ -444,7 +444,7 @@ impl CachePoisoning {
finding_builder = finding_builder.fix(fix);
}
finding_builder.build(step).ok()
finding_builder.build(step.workflow()).ok()
}
}
@ -555,7 +555,6 @@ jobs:
|findings: Vec<Finding>| {
let fixed_content = apply_fix_for_snapshot(workflow_content, findings);
insta::assert_snapshot!(fixed_content, @r"
name: Test Workflow
on: release
@ -600,7 +599,6 @@ jobs:
|findings: Vec<Finding>| {
let fixed_content = apply_fix_for_snapshot(workflow_content, findings);
insta::assert_snapshot!(fixed_content, @r"
name: Test Workflow
on: release

View file

@ -135,7 +135,7 @@ impl Audit for DependabotCooldown {
Self::finding()
.add_location(
update
.location_with_grip()
.location_with_name()
.primary()
.annotated("missing cooldown configuration"),
)
@ -202,7 +202,6 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -244,7 +243,6 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -286,7 +284,6 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -338,7 +335,6 @@ updates:
}
insta::assert_snapshot!(document.source(), @r"
version: 2
updates:
@ -383,7 +379,6 @@ updates:
// Verify the document remains unchanged
insta::assert_snapshot!(dependabot.as_document().source(), @r"
version: 2
updates:

View file

@ -61,7 +61,7 @@ impl Audit for DependabotExecution {
.primary()
.annotated("enabled here"),
)
.add_location(update.location_with_grip())
.add_location(update.location_with_name())
.fix(Self::create_set_deny_fix(update))
.build(dependabot)?,
);
@ -123,7 +123,6 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -159,7 +158,6 @@ updates:
// Verify the document remains unchanged
insta::assert_snapshot!(dependabot.as_document().source(), @r"
version: 2
updates:
@ -194,7 +192,6 @@ updates:
// Verify the document remains unchanged
insta::assert_snapshot!(dependabot.as_document().source(), @r"
version: 2
updates:
@ -243,7 +240,6 @@ updates:
}
insta::assert_snapshot!(document.source(), @r"
version: 2
updates:

View file

@ -14,7 +14,6 @@ use crate::{
static KNOWN_PERMISSIONS: LazyLock<HashMap<&str, Severity>> = LazyLock::new(|| {
[
("actions", Severity::High),
("artifact-metadata", Severity::Medium),
("attestations", Severity::High),
("checks", Severity::Medium),
("contents", Severity::High),
@ -22,8 +21,6 @@ static KNOWN_PERMISSIONS: LazyLock<HashMap<&str, Severity>> = LazyLock::new(|| {
("discussions", Severity::Medium),
("id-token", Severity::High),
("issues", Severity::High),
// What does the write permission even do here?
("models", Severity::Low),
("packages", Severity::High),
("pages", Severity::High),
("pull-requests", Severity::High),

View file

@ -1,5 +1,4 @@
use github_actions_models::common::Uses;
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::audit::AuditError;
@ -60,7 +59,6 @@ impl ForbiddenUses {
step.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.annotated("use of this action is forbidden"),
)
.build(step)?,

View file

@ -13,7 +13,7 @@ use crate::config::Config;
use crate::finding::location::Locatable as _;
use crate::finding::{Confidence, Finding, Severity};
use crate::models::StepCommon;
use crate::models::{workflow::JobCommon as _, workflow::Step};
use crate::models::{workflow::JobExt as _, workflow::Step};
use crate::state::AuditState;
use crate::utils;
use crate::utils::once::static_regex;
@ -88,22 +88,20 @@ const PWSH_REDIRECT_QUERY: &str = r#"
const PWSH_PIPELINE_QUERY: &str = r#"
(pipeline
(pipeline_chain
(command
command_name: (command_name) @cmd
command_elements: (command_elements
(_)*
(array_literal_expression
(unary_expression [
(string_literal
(expandable_string_literal (variable) @destination))
(variable) @destination
])
)
(_)*))
(#match? @cmd "(?i)out-file|add-content|set-content|tee-object")
(#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH")
)
(command
command_name: (command_name) @cmd
command_elements: (command_elements
(_)*
(array_literal_expression
(unary_expression [
(string_literal
(expandable_string_literal (variable) @destination))
(variable) @destination
])
)
(_)*))
(#match? @cmd "(?i)out-file|add-content|set-content|tee-object")
(#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH")
) @span
"#;
@ -394,7 +392,7 @@ impl Audit for GitHubEnv {
}
if let StepBody::Run { run, .. } = &step.deref().body {
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
let shell = step.shell().unwrap_or_else(|| {
tracing::warn!(
"github-env: couldn't determine shell type for {workflow}:{job} step {stepno}; assuming bash",
workflow = step.workflow().key.presentation_path(),
@ -421,7 +419,7 @@ impl Audit for GitHubEnv {
.with_keys(["run".into()])
.annotated(format!("write to {dest} may allow code execution")),
)
.build(step)?,
.build(step.workflow())?,
)
}
}
@ -440,7 +438,7 @@ impl Audit for GitHubEnv {
return Ok(findings);
};
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
let shell = step.shell().unwrap_or_else(|| {
tracing::warn!(
"github-env: couldn't determine shell type for {action} step {stepno}; assuming bash",
action = step.action().key.presentation_path(),
@ -465,7 +463,7 @@ impl Audit for GitHubEnv {
.with_keys(["run".into()])
.annotated(format!("write to {dest} may allow code execution")),
)
.build(step)?,
.build(step.action())?,
)
}

View file

@ -7,7 +7,6 @@
use anyhow::anyhow;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, Job, audit_meta};
use crate::{
@ -52,7 +51,7 @@ impl ImpostorCommit {
Ok(
match self
.client
.compare_commits(uses.owner(), uses.repo(), base_ref, head_ref)
.compare_commits(&uses.owner, &uses.repo, base_ref, head_ref)
.await
.map_err(Self::err)?
{
@ -77,13 +76,13 @@ impl ImpostorCommit {
return Ok(false);
};
// Fastest path: almost all commit refs will be at the tip of
// Fast path: almost all commit refs will be at the tip of
// the branch or tag's history, so check those first.
// Check tags before branches, since in practice version tags
// are more commonly pinned.
let tags = self
.client
.list_tags(uses.owner(), uses.repo())
.list_tags(&uses.owner, &uses.repo)
.await
.map_err(Self::err)?;
@ -95,7 +94,7 @@ impl ImpostorCommit {
let branches = self
.client
.list_branches(uses.owner(), uses.repo())
.list_branches(&uses.owner, &uses.repo)
.await
.map_err(Self::err)?;
@ -105,21 +104,6 @@ impl ImpostorCommit {
}
}
// Fast path: attempt to use GitHub's undocumented `branch_commits`
// API to see if the commit is present in any branch/tag.
// There are no stability guarantees for this API, so we fall back
// to the slow(er) paths if it fails.
match self
.client
.branch_commits(uses.owner(), uses.repo(), head_ref)
.await
{
Ok(branch_commits) => return Ok(branch_commits.is_empty()),
Err(e) => tracing::warn!("fast path impostor check failed for {uses}: {e}"),
}
// Slow path: use GitHub's comparison API to check each branch and tag's
// history for presence of the commit.
for branch in &branches {
if self
.named_ref_contains_commit(uses, &format!("refs/heads/{}", &branch.name), head_ref)
@ -147,7 +131,7 @@ impl ImpostorCommit {
async fn get_highest_tag(&self, uses: &RepositoryUses) -> Result<Option<String>, AuditError> {
let tags = self
.client
.list_tags(uses.owner(), uses.repo())
.list_tags(&uses.owner, &uses.repo)
.await
.map_err(Self::err)?;
@ -202,16 +186,16 @@ impl ImpostorCommit {
Ok(None) => {
tracing::warn!(
"No tags found for {}/{}, cannot create fix",
uses.owner(),
uses.repo()
uses.owner,
uses.repo
);
return None;
}
Err(e) => {
tracing::error!(
"Failed to get latest tag for {}/{}: {}",
uses.owner(),
uses.repo(),
uses.owner,
uses.repo,
e
);
return None;
@ -219,8 +203,8 @@ impl ImpostorCommit {
};
// Build the new uses string with the latest tag
let mut uses_slug = format!("{}/{}", uses.owner(), uses.repo());
if let Some(subpath) = &uses.subpath() {
let mut uses_slug = format!("{}/{}", uses.owner, uses.repo);
if let Some(subpath) = &uses.subpath {
uses_slug.push_str(&format!("/{subpath}"));
}
let fixed_uses = format!("{uses_slug}@{latest_tag}");
@ -272,13 +256,8 @@ impl Audit for ImpostorCommit {
let mut finding_builder = Self::finding()
.severity(Severity::High)
.confidence(Confidence::High)
.add_location(step.location_with_grip())
.add_location(
step.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.primary()
.annotated(IMPOSTOR_ANNOTATION),
step.location().primary().annotated(IMPOSTOR_ANNOTATION),
);
if let Some(fix) = self.create_impostor_fix(uses, &step).await {
@ -300,14 +279,8 @@ impl Audit for ImpostorCommit {
let mut finding_builder = Self::finding()
.severity(Severity::High)
.confidence(Confidence::High)
.add_location(reusable.location_with_grip())
.add_location(
reusable
.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.primary()
.annotated(IMPOSTOR_ANNOTATION),
reusable.location().primary().annotated(IMPOSTOR_ANNOTATION),
);
if let Some(fix) = self.create_reusable_fix(uses, &reusable).await {
@ -337,20 +310,13 @@ impl Audit for ImpostorCommit {
let mut finding_builder = Self::finding()
.severity(Severity::High)
.confidence(Confidence::High)
.add_location(step.location_with_grip())
.add_location(
step.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.primary()
.annotated(IMPOSTOR_ANNOTATION),
);
.add_location(step.location().primary().annotated(IMPOSTOR_ANNOTATION));
if let Some(fix) = self.create_impostor_fix(uses, step).await {
finding_builder = finding_builder.fix(fix);
}
findings.push(finding_builder.build(step).map_err(Self::err)?);
findings.push(finding_builder.build(step.action()).map_err(Self::err)?);
}
Ok(findings)
@ -415,7 +381,6 @@ jobs:
// Apply the fix and snapshot test the result
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Impostor Commit Fix
on: push
jobs:

View file

@ -295,7 +295,6 @@ jobs:
assert!(fixed_document.source().contains("ANOTHER_VAR: also-keep"));
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
jobs:
@ -350,7 +349,6 @@ jobs:
assert!(fixed_document.source().contains("GLOBAL_VAR: keep-me"));
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
env:
@ -405,7 +403,6 @@ jobs:
assert!(fixed_document.source().contains("STEP_VAR: keep-me"));
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
jobs:
@ -449,7 +446,6 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
jobs:

View file

@ -34,7 +34,7 @@ impl KnownVulnerableActions {
&self,
uses: &RepositoryUses,
) -> Result<Vec<(Severity, String, Option<String>)>, AuditError> {
let version = match &uses.git_ref() {
let version = match &uses.git_ref {
// If `uses` is pinned to a symbolic ref, we need to perform
// feats of heroism to figure out what's going on.
// In the "happy" case the symbolic ref is an exact version tag,
@ -54,7 +54,7 @@ impl KnownVulnerableActions {
version if !uses.ref_is_commit() => {
let Some(commit_ref) = self
.client
.commit_for_ref(uses.owner(), uses.repo(), version)
.commit_for_ref(&uses.owner, &uses.repo, version)
.await
.map_err(Self::err)?
else {
@ -65,7 +65,7 @@ impl KnownVulnerableActions {
match self
.client
.longest_tag_for_commit(uses.owner(), uses.repo(), &commit_ref)
.longest_tag_for_commit(&uses.owner, &uses.repo, &commit_ref)
.await
.map_err(Self::err)?
{
@ -84,7 +84,7 @@ impl KnownVulnerableActions {
commit_ref => {
match self
.client
.longest_tag_for_commit(uses.owner(), uses.repo(), commit_ref)
.longest_tag_for_commit(&uses.owner, &uses.repo, commit_ref)
.await
.map_err(Self::err)?
{
@ -100,7 +100,7 @@ impl KnownVulnerableActions {
let vulns = self
.client
.gha_advisories(uses.owner(), uses.repo(), &version)
.gha_advisories(&uses.owner, &uses.repo, &version)
.await
.map_err(Self::err)?;
@ -135,8 +135,8 @@ impl KnownVulnerableActions {
target_version: String,
step: &impl StepCommon<'doc>,
) -> Result<Fix<'doc>, AuditError> {
let mut uses_slug = format!("{}/{}", uses.owner(), uses.repo());
if let Some(subpath) = &uses.subpath() {
let mut uses_slug = format!("{}/{}", uses.owner, uses.repo);
if let Some(subpath) = &uses.subpath {
uses_slug.push_str(&format!("/{subpath}"));
}
@ -162,13 +162,13 @@ impl KnownVulnerableActions {
let (target_ref, target_commit) = match self
.client
.commit_for_ref(uses.owner(), uses.repo(), &prefixed_version)
.commit_for_ref(&uses.owner, &uses.repo, &prefixed_version)
.await
{
Ok(commit) => commit.map(|commit| (&prefixed_version, commit)),
Err(_) => self
.client
.commit_for_ref(uses.owner(), uses.repo(), &bare_version)
.commit_for_ref(&uses.owner, &uses.repo, &bare_version)
.await
.map_err(Self::err)?
.map(|commit| (&bare_version, commit)),
@ -176,8 +176,8 @@ impl KnownVulnerableActions {
.ok_or_else(|| {
Self::err(anyhow!(
"Cannot resolve version {bare_version} to commit hash for {}/{}",
uses.owner(),
uses.repo()
uses.owner,
uses.repo
))
})?;
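
Condensed, the resolution above tries the `v`-prefixed tag first and consults the bare version only when that lookup errors outright; a successful lookup that finds no commit is simply unresolvable. A synchronous sketch with `lookup` standing in for `commit_for_ref` (the types here are ours):

    fn resolve_target(
        lookup: impl Fn(&str) -> Result<Option<String>, String>,
        bare_version: &str,
    ) -> Result<Option<(String, String)>, String> {
        let prefixed = format!("v{bare_version}");
        match lookup(&prefixed) {
            // Found (or definitively absent) under the prefixed tag.
            Ok(commit) => Ok(commit.map(|c| (prefixed, c))),
            // Lookup failed outright: retry with the bare version.
            Err(_) => Ok(lookup(bare_version)?
                .map(|c| (bare_version.to_string(), c))),
        }
    }
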
@ -208,7 +208,7 @@ impl KnownVulnerableActions {
// prefixed with `v` or not. Instead of trying to figure it out
// via the GitHub API, we match the style of the current `uses`
// clause.
let target_version_tag = if uses.git_ref().starts_with('v') {
let target_version_tag = if uses.git_ref.starts_with('v') {
prefixed_version
} else {
bare_version
@ -362,7 +362,12 @@ jobs:
let step = &steps[0];
// Test the fix directly
let uses = RepositoryUses::parse("actions/checkout@v2").unwrap();
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let audit = create_test_audit();
let fix = audit
@ -372,7 +377,6 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test Vulnerable Actions
on: push
jobs:
@ -413,7 +417,12 @@ jobs:
let step = &steps[0];
// Test the fix directly
let uses = RepositoryUses::parse("actions/setup-node@v1").unwrap();
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "setup-node".to_string(),
git_ref: "v1".to_string(),
subpath: None,
};
let audit = create_test_audit();
let fix = audit
@ -422,8 +431,7 @@ jobs:
.unwrap();
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test Node Setup
on: push
jobs:
@ -436,7 +444,7 @@ jobs:
node-version: '18'
- name: Install dependencies
run: npm install
");
"#);
}
#[tokio::test]
@ -466,7 +474,12 @@ jobs:
let step = &steps[0];
// Test the fix directly
let uses = RepositoryUses::parse("codecov/codecov-action@v1").unwrap();
let uses = RepositoryUses {
owner: "codecov".to_string(),
repo: "codecov-action".to_string(),
git_ref: "v1".to_string(),
subpath: None,
};
let audit = create_test_audit();
let fix = audit
@ -476,7 +489,6 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test Third Party Action
on: push
jobs:
@ -529,7 +541,12 @@ jobs:
let audit = create_test_audit();
// Fix checkout action
let uses_checkout = RepositoryUses::parse("actions/checkout@v2").unwrap();
let uses_checkout = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let fix_checkout = audit
.create_upgrade_fix(&uses_checkout, "v4".into(), &steps[0])
.await
@ -537,7 +554,12 @@ jobs:
current_document = fix_checkout.apply(&current_document).unwrap();
// Fix setup-node action
let uses_node = RepositoryUses::parse("actions/setup-node@v1").unwrap();
let uses_node = RepositoryUses {
owner: "actions".to_string(),
repo: "setup-node".to_string(),
git_ref: "v1".to_string(),
subpath: None,
};
let fix_node = audit
.create_upgrade_fix(&uses_node, "v4".into(), &steps[1])
.await
@ -545,15 +567,19 @@ jobs:
current_document = fix_node.apply(&current_document).unwrap();
// Fix cache action
let uses_cache = RepositoryUses::parse("actions/cache@v2").unwrap();
let uses_cache = RepositoryUses {
owner: "actions".to_string(),
repo: "cache".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let fix_cache = audit
.create_upgrade_fix(&uses_cache, "v4".into(), &steps[2])
.await
.unwrap();
current_document = fix_cache.apply(&current_document).unwrap();
insta::assert_snapshot!(current_document.source(), @r"
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Multiple Vulnerable Actions
on: push
jobs:
@ -573,7 +599,7 @@ jobs:
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
- name: Install dependencies
run: npm install
");
"#);
}
#[tokio::test]
@ -601,7 +627,12 @@ jobs:
let step = &steps[0];
// Test the fix with subpath
let uses = RepositoryUses::parse("owner/repo/subpath@v1").unwrap();
let uses = RepositoryUses {
owner: "owner".to_string(),
repo: "repo".to_string(),
git_ref: "v1".to_string(),
subpath: Some("subpath".to_string()),
};
let audit = create_test_audit();
let fix = audit
@ -611,7 +642,6 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
name: Test Action with Subpath
on: push
jobs:
@ -648,7 +678,12 @@ jobs:
};
let step = &steps[0];
let uses = RepositoryUses::parse("actions/checkout@v2").unwrap();
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
// Test that when first_patched_version is provided, it's used
let audit = create_test_audit();
@ -660,8 +695,7 @@ jobs:
.apply(workflow.as_document())
.unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test First Patched Version Priority
on: push
jobs:
@ -670,7 +704,7 @@ jobs:
steps:
- name: Vulnerable action
uses: actions/checkout@v3.1.0
");
"#);
}
#[tokio::test]
@ -695,7 +729,12 @@ jobs:
};
let step = &steps[0];
let uses = RepositoryUses::parse("actions/checkout@v2").unwrap();
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let audit = create_test_audit();
let fix = audit
@ -706,7 +745,6 @@ jobs:
let new_doc = fix.apply(workflow.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Non-Commit Ref
on: push
jobs:
@ -770,7 +808,6 @@ jobs:
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Commit Hash Pinning Real API
on: push
permissions: {}
@ -827,7 +864,6 @@ jobs:
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Commit Hash Pinning Real API
on: push
permissions: {}

View file

@ -18,7 +18,6 @@ use crate::{
};
pub(crate) mod anonymous_definition;
pub(crate) mod archived_uses;
pub(crate) mod artipacked;
pub(crate) mod bot_conditions;
pub(crate) mod cache_poisoning;
@ -189,9 +188,10 @@ pub(crate) enum AuditLoadError {
}
#[derive(Error, Debug)]
#[error("error in '{ident}' audit")]
#[error("error in {ident}")]
pub(crate) struct AuditError {
ident: &'static str,
#[source]
source: anyhow::Error,
}

View file

@ -33,7 +33,7 @@ impl Obfuscation {
// GitHub happily interprets but otherwise gums up pattern matching
// in audits like unpinned-uses, forbidden-uses, and cache-poisoning.
// We check for some of these forms of nonsense here and report them.
if let Some(subpath) = uses.subpath() {
if let Some(subpath) = uses.subpath.as_deref() {
for component in subpath.split('/') {
match component {
// . and .. are valid in uses subpaths, but are impossible to
@ -60,7 +60,7 @@ impl Obfuscation {
/// Normalizes a uses path by removing unnecessary components like empty slashes, `.`, and `..`.
fn normalize_uses_path(&self, uses: &RepositoryUses) -> Option<String> {
let subpath = uses.subpath()?;
let subpath = uses.subpath.as_deref()?;
let mut components = Vec::new();
for component in subpath.split('/') {
@ -83,19 +83,14 @@ impl Obfuscation {
// If all components were removed, the subpath should be empty
if components.is_empty() {
Some(format!(
"{}/{}@{}",
uses.owner(),
uses.repo(),
uses.git_ref()
))
Some(format!("{}/{}@{}", uses.owner, uses.repo, uses.git_ref))
} else {
Some(format!(
"{}/{}/{}@{}",
uses.owner(),
uses.repo(),
uses.owner,
uses.repo,
components.join("/"),
uses.git_ref()
uses.git_ref
))
}
}
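
The normalization above is a small stack machine over `/`-separated components. A sketch of the core loop (treating `..` at the root as a silent no-op is an assumption here):

    fn normalize_subpath(subpath: &str) -> String {
        let mut components: Vec<&str> = Vec::new();
        for component in subpath.split('/') {
            match component {
                // Empty components (from doubled slashes) and `.` are no-ops.
                "" | "." => continue,
                // `..` walks back up one component.
                ".." => {
                    components.pop();
                }
                other => components.push(other),
            }
        }
        components.join("/")
    }

With this, `normalize_subpath("./init//")` collapses to `init`, which is why an empty result means the subpath can be dropped entirely from the rebuilt `uses:` string.
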
@ -225,10 +220,7 @@ impl Obfuscation {
}
}
crate::models::StepBodyCommon::Run { .. } => {
if let Some(("cmd" | "cmd.exe", shell_loc)) = step
.shell()
.map(|(shell, loc)| (utils::normalize_shell(shell), loc))
{
if let Some("cmd" | "cmd.exe") = step.shell().map(utils::normalize_shell) {
// `shell: cmd` is basically impossible to analyze: it has no formal
// grammar and has several line continuation mechanisms that stymie
// naive matching. It also hasn't been the default shell on Windows
@ -238,10 +230,11 @@ impl Obfuscation {
.confidence(Confidence::High)
.severity(Severity::Low)
.add_location(
step.location_with_grip()
step.location()
.primary()
.with_keys(["shell".into()])
.annotated("Windows CMD shell limits analysis"),
)
.add_location(shell_loc.primary())
.tip("use 'shell: pwsh' or 'shell: bash' for improved analysis")
.build(step)
.map_err(Self::err)?,
@ -407,8 +400,7 @@ jobs:
"#;
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
insta::assert_snapshot!(result, @r"
insta::assert_snapshot!(result, @r#"
name: Test Workflow
on: push
@ -417,7 +409,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
");
"#);
}
#[tokio::test]
@ -434,8 +426,7 @@ jobs:
"#;
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
insta::assert_snapshot!(result, @r"
insta::assert_snapshot!(result, @r#"
name: Test Workflow
on: push
@ -444,7 +435,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: github/codeql-action/init@v2
");
"#);
}
#[tokio::test]
@ -461,8 +452,7 @@ jobs:
"#;
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
insta::assert_snapshot!(result, @r"
insta::assert_snapshot!(result, @r#"
name: Test Workflow
on: push
@ -471,6 +461,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/cache/save@v4
");
"#);
}
}

View file

@ -43,12 +43,12 @@ impl RefConfusion {
// TODO: use a tokio JoinSet here?
let branches_match = self
.client
.has_branch(uses.owner(), uses.repo(), sym_ref)
.has_branch(&uses.owner, &uses.repo, sym_ref)
.await
.map_err(Self::err)?;
let tags_match = self
.client
.has_tag(uses.owner(), uses.repo(), sym_ref)
.has_tag(&uses.owner, &uses.repo, sym_ref)
.await
.map_err(Self::err)?;
@ -158,7 +158,7 @@ impl Audit for RefConfusion {
.with_keys(["uses".into()])
.annotated(REF_CONFUSION_ANNOTATION),
)
.build(step)
.build(step.action())
.map_err(Self::err)?,
);
}

View file

@ -106,7 +106,7 @@ impl RefVersionMismatch {
let Some(commit_for_ref) = self
.client
.commit_for_ref(uses.owner(), uses.repo(), version_from_comment)
.commit_for_ref(&uses.owner, &uses.repo, version_from_comment)
.await
.map_err(Self::err)?
else {
@ -138,7 +138,7 @@ impl RefVersionMismatch {
if let Some(suggestion) = self
.client
.longest_tag_for_commit(uses.owner(), uses.repo(), commit_sha)
.longest_tag_for_commit(&uses.owner, &uses.repo, commit_sha)
.await
.map_err(Self::err)?
{

View file

@ -1,10 +1,10 @@
use github_actions_models::workflow::job::Secrets;
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::{
audit::AuditError,
finding::{Confidence, location::Locatable as _},
models::workflow::JobExt as _,
};
pub(crate) struct SecretsInherit;
@ -38,7 +38,6 @@ impl Audit for SecretsInherit {
job.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, job.uses.raw()))
.annotated("this reusable workflow"),
)
.add_location(
@ -48,7 +47,7 @@ impl Audit for SecretsInherit {
)
.confidence(Confidence::High)
.severity(crate::finding::Severity::Medium)
.build(job)?,
.build(job.parent())?,
);
}

View file

@ -2,7 +2,6 @@
use anyhow::anyhow;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, audit_meta};
use crate::{
@ -30,7 +29,7 @@ impl StaleActionRefs {
let tag = match &uses.commit_ref() {
Some(commit_ref) => self
.client
.longest_tag_for_commit(uses.owner(), uses.repo(), commit_ref)
.longest_tag_for_commit(&uses.owner, &uses.repo, commit_ref)
.await
.map_err(Self::err)?,
None => return Ok(false),
@ -54,12 +53,7 @@ impl StaleActionRefs {
.confidence(Confidence::High)
.severity(Severity::Low)
.persona(Persona::Pedantic)
.add_location(
step.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw())),
)
.add_location(step.location().primary().with_keys(["uses".into()]))
.build(step)?,
);
}

View file

@ -185,7 +185,7 @@ impl TemplateInjection {
return None;
}
let shell = utils::normalize_shell(step.shell()?.0);
let shell = utils::normalize_shell(step.shell()?);
match shell {
"bash" | "sh" | "zsh" => Some(format!("${{{env_var}}}")),
@ -707,7 +707,6 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection
on: push
jobs:
@ -759,7 +758,6 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection
on: push
jobs:
@ -813,7 +811,6 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection
on: push
jobs:
@ -923,7 +920,6 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Multiple Template Injections
on: push
jobs:
@ -990,7 +986,6 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Duplicate Template Injections
on: push
jobs:
@ -1051,7 +1046,6 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Duplicate Template Injections
on: push
jobs:
@ -1115,7 +1109,6 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Duplicate Template Injections
on: push
jobs:
@ -1232,7 +1225,6 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Bash
on: push
jobs:
@ -1279,7 +1271,6 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Bash
on: push
jobs:
@ -1325,8 +1316,7 @@ jobs:
finding,
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r"
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - CMD
on: push
jobs:
@ -1336,7 +1326,7 @@ jobs:
- name: Vulnerable step with cmd shell
shell: cmd
run: echo User is %GITHUB_ACTOR%
");
"#);
}
}
);
@ -1373,7 +1363,6 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - PowerShell
on: push
jobs:
@ -1420,7 +1409,6 @@ jobs:
);
// Ubuntu default shell is bash, so should use ${VAR} syntax
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Default Shell Ubuntu
on: push
jobs:
@ -1466,7 +1454,6 @@ jobs:
);
// Windows default shell is pwsh, so should use $env:VAR syntax
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Default Shell Windows
on: push
jobs:
@ -1511,8 +1498,7 @@ jobs:
finding,
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r"
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - CMD with Custom Env
on: push
jobs:
@ -1524,7 +1510,7 @@ jobs:
run: echo PR title is %GITHUB_EVENT_PULL_REQUEST_TITLE%
env:
GITHUB_EVENT_PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }}
");
"#);
}
}
);

View file

@ -4,6 +4,7 @@ use crate::{
Confidence, Finding, Persona, Severity,
location::{Locatable as _, SymbolicLocation},
},
models::workflow::JobExt as _,
state::AuditState,
};
@ -29,7 +30,7 @@ impl UnpinnedImages {
.confidence(Confidence::High)
.add_location(annotated_location)
.persona(persona)
.build(job)
.build(job.parent())
}
}
@ -51,11 +52,13 @@ impl Audit for UnpinnedImages {
_config: &crate::config::Config,
) -> anyhow::Result<Vec<Finding<'doc>>, AuditError> {
let mut findings = vec![];
let mut image_refs_with_locations: Vec<(&'doc DockerUses, SymbolicLocation<'doc>)> = vec![];
let mut image_refs_with_locations: Vec<(DockerUses, SymbolicLocation<'doc>)> = vec![];
if let Some(Container::Container { image, .. }) = &job.container {
image_refs_with_locations.push((
image,
image
.parse()
.expect("failed to parse job container image as DockerUses"),
job.location()
.primary()
.with_keys(["container".into(), "image".into()]),
@ -65,7 +68,9 @@ impl Audit for UnpinnedImages {
for (service, config) in job.services.iter() {
if let Container::Container { image, .. } = &config {
image_refs_with_locations.push((
image,
image
.parse()
.expect("failed to parse service container image as DockerUses"),
job.location().primary().with_keys([
"services".into(),
service.as_str().into(),
@ -76,9 +81,9 @@ impl Audit for UnpinnedImages {
}
for (image, location) in image_refs_with_locations {
match image.hash() {
match image.hash {
Some(_) => continue,
None => match image.tag() {
None => match image.tag.as_deref() {
Some("latest") => {
findings.push(self.build_finding(
location,
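
The truncated match above encodes a decision tree over the image's digest and tag; restated with our own variant names (the outcomes for non-`latest` tags are cut off in this hunk, so treat those arms as assumptions):

    enum ImagePin {
        Digest,   // pinned by hash: skipped by the audit
        Latest,   // `latest` tag: flagged
        Tag,      // some other tag
        Untagged, // no tag at all
    }

    fn classify(hash: Option<&str>, tag: Option<&str>) -> ImagePin {
        match (hash, tag) {
            (Some(_), _) => ImagePin::Digest,
            (None, Some("latest")) => ImagePin::Latest,
            (None, Some(_)) => ImagePin::Tag,
            (None, None) => ImagePin::Untagged,
        }
    }
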

View file

@ -1,5 +1,4 @@
use github_actions_models::common::Uses;
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::audit::AuditError;
@ -32,7 +31,7 @@ impl UnpinnedUses {
Uses::Docker(_) => {
if uses.unpinned() {
Some((
"image is not pinned to a tag, branch, or hash ref".into(),
"action is not pinned to a tag, branch, or hash ref".into(),
Severity::Medium,
Persona::default(),
))
@ -109,7 +108,6 @@ impl UnpinnedUses {
step.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.annotated(annotation),
)
.build(step)?,

View file

@ -6,7 +6,7 @@ use crate::{
Confidence, Fix, FixDisposition, Severity,
location::{Locatable as _, SymbolicLocation},
},
models::AsDocument,
models::{AsDocument, workflow::JobExt},
utils,
};
use yamlpatch::{Op, Patch};
@ -158,7 +158,7 @@ impl Audit for UnsoundCondition {
job: &crate::models::workflow::NormalJob<'doc>,
_config: &crate::config::Config,
) -> Result<Vec<crate::finding::Finding<'doc>>, AuditError> {
self.process_conditions(job, job.conditions())
self.process_conditions(job.parent(), job.conditions())
}
async fn audit_reusable_job<'doc>(
@ -167,7 +167,7 @@ impl Audit for UnsoundCondition {
_config: &crate::config::Config,
) -> Result<Vec<crate::finding::Finding<'doc>>, AuditError> {
let conds = job.r#if.iter().map(|cond| (cond, job.location()));
self.process_conditions(job, conds)
self.process_conditions(job.parent(), conds)
}
async fn audit_action<'doc>(
@ -244,7 +244,6 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -284,7 +283,6 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -325,7 +323,6 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -370,7 +367,6 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -410,8 +406,7 @@ jobs:
assert_eq!(findings.len(), 1);
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r"
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -419,7 +414,7 @@ jobs:
if: |-
${{ github.event_name == 'pull_request' }}
uses: ./.github/workflows/reusable.yml
");
"#);
}
);
}
@ -468,7 +463,6 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test
on: push
jobs:

View file

@ -9,6 +9,7 @@ use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::{
audit::AuditError,
finding::{Confidence, Severity},
models::workflow::JobExt as _,
utils::{self, ExtractedExpr},
};
@ -67,7 +68,7 @@ impl Audit for UnsoundContains {
.primary()
.annotated(format!("contains(..) condition can be bypassed if attacker can control '{context}'")),
)
.build(job)
.build(job.parent())
})
})
.collect()

View file

@ -1,3 +1,4 @@
use std::collections::HashSet;
use std::{sync::LazyLock, vec};
use anyhow::Context as _;
@ -12,7 +13,7 @@ use crate::{
models::{
StepBodyCommon, StepCommon,
coordinate::{ActionCoordinate, ControlExpr, ControlFieldType, Toggle},
workflow::JobCommon as _,
workflow::JobExt as _,
},
state::AuditState,
utils,
@ -179,101 +180,66 @@ impl UseTrustedPublishing {
match cmd {
"cargo" => {
// Looking for `cargo ... publish` without `--dry-run` or `-n`.
let args = args.collect::<HashSet<_>>();
args.any(|arg| arg == "publish")
&& args.all(|arg| arg != "--dry-run" && arg != "-n")
// Looking for `cargo ... publish` without `--dry-run` or `-n`.
args.contains("publish") && !args.contains("--dry-run") && !args.contains("-n")
}
"uv" => {
match args.find(|arg| *arg == "publish" || *arg == "run") {
Some("publish") => {
// `uv ... publish` without `--dry-run`.
args.all(|arg| arg != "--dry-run")
}
Some("run") => {
// `uv ... run ... twine ... upload`.
args.any(|arg| arg == "twine") && args.any(|arg| arg == "upload")
}
_ => false,
}
}
"uvx" => {
// Looking for `uvx twine ... upload`.
// Like with pipx, we loosely match the `twine` part
// to allow for version specifiers. In uvx's case, these
// are formatted like `twine@X.Y.Z`.
let args = args.collect::<HashSet<_>>();
args.any(|arg| arg.starts_with("twine")) && args.any(|arg| arg == "upload")
// Looking for `uv ... publish` without `--dry-run`.
args.contains("publish") && !args.contains("--dry-run")
}
"hatch" | "pdm" => {
// Looking for `hatch ... publish` or `pdm ... publish`.
args.any(|arg| arg == "publish")
}
"poetry" => {
// Looking for `poetry ... publish` without `--dry-run`.
//
// Poetry has no support for Trusted Publishing at all as
// of 2025-12-01: https://github.com/python-poetry/poetry/issues/7940
args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run")
}
"twine" => {
// Looking for `twine ... upload`.
args.any(|arg| arg == "upload")
}
"pipx" => {
// TODO: also match `pipx ... run ... uv ... publish`, etc.
// Looking for `pipx ... run ... twine ... upload`.
//
// A wrinkle here is that `pipx run` takes version specifiers
// too, e.g. `pipx run twine==X.Y.Z upload ...`. So we only
// loosely match the `twine` part.
args.any(|arg| arg == "run")
&& args.any(|arg| arg.starts_with("twine"))
&& args.any(|arg| arg == "upload")
}
_ if cmd.starts_with("python") => {
// Looking for `python* ... -m ... twine ... upload`.
args.any(|arg| arg == "-m")
&& args.any(|arg| arg == "twine")
&& args.any(|arg| arg == "upload")
}
"gem" => {
// Looking for `gem ... push`.
args.any(|arg| arg == "push")
}
"bundle" => {
// Looking for `bundle ... exec ... gem ... push`.
args.any(|arg| arg == "exec")
&& args.any(|arg| arg == "gem")
&& args.any(|arg| arg == "push")
}
"npm" => {
// Looking for `npm ... publish` without `--dry-run`.
let args = args.collect::<HashSet<_>>();
// TODO: Figure out `npm run ... publish` patterns.
args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run")
// Looking for `npm ... publish` without `--dry-run`.
args.contains("publish") && !args.contains("--dry-run")
}
"yarn" => {
let args = args.collect::<HashSet<_>>();
// TODO: Figure out `yarn run ... publish` patterns.
// TODO: Figure out `yarn ... publish` patterns for lerna/npm workspaces.
// Looking for `yarn ... npm publish` without `--dry-run` or `-n`.
args.any(|arg| arg == "npm") && args.all(|arg| arg != "--dry-run" && arg != "-n")
args.contains("npm")
&& args.contains("publish")
&& !args.contains("--dry-run")
&& !args.contains("-n")
}
"pnpm" => {
let args = args.collect::<HashSet<_>>();
// TODO: Figure out `pnpm run ... publish` patterns.
// Looking for `pnpm ... publish` without `--dry-run`.
args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run")
args.contains("publish") && !args.contains("--dry-run")
}
"nuget" | "nuget.exe" => {
// Looking for `nuget ... push`.
args.any(|arg| arg == "push")
}
"dotnet" => {
// Looking for `dotnet ... nuget ... push`.
args.any(|arg| arg == "nuget") && args.any(|arg| arg == "push")
// Looking for `dotnet ... nuget push`.
args.next()
.map(|cmd| cmd == "nuget" && Self::is_publish_command(cmd, args))
.unwrap_or(false)
}
_ => false,
}
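
Two of the arms above, condensed into a standalone predicate (the cargo arm per the main side; flags are order-insensitive because the args are collected into a set first):

    use std::collections::HashSet;

    fn is_publish<'a>(cmd: &str, args: impl Iterator<Item = &'a str>) -> bool {
        let args: HashSet<&str> = args.collect();
        match cmd {
            "cargo" => {
                args.contains("publish")
                    && !args.contains("--dry-run")
                    && !args.contains("-n")
            }
            "twine" => args.contains("upload"),
            _ => false,
        }
    }

So `is_publish("cargo", ["publish", "-p", "foo"].into_iter())` is true, while adding `--dry-run` flips it to false, matching the removed test table below.
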
@ -430,7 +396,7 @@ impl Audit for UseTrustedPublishing {
if let StepBodyCommon::Run { run, .. } = step.body()
&& !step.parent.has_id_token()
{
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
let shell = step.shell().unwrap_or_else(|| {
tracing::debug!(
"use-trusted-publishing: couldn't determine shell type for {workflow}:{job} step {stepno}",
workflow = step.workflow().key.filename(),
@ -476,69 +442,3 @@ impl Audit for UseTrustedPublishing {
self.process_step(step)
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_is_publish_command() {
for (args, is_publish_command) in &[
(&["cargo", "publish"][..], true),
(&["cargo", "publish", "-p", "foo"][..], true),
(&["cargo", "publish", "--dry-run"][..], false),
(&["cargo", "publish", "-n"][..], false),
(&["cargo", "build"][..], false),
(&["uv", "publish"][..], true),
(&["uv", "publish", "dist/*"][..], true),
(&["uv", "publish", "--dry-run"][..], false),
(&["uv", "run", "--dev", "twine", "upload"][..], true),
(&["uv", "run", "twine", "upload"][..], true),
(&["uv"][..], false),
(&["uv", "sync"][..], false),
(&["uvx", "twine", "upload"][..], true),
(&["uvx", "twine@3.4.1", "upload"][..], true),
(&["uvx", "twine@6.1.0", "upload"][..], true),
(&["uvx", "twine"][..], false),
(&["poetry", "publish"][..], true),
(&["poetry", "publish", "--dry-run"][..], false),
(&["hatch", "publish"][..], true),
(&["pdm", "publish"][..], true),
(&["twine", "upload", "dist/*"][..], true),
(&["pipx", "run", "twine", "upload", "dist/*"][..], true),
(
&["pipx", "run", "twine==3.4.1", "upload", "dist/*"][..],
true,
),
(
&["pipx", "run", "twine==6.1.0", "upload", "dist/*"][..],
true,
),
(&["python", "-m", "twine", "upload", "dist/*"][..], true),
(&["python3.9", "-m", "twine", "upload", "dist/*"][..], true),
(&["twine", "check", "dist/*"], false),
(&["gem", "push", "mygem-0.1.0.gem"][..], true),
(
&["bundle", "exec", "gem", "push", "mygem-0.1.0.gem"][..],
true,
),
(&["npm", "publish"][..], true),
(&["npm", "run", "publish"][..], true),
(&["npm", "publish", "--dry-run"][..], false),
(&["yarn", "npm", "publish"][..], true),
(&["yarn", "npm", "publish", "--dry-run"][..], false),
(&["pnpm", "publish"][..], true),
(&["pnpm", "publish", "--dry-run"][..], false),
(&["nuget", "push", "MyPackage.nupkg"][..], true),
(&["nuget.exe", "push", "MyPackage.nupkg"][..], true),
(&["dotnet", "nuget", "push", "MyPackage.nupkg"][..], true),
(&["dotnet", "build"][..], false),
] {
let cmd = args[0];
let args_iter = args[1..].iter().map(|s| *s);
assert_eq!(
super::UseTrustedPublishing::is_publish_command(cmd, args_iter),
*is_publish_command,
"cmd: {cmd:?}, args: {args:?}"
);
}
}
}

View file

@ -21,12 +21,7 @@ use crate::{
registry::input::RepoSlug,
};
const CONFIG_CANDIDATES: &[&str] = &[
".github/zizmor.yml",
".github/zizmor.yaml",
"zizmor.yml",
"zizmor.yaml",
];
const CONFIG_CANDIDATES: &[&str] = &[".github/zizmor.yml", "zizmor.yml"];
#[derive(Error, Debug)]
#[error("configuration error in {path}")]
@ -259,7 +254,7 @@ impl UnpinnedUsesPolicies {
&self,
uses: &RepositoryUses,
) -> (Option<&RepositoryUsesPattern>, UsesPolicy) {
match self.policy_tree.get(uses.owner()) {
match self.policy_tree.get(&uses.owner) {
Some(policies) => {
// Policies are ordered by specificity, so we can
// iterate and return eagerly.

View file

@ -647,28 +647,24 @@
},
"package-ecosystem-values": {
"enum": [
"bazel",
"bun",
"bundler",
"cargo",
"composer",
"conda",
"devcontainers",
"docker",
"docker-compose",
"dotnet-sdk",
"elm",
"github-actions",
"gitsubmodule",
"github-actions",
"gomod",
"gradle",
"helm",
"julia",
"maven",
"mix",
"npm",
"nuget",
"opentofu",
"pip",
"pub",
"rust-toolchain",
@ -1049,21 +1045,6 @@
"versioning-strategy": {
"$ref": "#/definitions/versioning-strategy",
"description": "How to update manifest version requirements"
},
"patterns": {
"description": "Array of dependency patterns to include in a multi-ecosystem group. Required when using multi-ecosystem-group. Use '*' to include all dependencies.",
"type": "array",
"items": {
"type": "string",
"minLength": 1
},
"minItems": 1,
"uniqueItems": true
},
"multi-ecosystem-group": {
"description": "String identifier linking this ecosystem to a multi-ecosystem group",
"type": "string",
"minLength": 1
}
},
"allOf": [
@ -1075,15 +1056,6 @@
{ "required": ["directories"] },
{ "required": ["directory"] }
]
},
{
"$comment": "If multi-ecosystem-group is specified, patterns is required",
"if": {
"required": ["multi-ecosystem-group"]
},
"then": {
"required": ["patterns"]
}
}
]
},
@ -1160,129 +1132,6 @@
}
},
"minProperties": 1
},
"multi-ecosystem-group": {
"type": "object",
"description": "Define a group that spans multiple package ecosystems, allowing consolidated pull requests across different ecosystems",
"additionalProperties": false,
"properties": {
"schedule": {
"description": "Schedule preferences for the group",
"type": "object",
"properties": {
"interval": {
"$ref": "#/definitions/schedule-interval"
},
"day": {
"$ref": "#/definitions/schedule-day",
"description": "Specify an alternative day to check for updates"
},
"time": {
"type": "string",
"description": "Specify an alternative time of day to check for updates (format: hh:mm)",
"pattern": "^([01][0-9]|2[0-3]):[0-5][0-9]$"
},
"timezone": {
"$ref": "#/definitions/timezone",
"description": "The time zone identifier must be from the Time Zone database maintained by IANA"
},
"cronjob": {
"type": "string",
"description": "Specify a valid cron expression for updates"
}
},
"allOf": [
{
"$comment": "If interval type is 'cron', enforce 'cronjob' property.",
"if": {
"properties": {
"interval": {
"const": "cron"
}
}
},
"then": {
"required": ["interval", "cronjob"]
},
"else": {
"required": ["interval"]
}
}
]
},
"labels": {
"description": "Labels to set on pull requests (additive - merges with ecosystem-level labels)",
"type": "array",
"items": {
"type": "string",
"minLength": 1
},
"minItems": 0,
"uniqueItems": true
},
"assignees": {
"description": "Assignees to set on pull requests (additive - merges with ecosystem-level assignees)",
"type": "array",
"items": {
"type": "string",
"minLength": 1
},
"minItems": 1,
"uniqueItems": true
},
"milestone": {
"description": "Associate all pull requests raised for this group with a milestone. You need to specify the numeric identifier of the milestone and not its label.",
"type": "integer",
"minimum": 1
},
"target-branch": {
"description": "Specify a different branch for manifest files and for pull requests.",
"type": "string",
"minLength": 1
},
"commit-message": {
"description": "Commit message preferences for the group",
"type": "object",
"properties": {
"prefix": {
"description": "A prefix for all commit messages",
"type": "string",
"maxLength": 50
},
"prefix-development": {
"description": "A separate prefix for all commit messages that update dependencies in the Development dependency group",
"type": "string",
"maxLength": 50
},
"include": {
"description": "Specifies that any prefix is followed by a list of the dependencies updated in the commit",
"type": "string",
"enum": ["scope"]
}
},
"anyOf": [
{ "required": ["prefix"] },
{ "required": ["prefix-development"] },
{ "required": ["include"] }
],
"additionalProperties": false
},
"pull-request-branch-name": {
"description": "Pull request branch name preferences for the group",
"type": "object",
"properties": {
"separator": {
"description": "Change separator for PR branch name",
"type": "string",
"default": "/",
"enum": ["-", "_", "/"]
}
},
"required": ["separator"],
"additionalProperties": false
}
},
"required": ["schedule"]
}
},
"properties": {
@ -1306,14 +1155,6 @@
},
"registries": {
"$ref": "#/definitions/registry"
},
"multi-ecosystem-groups": {
"type": "object",
"description": "Define groups that span multiple package ecosystems, allowing consolidated pull requests across different ecosystems",
"additionalProperties": {
"$ref": "#/definitions/multi-ecosystem-group"
},
"minProperties": 1
}
},
"required": ["version", "updates"],

View file

@ -5,10 +5,7 @@ use clap::ValueEnum;
use serde::{Deserialize, Serialize};
use self::location::{Location, SymbolicLocation};
use crate::{
InputKey, audit::AuditError, finding::location::LocationKind, models::AsDocument,
registry::input::Group,
};
use crate::{InputKey, audit::AuditError, models::AsDocument, registry::input::Group};
use yamlpatch::{self, Patch};
pub(crate) mod location;
@ -249,12 +246,7 @@ impl<'doc> FindingBuilder<'doc> {
locations.extend(self.raw_locations);
if locations.len() == 1
&& let Some(location) = locations.get_mut(0)
{
// If there's only one location, then it's primary by definition.
location.symbolic.kind = LocationKind::Primary;
} else if !locations.iter().any(|l| l.symbolic.is_primary()) {
if !locations.iter().any(|l| l.symbolic.is_primary()) {
return Err(AuditError::new(
self.ident,
anyhow!("API misuse: at least one location must be marked with primary()"),

View file

@ -208,12 +208,12 @@ pub(crate) trait Locatable<'doc> {
fn location(&self) -> SymbolicLocation<'doc>;
/// Returns an "enriched" symbolic location of this model,
/// when the model has one or more "grip" fields that are
/// visually useful to key off of (like a `name` or `id` field).
/// when the model is of a type that has a name. Otherwise,
/// returns the same symbolic location as `location()`.
///
/// For example, a GitHub Actions workflow step has an optional name,
/// which is included in this symbolic location if present.
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
fn location_with_name(&self) -> SymbolicLocation<'doc> {
self.location()
}
}

View file

@ -575,29 +575,6 @@ impl Client {
.max_by_key(|t| t.name.len()))
}
#[instrument(skip(self))]
pub(crate) async fn branch_commits(
&self,
owner: &str,
repo: &str,
commit: &str,
) -> Result<BranchCommits, ClientError> {
// NOTE(ww): This API is undocumented.
// See: https://github.com/orgs/community/discussions/78161
let url = format!("https://github.com/{owner}/{repo}/branch_commits/{commit}");
// We ask GitHub for JSON, because it sends HTML by default for this endpoint.
self.base_client
.get(&url)
.header(ACCEPT, "application/json")
.send()
.await?
.error_for_status()?
.json()
.await
.map_err(Into::into)
}
#[instrument(skip(self))]
pub(crate) async fn compare_commits(
&self,
@ -882,23 +859,6 @@ pub(crate) struct Commit {
pub(crate) sha: String,
}
/// The response structure from GitHub's undocumented `branch_commits` API.
///
/// This model is intentionally incomplete.
#[derive(Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
#[non_exhaustive]
pub(crate) struct BranchCommits {
branches: Vec<serde_json::Value>,
tags: Vec<String>,
}
impl BranchCommits {
pub(crate) fn is_empty(&self) -> bool {
self.branches.is_empty() && self.tags.is_empty()
}
}
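
A sketch of what calling this undocumented endpoint looks like outside the client (reqwest with its `json` feature; error handling condensed):

    async fn branch_commits_raw(
        owner: &str,
        repo: &str,
        commit: &str,
    ) -> reqwest::Result<serde_json::Value> {
        let url = format!("https://github.com/{owner}/{repo}/branch_commits/{commit}");
        reqwest::Client::new()
            .get(&url)
            // GitHub serves HTML from this endpoint unless JSON is
            // requested explicitly.
            .header(reqwest::header::ACCEPT, "application/json")
            .send()
            .await?
            .error_for_status()?
            .json()
            .await
    }

An empty `branches`/`tags` response is exactly what the removed `is_empty` helper above checks for.
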
#[derive(Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum ComparisonStatus {

View file

@ -4,7 +4,7 @@ use std::str::FromStr;
use camino::Utf8Path;
use thiserror::Error;
use tower_lsp_server::ls_types::{self, TextDocumentSyncKind};
use tower_lsp_server::lsp_types::{self, TextDocumentSyncKind};
use tower_lsp_server::{Client, LanguageServer, LspService, Server};
use crate::audit::AuditInput;
@ -25,7 +25,7 @@ pub(crate) struct Error {
}
struct LspDocumentCommon {
uri: ls_types::Uri,
uri: lsp_types::Uri,
text: String,
version: Option<i32>,
}
@ -39,35 +39,35 @@ struct Backend {
impl LanguageServer for Backend {
async fn initialize(
&self,
_: ls_types::InitializeParams,
) -> tower_lsp_server::jsonrpc::Result<ls_types::InitializeResult> {
Ok(ls_types::InitializeResult {
server_info: Some(ls_types::ServerInfo {
_: lsp_types::InitializeParams,
) -> tower_lsp_server::jsonrpc::Result<lsp_types::InitializeResult> {
Ok(lsp_types::InitializeResult {
server_info: Some(lsp_types::ServerInfo {
name: "zizmor (LSP)".into(),
version: Some(env!("CARGO_PKG_VERSION").into()),
}),
capabilities: ls_types::ServerCapabilities {
text_document_sync: Some(ls_types::TextDocumentSyncCapability::Kind(
ls_types::TextDocumentSyncKind::FULL,
capabilities: lsp_types::ServerCapabilities {
text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind(
lsp_types::TextDocumentSyncKind::FULL,
)),
..Default::default()
},
})
}
async fn initialized(&self, _: ls_types::InitializedParams) {
async fn initialized(&self, _: lsp_types::InitializedParams) {
let selectors = vec![
ls_types::DocumentFilter {
lsp_types::DocumentFilter {
language: Some("yaml".into()),
scheme: None,
pattern: Some("**/.github/workflows/*.{yml,yaml}".into()),
},
ls_types::DocumentFilter {
lsp_types::DocumentFilter {
language: Some("yaml".into()),
scheme: None,
pattern: Some("**/action.{yml,yaml}".into()),
},
ls_types::DocumentFilter {
lsp_types::DocumentFilter {
language: Some("yaml".into()),
scheme: None,
pattern: Some("**/.github/dependabot.{yml,yaml}".into()),
@ -80,46 +80,46 @@ impl LanguageServer for Backend {
// neglects to.
self.client
.register_capability(vec![
ls_types::Registration {
lsp_types::Registration {
id: "zizmor-didopen".into(),
method: "textDocument/didOpen".into(),
register_options: Some(
serde_json::to_value(ls_types::TextDocumentRegistrationOptions {
serde_json::to_value(lsp_types::TextDocumentRegistrationOptions {
document_selector: Some(selectors.clone()),
})
.expect("failed to serialize LSP document registration options"),
),
},
ls_types::Registration {
lsp_types::Registration {
id: "zizmor-didchange".into(),
method: "textDocument/didChange".into(),
register_options: Some(
serde_json::to_value(ls_types::TextDocumentChangeRegistrationOptions {
serde_json::to_value(lsp_types::TextDocumentChangeRegistrationOptions {
document_selector: Some(selectors.clone()),
sync_kind: TextDocumentSyncKind::FULL,
})
.expect("failed to serialize LSP document registration options"),
),
},
ls_types::Registration {
lsp_types::Registration {
id: "zizmor-didsave".into(),
method: "textDocument/didSave".into(),
register_options: Some(
serde_json::to_value(ls_types::TextDocumentSaveRegistrationOptions {
serde_json::to_value(lsp_types::TextDocumentSaveRegistrationOptions {
include_text: Some(true),
text_document_registration_options:
ls_types::TextDocumentRegistrationOptions {
lsp_types::TextDocumentRegistrationOptions {
document_selector: Some(selectors.clone()),
},
})
.expect("failed to serialize LSP document registration options"),
),
},
ls_types::Registration {
lsp_types::Registration {
id: "zizmor-didclose".into(),
method: "textDocument/didClose".into(),
register_options: Some(
serde_json::to_value(ls_types::TextDocumentRegistrationOptions {
serde_json::to_value(lsp_types::TextDocumentRegistrationOptions {
document_selector: Some(selectors),
})
.expect("failed to serialize LSP document registration options"),
@ -130,7 +130,7 @@ impl LanguageServer for Backend {
.expect("failed to register text document capabilities with the LSP client");
self.client
.log_message(ls_types::MessageType::INFO, "server initialized!")
.log_message(lsp_types::MessageType::INFO, "server initialized!")
.await;
}
@ -139,7 +139,7 @@ impl LanguageServer for Backend {
Ok(())
}
async fn did_open(&self, params: ls_types::DidOpenTextDocumentParams) {
async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) {
tracing::debug!("did_open: {:?}", params);
self.audit(LspDocumentCommon {
uri: params.text_document.uri,
@ -149,7 +149,7 @@ impl LanguageServer for Backend {
.await;
}
async fn did_change(&self, params: ls_types::DidChangeTextDocumentParams) {
async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) {
tracing::debug!("did_change: {:?}", params);
let mut params = params;
let Some(change) = params.content_changes.pop() else {
@ -164,7 +164,7 @@ impl LanguageServer for Backend {
.await;
}
async fn did_save(&self, params: ls_types::DidSaveTextDocumentParams) {
async fn did_save(&self, params: lsp_types::DidSaveTextDocumentParams) {
tracing::debug!("did_save: {:?}", params);
if let Some(text) = params.text {
self.audit(LspDocumentCommon {
@ -217,15 +217,15 @@ impl Backend {
.iter()
.map(|finding| {
let primary = finding.primary_location();
ls_types::Diagnostic {
range: ls_types::Range {
lsp_types::Diagnostic {
range: lsp_types::Range {
start: primary.concrete.location.start_point.into(),
end: primary.concrete.location.end_point.into(),
},
severity: Some(finding.determinations.severity.into()),
code: Some(ls_types::NumberOrString::String(finding.ident.into())),
code_description: Some(ls_types::CodeDescription {
href: ls_types::Uri::from_str(finding.url)
code: Some(lsp_types::NumberOrString::String(finding.ident.into())),
code_description: Some(lsp_types::CodeDescription {
href: lsp_types::Uri::from_str(finding.url)
.expect("finding contains an invalid URL somehow"),
}),
source: Some("zizmor".into()),
@ -248,25 +248,25 @@ impl Backend {
async fn audit(&self, params: LspDocumentCommon) {
if let Err(e) = self.audit_inner(params).await {
self.client
.log_message(ls_types::MessageType::ERROR, format!("audit failed: {e}"))
.log_message(lsp_types::MessageType::ERROR, format!("audit failed: {e}"))
.await;
}
}
}
impl From<Severity> for ls_types::DiagnosticSeverity {
impl From<Severity> for lsp_types::DiagnosticSeverity {
fn from(value: Severity) -> Self {
// TODO: Does this mapping make sense?
match value {
Severity::Informational => ls_types::DiagnosticSeverity::INFORMATION,
Severity::Low => ls_types::DiagnosticSeverity::WARNING,
Severity::Medium => ls_types::DiagnosticSeverity::WARNING,
Severity::High => ls_types::DiagnosticSeverity::ERROR,
Severity::Informational => lsp_types::DiagnosticSeverity::INFORMATION,
Severity::Low => lsp_types::DiagnosticSeverity::WARNING,
Severity::Medium => lsp_types::DiagnosticSeverity::WARNING,
Severity::High => lsp_types::DiagnosticSeverity::ERROR,
}
}
}
impl From<Point> for ls_types::Position {
impl From<Point> for lsp_types::Position {
fn from(value: Point) -> Self {
Self {
line: value.row as u32,

View file

@ -2,7 +2,6 @@
use std::{
collections::HashSet,
env,
io::{Write, stdout},
process::ExitCode,
};
@ -30,7 +29,6 @@ use tracing_indicatif::{IndicatifLayer, span_ext::IndicatifSpanExt};
use tracing_subscriber::{EnvFilter, layer::SubscriberExt as _, util::SubscriberInitExt as _};
use crate::{
audit::AuditError,
config::{Config, ConfigError, ConfigErrorInner},
github::Client,
models::AsDocument,
@ -118,22 +116,6 @@ struct App {
#[arg(long, value_enum, default_value_t)]
format: OutputFormat,
/// Whether to render OSC 8 links in the output.
///
/// This affects links under audit IDs, as well as any links
/// produced by audit rules.
///
/// Only affects `--format=plain` (the default).
#[arg(long, value_enum, default_value_t, env = "ZIZMOR_RENDER_LINKS")]
render_links: CliRenderLinks,
/// Whether to render audit URLs in the output, separately from any URLs
/// embedded in OSC 8 links.
///
/// Only affects `--format=plain` (the default).
#[arg(long, value_enum, default_value_t, env = "ZIZMOR_SHOW_AUDIT_URLS")]
show_audit_urls: CliShowAuditUrls,
/// Control the use of color in output.
#[arg(long, value_enum, value_name = "MODE")]
color: Option<ColorMode>,
@ -334,79 +316,6 @@ pub(crate) enum OutputFormat {
Github,
}
#[derive(Debug, Default, Copy, Clone, ValueEnum)]
pub(crate) enum CliRenderLinks {
/// Render OSC 8 links in output if support is detected.
#[default]
Auto,
/// Always render OSC 8 links in output.
Always,
/// Never render OSC 8 links in output.
Never,
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum RenderLinks {
Always,
Never,
}
impl From<CliRenderLinks> for RenderLinks {
fn from(value: CliRenderLinks) -> Self {
match value {
CliRenderLinks::Auto => {
// We render links if stdout is a terminal. This is assumed
// to preclude CI environments and log files.
//
// TODO: Switch this to the support-hyperlinks crate?
// See: https://github.com/zkat/supports-hyperlinks/pull/8
if stdout().is_terminal() {
RenderLinks::Always
} else {
RenderLinks::Never
}
}
CliRenderLinks::Always => RenderLinks::Always,
CliRenderLinks::Never => RenderLinks::Never,
}
}
}
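
For reference, the hyperlinks these flags gate use the standard OSC 8 escape framing; a minimal emitter:

    fn osc8_link(url: &str, text: &str) -> String {
        // ESC ] 8 ;; url ST, then the visible text, then an empty
        // ESC ] 8 ;; ST to close the link.
        format!("\x1b]8;;{url}\x1b\\{text}\x1b]8;;\x1b\\")
    }
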
#[derive(Debug, Default, Copy, Clone, ValueEnum)]
pub(crate) enum CliShowAuditUrls {
/// Render audit URLs in output automatically based on output format and runtime context.
///
/// For example, URLs will be shown if a CI runtime is detected.
#[default]
Auto,
/// Always render audit URLs in output.
Always,
/// Never render audit URLs in output.
Never,
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum ShowAuditUrls {
Always,
Never,
}
impl From<CliShowAuditUrls> for ShowAuditUrls {
fn from(value: CliShowAuditUrls) -> Self {
match value {
CliShowAuditUrls::Auto => {
if utils::is_ci() || !stdout().is_terminal() {
ShowAuditUrls::Always
} else {
ShowAuditUrls::Never
}
}
CliShowAuditUrls::Always => ShowAuditUrls::Always,
CliShowAuditUrls::Never => ShowAuditUrls::Never,
}
}
}
#[derive(Debug, Copy, Clone, ValueEnum)]
pub(crate) enum ColorMode {
/// Use color output if the output supports it.
@ -642,10 +551,10 @@ enum Error {
#[error("failed to load audit rules")]
AuditLoad(#[source] anyhow::Error),
/// An error while running an audit.
#[error("'{ident}' audit failed on {input}")]
#[error("{ident} failed on {input}")]
Audit {
ident: &'static str,
source: AuditError,
source: anyhow::Error,
input: String,
},
/// An error while rendering output.
@ -688,7 +597,6 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
ColorMode::Never
} else if std::env::var("FORCE_COLOR").is_ok()
|| std::env::var("CLICOLOR_FORCE").is_ok()
|| utils::is_ci()
{
ColorMode::Always
} else {
@ -843,7 +751,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
while let Some(findings) = completion_stream.next().await {
let findings = findings.map_err(|err| Error::Audit {
ident: err.ident(),
source: err,
source: err.into(),
input: input.key().to_string(),
})?;
@ -860,13 +768,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
}
match app.format {
OutputFormat::Plain => output::plain::render_findings(
&registry,
&results,
&app.show_audit_urls.into(),
&app.render_links.into(),
app.naches,
),
OutputFormat::Plain => output::plain::render_findings(&registry, &results, app.naches),
OutputFormat::Json | OutputFormat::JsonV1 => {
output::json::v1::output(stdout(), results.findings()).map_err(Error::Output)?
}
@ -909,7 +811,7 @@ async fn main() -> ExitCode {
// which is then typically inaccessible from an already failed
// CI job. In those cases, it's better to dump directly to stderr,
// since that'll typically be captured by console logging.
if utils::is_ci() {
if std::env::var_os("CI").is_some() {
std::panic::set_hook(Box::new(|info| {
let trace = std::backtrace::Backtrace::force_capture();
eprintln!("FATAL: zizmor crashed. This is a bug that should be reported.");
@ -969,16 +871,6 @@ async fn main() -> ExitCode {
Some(report)
}
Error::Collection(err) => match err.inner() {
CollectionError::NoInputs => {
let group = Group::with_title(Level::ERROR.primary_title(err.to_string()))
.element(Level::HELP.message("collection yielded no auditable inputs"))
.element(Level::HELP.message("inputs must contain at least one valid workflow, action, or Dependabot config"));
let renderer = Renderer::styled();
let report = renderer.render(&[group]);
Some(report)
}
CollectionError::DuplicateInput(..) => {
let group = Group::with_title(Level::ERROR.primary_title(err.to_string()))
.element(Level::HELP.message(format!(
@ -1015,8 +907,7 @@ async fn main() -> ExitCode {
Some(report)
}
// These errors only happen if something is wrong with zizmor itself.
CollectionError::Yamlpath(..) | CollectionError::Model(..) => {
CollectionError::Yamlpath(..) => {
let group = Group::with_title(Level::ERROR.primary_title(err.to_string())).elements([
Level::HELP.message("this typically indicates a bug in zizmor; please report it"),
Level::HELP.message(

View file

@ -7,7 +7,7 @@ use github_actions_models::common::Env;
use github_actions_models::common::expr::LoE;
use github_actions_models::workflow::job::Strategy;
use crate::finding::location::{Locatable, SymbolicLocation};
use crate::finding::location::Locatable;
use crate::models::inputs::HasInputs;
pub(crate) mod action;
@ -45,7 +45,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs {
fn env_is_static(&self, ctx: &context::Context) -> bool;
/// Returns a [`common::Uses`] for this step, if it has one.
fn uses(&self) -> Option<&'doc common::Uses>;
fn uses(&self) -> Option<&common::Uses>;
/// Returns this step's job's strategy, if present.
///
@ -64,7 +64,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs {
///
/// Returns `None` if the shell cannot be statically determined, including
/// if the shell is specified via an expression.
fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)>;
fn shell(&self) -> Option<&str>;
}
impl<'a, 'doc, T: StepCommon<'doc>> AsDocument<'a, 'doc> for T {

View file

@ -175,13 +175,10 @@ impl<'doc> Locatable<'doc> for CompositeStep<'doc> {
])
}
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
if self.inner.name.is_some() {
self.location().with_keys(["name".into()])
} else if self.inner.id.is_some() {
self.location().with_keys(["id".into()])
} else {
self.location()
fn location_with_name(&self) -> SymbolicLocation<'doc> {
match self.inner.name {
Some(_) => self.location().with_keys(["name".into()]),
None => self.location(),
}
}
}
@ -201,7 +198,7 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> {
utils::env_is_static(ctx, &[&self.env])
}
fn uses(&self) -> Option<&'doc common::Uses> {
fn uses(&self) -> Option<&common::Uses> {
let action::StepBody::Uses { uses, .. } = &self.inner.body else {
return None;
};
@ -232,19 +229,14 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> {
self.action().as_document()
}
fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
fn shell(&self) -> Option<&str> {
// For composite action steps, shell is always explicitly specified in the YAML.
if let action::StepBody::Run {
shell: LoE::Literal(shell),
..
} = &self.inner.body
{
Some((
shell,
self.location()
.with_keys(["shell".into()])
.annotated("shell defined here"),
))
Some(shell)
} else {
None
}

View file

@ -131,7 +131,7 @@ impl<'doc> Locatable<'doc> for Update<'doc> {
.annotated("this update rule")
}
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
fn location_with_name(&self) -> SymbolicLocation<'doc> {
self.location()
.with_keys(["package-ecosystem".into()])
.annotated("this ecosystem")

View file

@ -1,18 +1,18 @@
//! Extension traits for the `Uses` APIs.
use std::{str::FromStr, sync::LazyLock};
use std::str::FromStr;
use github_actions_models::common::{RepositoryUses, Uses};
use regex::Regex;
use serde::Deserialize;
/// Matches all variants of [`RepositoryUsesPattern`] except `*`.
///
/// TODO: Replace this with a real parser; this is ridiculous.
static REPOSITORY_USES_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
#[allow(clippy::unwrap_used)]
Regex::new(
r#"(?xmi) # verbose, multi-line mode, case-insensitive
use crate::utils::once::static_regex;
// Matches all variants of [`RepositoryUsesPattern`] except `*`.
//
// TODO: Replace this with a real parser; this is ridiculous.
static_regex!(
REPOSITORY_USES_PATTERN,
r#"(?xmi) # verbose, multi-line mode, case-insensitive
^ # start of line
([\w-]+) # (1) owner
/ # /
@ -30,10 +30,8 @@ static REPOSITORY_USES_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
([[[:graph:]]&&[^\*]]+) # (4) git ref (any non-space, non-* characters)
)? # end of non-capturing group for optional git ref
$ # end of line
"#,
)
.unwrap()
});
"#
);
/// Represents a pattern for matching repository `uses` references.
/// These patterns are ordered by specificity; more specific patterns
@ -73,10 +71,10 @@ impl RepositoryUsesPattern {
subpath,
git_ref,
} => {
uses.owner().eq_ignore_ascii_case(owner)
&& uses.repo().eq_ignore_ascii_case(repo)
&& uses.subpath() == subpath.as_deref()
&& uses.git_ref() == git_ref
uses.owner.eq_ignore_ascii_case(owner)
&& uses.repo.eq_ignore_ascii_case(repo)
&& uses.subpath == *subpath
&& uses.git_ref.as_str() == git_ref
}
RepositoryUsesPattern::ExactPath {
owner,
@ -89,19 +87,19 @@ impl RepositoryUsesPattern {
// Utf8Path gets us part of the way there, but is
// platform dependent (i.e. will do the wrong thing
// if the platform separator is not /).
uses.owner().eq_ignore_ascii_case(owner)
&& uses.repo().eq_ignore_ascii_case(repo)
&& uses.subpath().is_some_and(|s| s == subpath)
uses.owner.eq_ignore_ascii_case(owner)
&& uses.repo.eq_ignore_ascii_case(repo)
&& uses.subpath.as_deref().is_some_and(|s| s == subpath)
}
RepositoryUsesPattern::ExactRepo { owner, repo } => {
uses.owner().eq_ignore_ascii_case(owner)
&& uses.repo().eq_ignore_ascii_case(repo)
&& uses.subpath().is_none()
uses.owner.eq_ignore_ascii_case(owner)
&& uses.repo.eq_ignore_ascii_case(repo)
&& uses.subpath.is_none()
}
RepositoryUsesPattern::InRepo { owner, repo } => {
uses.owner().eq_ignore_ascii_case(owner) && uses.repo().eq_ignore_ascii_case(repo)
uses.owner.eq_ignore_ascii_case(owner) && uses.repo.eq_ignore_ascii_case(repo)
}
RepositoryUsesPattern::InOwner(owner) => uses.owner().eq_ignore_ascii_case(owner),
RepositoryUsesPattern::InOwner(owner) => uses.owner.eq_ignore_ascii_case(owner),
RepositoryUsesPattern::Any => true,
}
}
@ -220,18 +218,18 @@ impl RepositoryUsesExt for RepositoryUses {
}
fn ref_is_commit(&self) -> bool {
self.git_ref().len() == 40 && self.git_ref().chars().all(|c| c.is_ascii_hexdigit())
self.git_ref.len() == 40 && self.git_ref.chars().all(|c| c.is_ascii_hexdigit())
}
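
The predicate above, as a free function: a ref counts as a commit pin only when it is a full 40-character hex SHA-1.

    fn is_commit_ref(git_ref: &str) -> bool {
        git_ref.len() == 40 && git_ref.chars().all(|c| c.is_ascii_hexdigit())
    }

Short refs and tags (`v4`, `main`) fail the length check, and a 40-character branch name with non-hex characters fails the digit check.
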
fn commit_ref(&self) -> Option<&str> {
match &self.git_ref() {
match &self.git_ref {
git_ref if self.ref_is_commit() => Some(git_ref),
_ => None,
}
}
fn symbolic_ref(&self) -> Option<&str> {
match &self.git_ref() {
match &self.git_ref {
git_ref if !self.ref_is_commit() => Some(git_ref),
_ => None,
}
@ -248,7 +246,7 @@ impl UsesExt for Uses {
/// Whether the `uses:` is unpinned.
fn unpinned(&self) -> bool {
match self {
Uses::Docker(docker) => docker.hash().is_none() && docker.tag().is_none(),
Uses::Docker(docker) => docker.hash.is_none() && docker.tag.is_none(),
Uses::Repository(_) => false,
// Local `uses:` are always unpinned; any `@ref` component
// is actually part of the path.
@ -265,7 +263,7 @@ impl UsesExt for Uses {
// (since it's fully contained within the calling repo),
Uses::Local(_) => false,
Uses::Repository(repo) => !repo.ref_is_commit(),
Uses::Docker(docker) => docker.hash().is_none(),
Uses::Docker(docker) => docker.hash.is_none(),
}
}
}
@ -479,7 +477,7 @@ mod tests {
("actions/checkout/foo@v3", "actions/checkout/foo@v3", true),
("actions/checkout/foo@v1", "actions/checkout/foo@v3", false),
] {
let Ok(Uses::Repository(uses)) = Uses::parse(uses) else {
let Ok(Uses::Repository(uses)) = Uses::from_str(uses) else {
return Err(anyhow!("invalid uses: {uses}"));
};

View file

@ -286,13 +286,7 @@ impl<'doc> NormalJob<'doc> {
}
}
impl<'a, 'doc> AsDocument<'a, 'doc> for NormalJob<'doc> {
fn as_document(&'a self) -> &'doc yamlpath::Document {
self.parent.as_document()
}
}
impl<'doc> JobCommon<'doc> for NormalJob<'doc> {
impl<'doc> JobExt<'doc> for NormalJob<'doc> {
fn id(&self) -> &'doc str {
self.id
}
@ -335,13 +329,7 @@ impl<'doc> ReusableWorkflowCallJob<'doc> {
}
}
impl<'a, 'doc> AsDocument<'a, 'doc> for ReusableWorkflowCallJob<'doc> {
fn as_document(&'a self) -> &'doc yamlpath::Document {
self.parent.as_document()
}
}
impl<'doc> JobCommon<'doc> for ReusableWorkflowCallJob<'doc> {
impl<'doc> JobExt<'doc> for ReusableWorkflowCallJob<'doc> {
fn id(&self) -> &'doc str {
self.id
}
@ -364,7 +352,7 @@ impl<'doc> std::ops::Deref for ReusableWorkflowCallJob<'doc> {
}
/// Common behavior across both normal and reusable jobs.
pub(crate) trait JobCommon<'doc>: Locatable<'doc> {
pub(crate) trait JobExt<'doc> {
/// The job's unique ID (i.e., its key in the workflow's `jobs:` block).
fn id(&self) -> &'doc str;
@ -375,7 +363,7 @@ pub(crate) trait JobCommon<'doc>: Locatable<'doc> {
fn parent(&self) -> &'doc Workflow;
}
impl<'doc, T: JobCommon<'doc>> Locatable<'doc> for T {
impl<'doc, T: JobExt<'doc>> Locatable<'doc> for T {
/// Returns this job's [`SymbolicLocation`].
fn location(&self) -> SymbolicLocation<'doc> {
self.parent()
@ -384,15 +372,10 @@ impl<'doc, T: JobCommon<'doc>> Locatable<'doc> for T {
.with_keys(["jobs".into(), self.id().into()])
}
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
if self.name().is_some() {
self.location().with_keys(["name".into()])
} else {
self.parent()
.location()
.annotated("this job")
.with_keys(["jobs".into(), self.id().into()])
.key_only()
fn location_with_name(&self) -> SymbolicLocation<'doc> {
match self.name() {
Some(_) => self.location().with_keys(["name".into()]),
None => self.location(),
}
}
}
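
The rename above preserves the useful property of this design: `Locatable` is blanket-implemented for every `JobExt`, so each job type only has to supply `id()`, `name()`, and `parent()`. The same pattern, reduced to a self-contained sketch (trait and type names here are illustrative, not zizmor's):

// Any type providing the small accessor trait...
trait JobLike {
    fn id(&self) -> &str;
}

// ...gets the derived behavior for free via a blanket impl.
trait Describable {
    fn describe(&self) -> String;
}

impl<T: JobLike> Describable for T {
    fn describe(&self) -> String {
        format!("jobs.{}", self.id())
    }
}

struct ToyJob;
impl JobLike for ToyJob {
    fn id(&self) -> &str {
        "build"
    }
}

fn main() {
    assert_eq!(ToyJob.describe(), "jobs.build");
}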
@ -634,13 +617,10 @@ impl<'doc> Locatable<'doc> for Step<'doc> {
.annotated("this step")
}
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
if self.inner.name.is_some() {
self.location().with_keys(["name".into()])
} else if self.inner.id.is_some() {
self.location().with_keys(["id".into()])
} else {
self.location()
fn location_with_name(&self) -> SymbolicLocation<'doc> {
match self.inner.name {
Some(_) => self.location().with_keys(["name".into()]),
None => self.location(),
}
}
}
@ -660,7 +640,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> {
utils::env_is_static(ctx, &[&self.env, &self.job().env, &self.workflow().env])
}
fn uses(&self) -> Option<&'doc common::Uses> {
fn uses(&self) -> Option<&common::Uses> {
let StepBody::Uses { uses, .. } = &self.inner.body else {
return None;
};
@ -691,7 +671,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> {
self.workflow().as_document()
}
fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
fn shell(&self) -> Option<&str> {
// For workflow steps, we can use the existing shell() method
self.shell()
}
@ -720,7 +700,7 @@ impl<'doc> Step<'doc> {
/// if the shell can't be statically inferred.
///
/// Invariant: panics if the step is not a `run:` step.
pub(crate) fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
pub(crate) fn shell(&self) -> Option<&str> {
let StepBody::Run {
run: _,
working_directory: _,
@ -736,12 +716,7 @@ impl<'doc> Step<'doc> {
// If any of these is an expression, we can't infer the shell
// statically, so we terminate early with `None`.
let shell = match shell {
Some(LoE::Literal(shell)) => Some((
shell.as_str(),
self.location()
.with_keys(["shell".into()])
.annotated("shell defined here"),
)),
Some(LoE::Literal(shell)) => Some(shell.as_str()),
Some(LoE::Expr(_)) => return None,
None => match self
.job()
@ -749,13 +724,7 @@ impl<'doc> Step<'doc> {
.as_ref()
.and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref()))
{
Some(LoE::Literal(shell)) => Some((
shell.as_str(),
self.job()
.location()
.with_keys(["defaults".into(), "run".into(), "shell".into()])
.annotated("job default shell defined here"),
)),
Some(LoE::Literal(shell)) => Some(shell.as_str()),
Some(LoE::Expr(_)) => return None,
None => match self
.workflow()
@ -763,30 +732,14 @@ impl<'doc> Step<'doc> {
.as_ref()
.and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref()))
{
Some(LoE::Literal(shell)) => Some((
shell.as_str(),
self.workflow()
.location()
.with_keys(["defaults".into(), "run".into(), "shell".into()])
.annotated("workflow default shell defined here"),
)),
Some(LoE::Literal(shell)) => Some(shell.as_str()),
Some(LoE::Expr(_)) => return None,
None => None,
},
},
};
shell.or_else(|| {
self.parent.runner_default_shell().map(|shell| {
(
shell,
self.job()
.location()
.with_keys(["runs-on".into()])
.annotated("shell implied by runner"),
)
})
})
shell.or_else(|| self.parent.runner_default_shell())
}
}
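
The restructured `shell()` above walks a fixed precedence chain: the step's own `shell:`, then the job's `defaults.run.shell`, then the workflow's, then the runner default, bailing out entirely whenever a level is an expression. A standalone sketch of that ordering (function shape assumed):

// Precedence sketch: the first Some wins; None stands for "not set".
// Note the real code additionally returns early when a level is a
// ${{ ... }} expression rather than falling through to the next level.
fn infer_shell<'a>(
    step: Option<&'a str>,
    job_default: Option<&'a str>,
    workflow_default: Option<&'a str>,
    runner_default: Option<&'a str>,
) -> Option<&'a str> {
    step.or(job_default).or(workflow_default).or(runner_default)
}

fn main() {
    assert_eq!(infer_shell(None, Some("pwsh"), Some("bash"), Some("bash")), Some("pwsh"));
    assert_eq!(infer_shell(None, None, None, Some("bash")), Some("bash"));
}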

View file

@ -7,7 +7,6 @@ use anstream::{eprintln, print, println};
use owo_colors::OwoColorize;
use crate::{
RenderLinks, ShowAuditUrls,
finding::{
Finding, Severity,
location::{Location, LocationKind},
@ -44,7 +43,6 @@ impl From<&Severity> for Level<'_> {
pub(crate) fn finding_snippets<'doc>(
registry: &'doc InputRegistry,
finding: &'doc Finding<'doc>,
render_links_mode: &RenderLinks,
) -> Vec<Snippet<'doc, Annotation<'doc>>> {
// Our finding might span multiple workflows, so we need to group locations
// by their enclosing workflow to generate each snippet correctly.
@ -69,20 +67,15 @@ pub(crate) fn finding_snippets<'doc>(
for (input_key, locations) in locations_by_workflow {
let input = registry.get_input(input_key);
let path = match render_links_mode {
RenderLinks::Always => input.link().unwrap_or(input_key.presentation_path()),
RenderLinks::Never => input_key.presentation_path(),
};
snippets.push(
Snippet::source(input.as_document().source())
.fold(true)
.line_start(1)
.path(path)
.path(input.link().unwrap_or(input_key.presentation_path()))
.annotations(locations.iter().map(|loc| {
let annotation = match (loc.symbolic.link.as_deref(), render_links_mode) {
(Some(link), RenderLinks::Always) => link,
_ => &loc.symbolic.annotation,
let annotation = match loc.symbolic.link {
Some(ref link) => link,
None => &loc.symbolic.annotation,
};
AnnotationKind::from(loc.symbolic.kind)
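
As the comment above notes, a finding's locations can span several inputs, so they are bucketed per input before snippets are built. The grouping step, sketched over assumed plain data (an input key plus a line number standing in for a full location):

use std::collections::BTreeMap;

// Bucket (input_key, line) pairs by key so each input yields one snippet.
fn group_by_input(locations: &[(&'static str, u32)]) -> BTreeMap<&'static str, Vec<u32>> {
    let mut buckets: BTreeMap<&'static str, Vec<u32>> = BTreeMap::new();
    for &(key, line) in locations {
        buckets.entry(key).or_default().push(line);
    }
    buckets
}

fn main() {
    let grouped = group_by_input(&[("ci.yml", 12), ("release.yml", 3), ("ci.yml", 40)]);
    assert_eq!(grouped["ci.yml"], vec![12, 40]);
}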
@ -101,12 +94,10 @@ pub(crate) fn finding_snippets<'doc>(
pub(crate) fn render_findings(
registry: &InputRegistry,
findings: &FindingRegistry,
show_urls_mode: &ShowAuditUrls,
render_links_mode: &RenderLinks,
naches_mode: bool,
) {
for finding in findings.findings() {
render_finding(registry, finding, show_urls_mode, render_links_mode);
render_finding(registry, finding);
println!();
}
@ -199,19 +190,11 @@ pub(crate) fn render_findings(
}
}
fn render_finding(
registry: &InputRegistry,
finding: &Finding,
show_urls_mode: &ShowAuditUrls,
render_links_mode: &RenderLinks,
) {
let mut title = Level::from(&finding.determinations.severity)
fn render_finding(registry: &InputRegistry, finding: &Finding) {
let title = Level::from(&finding.determinations.severity)
.primary_title(finding.desc)
.id(finding.ident);
if matches!(render_links_mode, RenderLinks::Always) {
title = title.id_url(finding.url);
}
.id(finding.ident)
.id_url(finding.url);
let confidence = format!(
"audit confidence → {:?}",
@ -219,7 +202,7 @@ fn render_finding(
);
let mut group = Group::with_title(title)
.elements(finding_snippets(registry, finding, render_links_mode))
.elements(finding_snippets(registry, finding))
.element(Level::NOTE.message(confidence));
if let Some(tip) = &finding.tip {
@ -230,13 +213,6 @@ fn render_finding(
group = group.element(Level::NOTE.message("this finding has an auto-fix"));
}
if matches!(show_urls_mode, ShowAuditUrls::Always) {
group = group.element(Level::HELP.message(format!(
"audit documentation → {url}",
url = finding.url.green()
)))
}
// TODO: Evaluate alternative decor styles.
let renderer = Renderer::styled();
println!("{}", renderer.render(&[group]));

View file

@ -74,7 +74,6 @@ impl AuditRegistry {
register_audit!(audit::dependabot_execution::DependabotExecution);
register_audit!(audit::dependabot_cooldown::DependabotCooldown);
register_audit!(audit::concurrency_limits::ConcurrencyLimits);
register_audit!(audit::archived_uses::ArchivedUses);
Ok(registry)
}

View file

@ -34,7 +34,7 @@ pub(crate) enum CollectionError {
/// The input couldn't be converted into the expected model.
/// This typically indicates a bug in `github-actions-models`.
#[error("couldn't turn input into a an appropriate model")]
Model(#[from] serde_yaml::Error),
Model(#[source] anyhow::Error),
/// The input couldn't be loaded into an internal yamlpath document.
/// This typically indicates a bug in `yamlpath`.
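
The variant change above swaps a `#[from] serde_yaml::Error` conversion for an explicitly wrapped `#[source] anyhow::Error`, which forces call sites to construct the variant deliberately. A reduced thiserror sketch of the shape (variant set trimmed to what this diff shows, and the `Syntax`/`Schema` messages are placeholders):

// Assumed dependency: thiserror (the #[error]/#[source] attributes
// above are its derive syntax).
#[derive(Debug, thiserror::Error)]
enum CollectionError {
    /// Semantic mismatch between input and model.
    #[error("couldn't turn input into an appropriate model")]
    Model(#[source] anyhow::Error),
    /// The input isn't valid YAML at all.
    #[error("invalid YAML syntax")]
    Syntax(#[source] anyhow::Error),
    /// The input is valid YAML but violates the schema.
    #[error("input violates schema")]
    Schema(#[source] anyhow::Error),
}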

View file

@ -1,11 +1,15 @@
//! Helper routines.
use anyhow::{Error, anyhow};
use anyhow::{Context as _, Error, anyhow};
use camino::Utf8Path;
use github_actions_expressions::context::{Context, ContextPattern};
use github_actions_models::common::{Env, expr::LoE};
use jsonschema::ErrorEntry;
use jsonschema::{Validator, validator_for};
use jsonschema::{
BasicOutput::{Invalid, Valid},
Validator,
output::{ErrorDescription, OutputUnit},
validator_for,
};
use std::ops::{Deref, Range};
use std::{fmt::Write, sync::LazyLock};
@ -303,11 +307,11 @@ pub(crate) static DEFAULT_ENVIRONMENT_VARIABLES: &[(
),
];
fn parse_validation_errors(errors: Vec<ErrorEntry<'_>>) -> Error {
fn parse_validation_errors(errors: Vec<OutputUnit<ErrorDescription>>) -> Error {
let mut message = String::new();
for error in errors {
let description = error.error.to_string();
let description = error.error_description().to_string();
// HACK: error descriptions are sometimes a long rats' nest
// of JSON objects. We should render this in a palatable way
// but doing so is nontrivial, so we just skip them for now.
@ -315,7 +319,7 @@ fn parse_validation_errors(errors: Vec<ErrorEntry<'_>>) -> Error {
// the error for an unmatched "oneOf", so these errors are
// typically less useful anyways.
if !description.starts_with("{") {
let location = error.instance_location.as_str();
let location = error.instance_location().as_str();
if location.is_empty() {
writeln!(message, "{description}").expect("I/O on a String failed");
} else {
@ -349,17 +353,11 @@ where
// to distinguish between syntax and semantic errors,
// but serde-yaml doesn't give us an API to do that.
// To approximate it, we re-parse the input as a
// `serde_yaml::Mapping`, then convert that `serde_yaml::Mapping`
// into a `serde_json::Value` and use it as an oracle -- a successful
// `Value` and use that as an oracle -- a successful
// re-parse indicates that the input is valid YAML and
// that our error is semantic, while a failed re-parse
// indicates a syntax error.
//
// We need to round-trip through a `serde_yaml::Mapping` to ensure that
// all of YAML's validity rules are preserved -- directly deserializing
// into a `serde_json::Value` would miss some YAML-specific checks,
// like duplicate keys within mappings. See #1395 for an example of this.
//
// We do this in a nested fashion to avoid re-parsing
// the input twice if we can help it, and because the
// more obvious trick (`serde_yaml::from_value`) doesn't
@ -368,26 +366,21 @@ where
// See: https://github.com/dtolnay/serde-yaml/issues/170
// See: https://github.com/dtolnay/serde-yaml/issues/395
match serde_yaml::from_str::<serde_yaml::Mapping>(contents) {
match serde_yaml::from_str(contents) {
// We know we have valid YAML, so one of two things happened here:
// 1. The input is semantically valid, but we have a bug in
// `github-actions-models`.
// 2. The input is semantically invalid, and the user
// needs to fix it.
// We use the JSON schema `validator` to separate these.
Ok(raw_value) => {
let evaluation = validator.evaluate(
&serde_json::to_value(&raw_value)
.map_err(|e| CollectionError::Syntax(e.into()))?,
);
if evaluation.flag().valid {
Err(e.into())
} else {
let errors = evaluation.iter_errors().collect::<Vec<_>>();
Ok(raw_value) => match validator.apply(&raw_value).basic() {
Valid(_) => Err(e)
.context("this suggests a bug in zizmor; please report it!")
.map_err(CollectionError::Model),
Invalid(errors) => {
Err(CollectionError::Schema(parse_validation_errors(errors)))
}
}
},
// Syntax error.
Err(e) => Err(CollectionError::Syntax(e.into())),
}
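
The control flow above implements a triage oracle: when typed deserialization fails, the input is re-parsed as a generic `serde_yaml::Mapping`; success means the original failure was semantic (a model or schema problem), failure means it was syntactic. A standalone sketch of just that oracle, to be called after a typed parse has already failed (schema validation elided):

// Triage sketch: classify a failed typed parse as syntactic vs. semantic.
fn triage_parse_error(contents: &str) -> &'static str {
    match serde_yaml::from_str::<serde_yaml::Mapping>(contents) {
        // Valid YAML overall, so the typed failure was semantic
        // (handed to the schema validator in the real code).
        Ok(_) => "semantic error",
        // Not even valid YAML: a syntax error.
        Err(_) => "syntax error",
    }
}

fn main() {
    assert_eq!(triage_parse_error("on: [push"), "syntax error");
    assert_eq!(triage_parse_error("on: push"), "semantic error");
}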
@ -710,13 +703,6 @@ pub(crate) mod once {
pub(crate) use warn_once;
}
/// Returns whether we are running in a CI environment.
pub(crate) fn is_ci() -> bool {
static IS_CI: LazyLock<bool> = LazyLock::new(|| std::env::var_os("CI").is_some());
*IS_CI
}
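
`is_ci()` above uses a function-local `LazyLock` so the environment is consulted exactly once per process. The same memoization pattern generalized (the helper name and environment variable here are hypothetical, not from this diff):

use std::sync::LazyLock;

// Hypothetical helper following the same pattern: read the env var once,
// cache the result for the life of the process.
fn in_github_actions() -> bool {
    static FLAG: LazyLock<bool> =
        LazyLock::new(|| std::env::var_os("GITHUB_ACTIONS").is_some());
    *FLAG
}

fn main() {
    println!("GitHub Actions: {}", in_github_actions());
}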
#[cfg(test)]
mod tests {
use anyhow::Result;

View file

@ -196,11 +196,15 @@ fn audit_unpinned_uses() -> anyhow::Result<()> {
assert_value_match(&findings, "$[0].determinations.confidence", "High");
assert_value_match(&findings, "$[0].determinations.severity", "Medium");
assert_value_match(&findings, "$[0].locations[0].concrete.feature", "ubuntu");
assert_value_match(
&findings,
"$[0].locations[0].concrete.feature",
"uses: docker://ubuntu",
);
assert_value_match(
&findings,
"$[1].locations[0].concrete.feature",
"ghcr.io/pypa/gh-action-pypi-publish",
"uses: docker://ghcr.io/pypa/gh-action-pypi-publish",
);
Ok(())

View file

@ -7,7 +7,7 @@ fn test_regular_persona() -> anyhow::Result<()> {
zizmor()
.input(input_under_test("anonymous-definition.yml"))
.run()?,
@"No findings to report. Good job! (2 suppressed)"
@r"No findings to report. Good job! (2 suppressed)"
);
Ok(())
@ -34,16 +34,17 @@ fn test_pedantic_persona() -> anyhow::Result<()> {
| |__________________________________________^ this workflow
|
= note: audit confidence High
= tip: use 'name: ...' to give this workflow a name
info[anonymous-definition]: workflow or action definition without a name
--> @@INPUT@@:21:3
|
21 | will-trigger:
| ^^^^^^^^^^^^ this job
21 | / will-trigger:
22 | | runs-on: ubuntu-latest
23 | | steps:
24 | | - run: "echo this job will trigger"
| |__________________________________________^ this job
|
= note: audit confidence High
= tip: use 'name: ...' to give this job a name
2 findings: 1 informational, 1 low, 0 medium, 0 high
"#

View file

@ -1,67 +0,0 @@
use crate::common::{input_under_test, zizmor};
#[test]
fn test_regular_persona() -> anyhow::Result<()> {
insta::assert_snapshot!(
zizmor().input(input_under_test("archived-uses.yml")).run()?,
@r"
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@:17:15
|
16 | - name: setup ruby
| ---------------- this step
17 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@:20:15
|
19 | - name: SETUP RUBY BUT LOUDLY
| --------------------------- this step
20 | uses: ACTIONS/SETUP-RUBY@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@:24:11
|
23 | name: archived-uses-reusable
| ---------------------------- this job
24 | uses: actions/setup-ruby/.github/workflows/notreal.yml@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
3 findings: 0 informational, 0 low, 3 medium, 0 high
"
);
Ok(())
}
#[test]
fn test_composite_action() -> anyhow::Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("archived-uses/action/"))
.run()?,
@r"
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@action.yml:9:13
|
8 | - name: setup ruby
| ---------------- this step
9 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
1 finding: 0 informational, 0 low, 1 medium, 0 high
"
);
Ok(())
}

View file

@ -80,28 +80,28 @@ fn test_jobs_missing_no_cancel() -> anyhow::Result<()> {
.args(["--persona=pedantic"])
.run()?,
@r"
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:9:5
|
9 | concurrency: group
| ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress
|
= note: audit confidence High
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:9:5
|
9 | concurrency: group
| ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress
|
= note: audit confidence High
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:1:1
|
1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency
2 | | on: push
3 | | permissions: {}
... |
17 | | - name: 2-ok
18 | | run: echo ok
| |___________________^ missing concurrency setting
|
= note: audit confidence High
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:1:1
|
1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency
2 | | on: push
3 | | permissions: {}
... |
17 | | - name: 2-ok
18 | | run: echo ok
| |___________________^ missing concurrency setting
|
= note: audit confidence High
2 findings: 0 informational, 2 low, 0 medium, 0 high
2 findings: 0 informational, 2 low, 0 medium, 0 high
"
);

View file

@ -165,7 +165,7 @@ fn test_config_short_cooldown_permitted() -> anyhow::Result<()> {
.input(input_under_test("dependabot-cooldown/default-days-too-short/dependabot.yml"))
.config(input_under_test("dependabot-cooldown/configs/cooldown-one-day.yml"))
.run()?,
@"No findings to report. Good job!"
@r"No findings to report. Good job!"
);
Ok(())

View file

@ -27,26 +27,26 @@ fn test_deny_all() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:13:15
--> @@INPUT@@:13:9
|
13 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:14:15
--> @@INPUT@@:14:9
|
14 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:15:15
--> @@INPUT@@:15:9
|
15 | - uses: actions/checkout@v4
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -68,10 +68,10 @@ fn test_allow_some() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:13:15
--> @@INPUT@@:13:9
|
13 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -93,18 +93,18 @@ fn test_deny_some() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:14:15
--> @@INPUT@@:14:9
|
14 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:15:15
--> @@INPUT@@:15:9
|
15 | - uses: actions/checkout@v4
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -126,18 +126,18 @@ fn test_deny_some_refs() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:13:15
--> @@INPUT@@:13:9
|
13 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:14:15
--> @@INPUT@@:14:9
|
14 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -159,10 +159,10 @@ fn test_allow_some_refs() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:15:15
--> @@INPUT@@:15:9
|
15 | - uses: actions/checkout@v4
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High

View file

@ -1,31 +0,0 @@
use crate::common::{input_under_test, zizmor};
#[cfg_attr(not(feature = "gh-token-tests"), ignore)]
#[test]
fn test_regular_persona() -> anyhow::Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("impostor-commit.yml"))
.offline(false)
.run()?,
@r"
error[impostor-commit]: commit with no history in referenced repository
--> @@INPUT@@:29:15
|
29 | - uses: actions/checkout@c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e
| - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ uses a commit that doesn't belong to the specified org/repo
| _________|
| |
30 | | with:
31 | | persist-credentials: false
| |____________________________________- this step
|
= note: audit confidence High
= note: this finding has an auto-fix
4 findings (3 suppressed, 1 fixable): 0 informational, 0 low, 0 medium, 1 high
"
);
Ok(())
}

View file

@ -1,7 +1,6 @@
//! Per-audit integration tests, including snapshots.
mod anonymous_definition;
mod archived_uses;
mod artipacked;
mod bot_conditions;
mod cache_poisoning;
@ -13,7 +12,7 @@ mod excessive_permissions;
mod forbidden_uses;
mod github_env;
// mod hardcoded_container_credentials; // TODO
mod impostor_commit;
// mod impostor_commit; // TODO
mod insecure_commands;
// mod known_vulnerable_actions; // TODO
mod obfuscation;

View file

@ -237,56 +237,3 @@ fn test_issue_1177_repro_pedantic() -> Result<()> {
Ok(())
}
/// Reproduces issue #1414: the obfuscation audit should not crash if the
/// user has `shell: cmd` defined as a job or workflow default rather than
/// at the step level.
///
/// See: https://github.com/zizmorcore/zizmor/issues/1414
#[test]
fn test_issue_1414_repro() -> Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("obfuscation/issue-1414-repro.yml"))
.run()?,
@r"
help[obfuscation]: obfuscated usage of GitHub Actions features
--> @@INPUT@@:13:9
|
13 | shell: cmd
| ^^^^^^^^^^ job default shell defined here
14 | steps:
15 | - name: say hi
| ------------ Windows CMD shell limits analysis
|
= note: audit confidence High
= tip: use 'shell: pwsh' or 'shell: bash' for improved analysis
3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high
"
);
// Like #1414, but with `shell: cmd` defined at the workflow level.
insta::assert_snapshot!(
zizmor()
.input(input_under_test("obfuscation/workflow-cmd-default-shell.yml"))
.run()?,
@r"
help[obfuscation]: obfuscated usage of GitHub Actions features
--> @@INPUT@@:10:5
|
10 | shell: cmd
| ^^^^^^^^^^ workflow default shell defined here
...
16 | - name: say hi
| ------------ Windows CMD shell limits analysis
|
= note: audit confidence High
= tip: use 'shell: pwsh' or 'shell: bash' for improved analysis
3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high
"
);
Ok(())
}

View file

@ -33,7 +33,7 @@ fn test_issue_518_repro() -> Result<()> {
.input(input_under_test("ref-confusion/issue-518-repro.yml"))
.offline(false)
.run()?,
@"No findings to report. Good job! (1 ignored, 1 suppressed)"
@r"No findings to report. Good job! (1 ignored, 1 suppressed)"
);
Ok(())

View file

@ -42,7 +42,7 @@ fn test_nested_annotated_tags() -> Result<()> {
"ref-version-mismatch/nested-annotated-tags.yml"
))
.run()?,
@"No findings to report. Good job! (1 suppressed)"
@r"No findings to report. Good job! (1 suppressed)"
);
Ok(())

View file

@ -8,10 +8,10 @@ fn secrets_inherit() -> anyhow::Result<()> {
.run()?,
@r"
warning[secrets-inherit]: secrets unconditionally inherited by called workflow
--> @@INPUT@@:10:11
--> @@INPUT@@:10:5
|
10 | uses: octo-org/example-repo/.github/workflows/called-workflow.yml@main
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
11 | # NOT OK: unconditionally inherits
12 | secrets: inherit
| ---------------- inherits all parent secrets

View file

@ -30,7 +30,7 @@ fn test_self_hosted_default() -> Result<()> {
zizmor()
.input(input_under_test("self-hosted.yml"))
.run()?,
@"No findings to report. Good job! (1 suppressed)"
@r"No findings to report. Good job! (1 suppressed)"
);
Ok(())

View file

@ -11,10 +11,10 @@ fn test_pedantic_persona() -> anyhow::Result<()> {
.run()?,
@r"
help[stale-action-refs]: commit hash does not point to a Git tag
--> @@INPUT@@:34:13
--> @@INPUT@@:34:7
|
34 | - uses: actions/checkout@009b9ae9e446ad8d9b8c809870b0fbcc5e03573e
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step
|
= note: audit confidence High

View file

@ -183,7 +183,7 @@ fn test_issue_418_repro() -> Result<()> {
zizmor()
.input(input_under_test("template-injection/issue-418-repro.yml"))
.run()?,
@"No findings to report. Good job! (3 suppressed)"
@r"No findings to report. Good job! (3 suppressed)"
);
Ok(())
@ -247,10 +247,10 @@ fn test_pr_425_backstop_action() -> Result<()> {
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:29:13
--> @@INPUT@@:29:7
|
29 | uses: azure/powershell@whatever
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High

View file

@ -64,7 +64,7 @@ fn test_undocumented_permissions_default() -> Result<()> {
zizmor()
.input(input_under_test("undocumented-permissions.yml"))
.run()?,
@"No findings to report. Good job! (5 suppressed)"
@r"No findings to report. Good job! (5 suppressed)"
);
Ok(())
@ -78,7 +78,7 @@ fn test_documented_permissions_pedantic() -> Result<()> {
.input(input_under_test("undocumented-permissions/documented.yml"))
.args(["--persona=pedantic"])
.run()?,
@"No findings to report. Good job! (1 ignored)"
@r"No findings to report. Good job! (1 ignored)"
);
Ok(())
@ -94,7 +94,7 @@ fn test_contents_read_only_pedantic() -> Result<()> {
))
.args(["--persona=pedantic"])
.run()?,
@"No findings to report. Good job!"
@r"No findings to report. Good job!"
);
Ok(())
@ -110,7 +110,7 @@ fn test_empty_permissions_pedantic() -> Result<()> {
))
.args(["--persona=pedantic"])
.run()?,
@"No findings to report. Good job!"
@r"No findings to report. Good job!"
);
Ok(())

View file

@ -10,18 +10,18 @@ fn test_unpinned_uses_pedantic() -> Result<()> {
.run()?,
@r"
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:21:24
--> @@INPUT@@:21:9
|
21 | - uses: docker://ubuntu
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:27:24
--> @@INPUT@@:27:9
|
27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
@ -40,18 +40,18 @@ fn test_unpinned_uses_default() -> Result<()> {
.run()?,
@r"
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:21:24
--> @@INPUT@@:21:9
|
21 | - uses: docker://ubuntu
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:27:24
--> @@INPUT@@:27:9
|
27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
@ -71,18 +71,18 @@ fn test_action_pedantic() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:13
--> @@INPUT@@:12:7
|
12 | uses: asdf-vm/actions/setup@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:15:13
--> @@INPUT@@:15:7
|
15 | uses: asdf-vm/actions/setup@main
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -114,7 +114,7 @@ fn test_issue_659_repro() -> Result<()> {
.input(input_under_test("unpinned-uses/issue-659-repro.yml"))
.args(["--pedantic"])
.run()?,
@"No findings to report. Good job!"
@r"No findings to report. Good job!"
);
Ok(())
@ -129,10 +129,10 @@ fn test_default_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:22:15
--> @@INPUT@@:22:9
|
22 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -155,42 +155,42 @@ fn test_hash_pin_everything_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:15
--> @@INPUT@@:12:9
|
12 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:14:15
--> @@INPUT@@:14:9
|
14 | - uses: actions/checkout@v3
| ^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:22:15
--> @@INPUT@@:22:9
|
22 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:15
--> @@INPUT@@:24:9
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:15
--> @@INPUT@@:26:9
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -226,26 +226,26 @@ fn test_composite_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:15
--> @@INPUT@@:12:9
|
12 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:15
--> @@INPUT@@:24:9
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:15
--> @@INPUT@@:26:9
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -265,18 +265,18 @@ fn test_composite_config_2() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:15
--> @@INPUT@@:24:9
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:15
--> @@INPUT@@:26:9
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy)
|
= note: audit confidence High
@ -296,42 +296,42 @@ fn test_empty_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:15
--> @@INPUT@@:12:9
|
12 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:14:15
--> @@INPUT@@:14:9
|
14 | - uses: actions/checkout@v3
| ^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:22:15
--> @@INPUT@@:22:9
|
22 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:15
--> @@INPUT@@:24:9
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:15
--> @@INPUT@@:26:9
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High

View file

@ -357,7 +357,7 @@ fn test_issue_1191_repro() -> Result<()> {
"use-trusted-publishing/issue-1191-repro.yml"
))
.run()?,
@"No findings to report. Good job! (3 suppressed)"
@r"No findings to report. Good job! (3 suppressed)"
);
Ok(())
@ -406,106 +406,3 @@ fn test_nuget_push() -> Result<()> {
Ok(())
}
#[test]
fn test_gem_push() -> Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("use-trusted-publishing/gem-push.yml"))
.run()?,
@r"
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:12:14
|
12 | run: gem push foo-0.1.0.gem
| --- ^^^^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:15:14
|
15 | run: bundle exec gem push foo-0.1.0.gem
| --- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:20:11
|
19 | run: |
| --- this step
20 | / gem \
21 | | push \
22 | | foo-0.1.0.gem
| |_________________________^ this command
|
= note: audit confidence High
5 findings (2 suppressed): 3 informational, 0 low, 0 medium, 0 high
"
);
Ok(())
}
#[test]
fn test_twine_upload() -> Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("use-trusted-publishing/twine-upload.yml"))
.run()?,
@r"
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:12:14
|
12 | run: twine upload dist/*
| --- ^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:15:14
|
15 | run: python -m twine upload dist/*
| --- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:19:11
|
18 | run: |
| --- this step
19 | / python3.10 -m \
20 | | twine \
21 | | upload \
22 | | dist/*
| |__________________^ this command
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:26:11
|
25 | run: |
| --- this step
26 | pipx run twine==6.1.0 upload dist/*
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command
|
= note: audit confidence High
6 findings (2 suppressed): 4 informational, 0 low, 0 medium, 0 high
"
);
Ok(())
}

View file

@ -42,37 +42,28 @@ pub struct Zizmor {
stdin: Option<String>,
unbuffer: bool,
offline: bool,
gh_token: bool,
inputs: Vec<String>,
config: Option<String>,
no_config: bool,
output: OutputMode,
expects_failure: bool,
show_audit_urls: bool,
}
impl Zizmor {
/// Create a new zizmor runner.
pub fn new() -> Self {
let mut cmd = Command::new(cargo::cargo_bin!());
// Our child `zizmor` process starts with a clean environment, to
// ensure we explicitly test interactions with things like `CI`
// and `GH_TOKEN`.
cmd.env_clear();
let cmd = Command::new(cargo::cargo_bin!());
Self {
cmd,
stdin: None,
unbuffer: false,
offline: true,
gh_token: true,
inputs: vec![],
config: None,
no_config: false,
output: OutputMode::Stdout,
expects_failure: false,
show_audit_urls: false,
}
}
@ -91,6 +82,11 @@ impl Zizmor {
self
}
pub fn unsetenv(mut self, key: &str) -> Self {
self.cmd.env_remove(key);
self
}
pub fn input(mut self, input: impl Into<String>) -> Self {
self.inputs.push(input.into());
self
@ -116,11 +112,6 @@ impl Zizmor {
self
}
pub fn gh_token(mut self, flag: bool) -> Self {
self.gh_token = flag;
self
}
pub fn output(mut self, output: OutputMode) -> Self {
self.output = output;
self
@ -134,11 +125,6 @@ impl Zizmor {
self
}
pub fn show_audit_urls(mut self, flag: bool) -> Self {
self.show_audit_urls = flag;
self
}
pub fn working_dir(mut self, dir: impl Into<String>) -> Self {
self.cmd.current_dir(dir.into());
self
@ -154,12 +140,7 @@ impl Zizmor {
} else {
// If we're running in online mode, we pre-assert the
// presence of GH_TOKEN to make configuration failures more obvious.
let token =
std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?;
if self.gh_token {
self.cmd.env("GH_TOKEN", token);
}
std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?;
}
if self.no_config && self.config.is_some() {
@ -184,12 +165,6 @@ impl Zizmor {
self.cmd.arg("--no-progress");
}
if self.show_audit_urls {
self.cmd.arg("--show-audit-urls=always");
} else {
self.cmd.arg("--show-audit-urls=never");
}
for input in &self.inputs {
self.cmd.arg(input);
}
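
Taken together, the builder above is driven the way the integration tests in this diff do: construct, configure, `run()`, snapshot. A hedged usage sketch (the input path is hypothetical, and `run()` is assumed to return `anyhow::Result<String>` given how the tests consume it):

fn example() -> anyhow::Result<()> {
    let output = Zizmor::new()
        .input("tests/input/example.yml") // hypothetical path
        .offline(true)
        .run()?;
    insta::assert_snapshot!(output);
    Ok(())
}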

Some files were not shown because too many files have changed in this diff.