mirror of https://github.com/zizmorcore/zizmor.git, synced 2025-12-23 08:47:33 +00:00

Compare commits: 36 commits (github-act...main)
SHA1: ff76e6c96e, 8a138d4d7f, 27041c58c9, c3913e7eff, 1a6a008951, a5e304f536,
2942f11dc2, 9d61a10dc1, de6f9d6042, cc76e2b93f, 7e1c93b760, 081f844760,
abdfe5d9b6, 32d6831a1f, eafa9e43fc, f0525e0f30, 5b7e8bcd72, 4a7e4e27bc,
147bfabc0f, 411b74a5a9, c9fc966d0a, b3e8725791, 5bb11ce251, 94eea71efd,
b9b65b0e80, c9f0ea9aaf, 5987ac7503, b714997a0c, b78376a737, 1f71a18100,
e0aa73cc7f, 2f9b3da4f5, c17d68930c, 77b58ceec6, 26a7d434a2, 1e51d1fe9f
113 changed files with 1870 additions and 1011 deletions
.github/workflows/benchmark.yml (vendored, new file, +51)

@@ -0,0 +1,51 @@
name: CodSpeed Benchmarks

on:
  push:
    branches:
      - "main"
  pull_request:
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review
      - labeled
  # `workflow_dispatch` allows CodSpeed to trigger backtest
  # performance analysis in order to generate initial data.
  workflow_dispatch:

permissions: {}

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  benchmarks:
    name: Run benchmarks
    # PRs only get benchmarked if they have the `run-benchmarks` label.
    if: |
      contains(github.event.pull_request.labels.*.name, 'run-benchmarks')
      || github.event_name == 'push'
      || github.event_name == 'workflow_dispatch'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
        with:
          cache-all-crates: true

      - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6

      - name: Build zizmor (release)
        run: cargo build --release

      - name: Run the benchmarks
        uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
        with:
          mode: walltime
          run: make bench
.github/workflows/ci.yml (vendored, 14 lines changed)

@@ -22,14 +22,14 @@ jobs:
    name: Lint
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

      - name: Format
        run: cargo fmt --check

-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Lint
        run: cargo clippy -- --deny warnings

@@ -38,13 +38,13 @@ jobs:
    name: Test
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

-      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6

      - name: Test dependencies
        run: |

@@ -67,11 +67,11 @@ jobs:
    name: Test site build
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

-      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6

      - name: Test site
        run: make site
.github/workflows/codegen.yml (vendored, 16 lines changed)

@@ -22,7 +22,7 @@ jobs:
      pull-requests: write # for opening PRs

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

@@ -31,7 +31,7 @@ jobs:
          make refresh-schemas

      - name: create PR
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
        with:
          draft: true
          commit-message: "[BOT] update JSON schemas from SchemaStore"

@@ -59,18 +59,18 @@ jobs:
      pull-requests: write # for opening PRs

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

-      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6

      - name: try to refresh context capabilities
        run: |
          make webhooks-to-contexts

      - name: create PR
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
        with:
          draft: true
          commit-message: "[BOT] update context capabilities"

@@ -97,18 +97,18 @@ jobs:
      pull-requests: write # for opening PRs

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

-      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6

      - name: try to refresh CodeQL injection sinks
        run: |
          make codeql-injection-sinks

      - name: create PR
-        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
        with:
          draft: true
          commit-message: "[BOT] update CodeQL injection sinks"
.github/workflows/release-binaries.yml (vendored, 6 lines changed)

@@ -27,7 +27,7 @@ jobs:
            target: x86_64-pc-windows-msvc

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

@@ -60,7 +60,7 @@ jobs:
        shell: bash

      - name: Upload artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: artifacts-${{ matrix.target }}
          path: ${{ steps.archive-release.outputs.filename }}

@@ -78,7 +78,7 @@ jobs:

    steps:
      - name: Download artifacts
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
        with:
          pattern: artifacts-*
          path: distrib/
.github/workflows/release-docker.yml (vendored, 10 lines changed)

@@ -43,7 +43,7 @@ jobs:
      packages: write

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

@@ -54,7 +54,7 @@ jobs:

      - name: Extract Docker metadata
        id: docker-metadata
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        with:
          images: "${{ env.ZIZMOR_IMAGE }}"

@@ -86,7 +86,7 @@ jobs:
        shell: bash

      - name: Upload digest
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: digests-${{ matrix.image.platform-pair }}
          path: ${{ runner.temp }}/digests/*

@@ -107,7 +107,7 @@ jobs:

    steps:
      - name: Download digests
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*

@@ -128,7 +128,7 @@ jobs:

      - name: Extract Docker metadata
        id: docker-metadata
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
        with:
.github/workflows/release-pypi.yml (vendored, 22 lines changed)

@@ -37,7 +37,7 @@ jobs:
          # target: ppc64le
          # manylinux: "2_28"
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Build wheels

@@ -47,7 +47,7 @@ jobs:
          args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
          manylinux: ${{ matrix.platform.manylinux }}
      - name: Upload wheels
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: wheels-linux-${{ matrix.platform.target }}
          path: dist

@@ -67,7 +67,7 @@ jobs:
          - runner: ubuntu-24.04
            target: armv7
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Build wheels

@@ -77,7 +77,7 @@ jobs:
          args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
          manylinux: musllinux_1_2
      - name: Upload wheels
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: wheels-musllinux-${{ matrix.platform.target }}
          path: dist

@@ -93,7 +93,7 @@ jobs:
          - runner: windows-latest
            target: x86
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Build wheels

@@ -102,7 +102,7 @@ jobs:
          target: ${{ matrix.platform.target }}
          args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
      - name: Upload wheels
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: wheels-windows-${{ matrix.platform.target }}
          path: dist

@@ -118,7 +118,7 @@ jobs:
          - runner: macos-15
            target: aarch64
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Build wheels

@@ -127,7 +127,7 @@ jobs:
          target: ${{ matrix.platform.target }}
          args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
      - name: Upload wheels
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: wheels-macos-${{ matrix.platform.target }}
          path: dist

@@ -136,7 +136,7 @@ jobs:
    name: Build source distribution
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
      - name: Build sdist

@@ -145,7 +145,7 @@ jobs:
          command: sdist
          args: --out dist --manifest-path crates/zizmor/Cargo.toml
      - name: Upload sdist
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: wheels-sdist
          path: dist

@@ -161,7 +161,7 @@ jobs:
    permissions:
      id-token: write # Trusted Publishing + PEP 740 attestations
    steps:
-      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+      - uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
      - name: Attest
        uses: astral-sh/attest-action@2c727738cea36d6c97dd85eb133ea0e0e8fe754b # v0.0.4
        with:
.github/workflows/release-support-crate.yml (vendored, 2 lines changed)

@@ -53,7 +53,7 @@ jobs:
    permissions:
      id-token: write # for trusted publishing to crates.io
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
.github/workflows/release-zizmor-crate.yml (vendored, 2 lines changed)

@@ -19,7 +19,7 @@ jobs:
    permissions:
      id-token: write # for trusted publishing to crates.io
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
.github/workflows/site.yml (vendored, 4 lines changed)

@@ -26,12 +26,12 @@ jobs:

    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

      - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+        uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6

      - name: build site
        run: make site
.github/workflows/test-output.yml (vendored, 34 lines changed)

@@ -19,18 +19,18 @@ jobs:
      pull-requests: write # for 'Leave comment' step
    steps:
      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Run zizmor
        run: |
          cargo run -- --format sarif . > results.sarif

      - name: Upload SARIF file
-        uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
+        uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
        with:
          sarif_file: results.sarif
          category: zizmor-test-sarif-presentation

@@ -52,11 +52,11 @@ jobs:

    steps:
      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Run zizmor
        run: |

@@ -67,3 +67,27 @@ jobs:
            --no-exit-codes \
            --format github \
            crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml
+
+  test-plain-presentation:
+    name: Test plain text presentation
+    runs-on: ubuntu-latest
+    if: contains(github.event.pull_request.labels.*.name, 'test-plain-presentation')
+    permissions: {}
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+        with:
+          persist-credentials: false
+
+      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+
+      - name: Run zizmor
+        run: |
+          # Normally we'd want a workflow to fail if the audit fails,
+          # but we're only testing presentation here.
+          cargo run \
+            -- \
+            --no-exit-codes \
+            --format plain \
+            crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml
.github/workflows/wolfi-update-check.yml (vendored, 2 lines changed)

@@ -19,7 +19,7 @@ jobs:
      issues: write # to create an issue if a new version is found

    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false
          sparse-checkout: support/
.github/workflows/zizmor.yml (vendored, 9 lines changed)

@@ -5,6 +5,11 @@ on:
    branches: ["main"]
  pull_request:
    branches: ["**"]
+    types:
+      - opened
+      - synchronize
+      - reopened
+      - ready_for_review

permissions: {}

@@ -16,12 +21,12 @@ jobs:
      security-events: write
    steps:
      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          persist-credentials: false

      - name: Run zizmor 🌈
-        uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0
+        uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
        with:
          # intentionally not scanning the entire repository,
          # since it contains integration tests.
.gitignore (vendored, 3 lines changed)

@@ -10,3 +10,6 @@

# pending snapshots
.*.pending-snap
+
+# benchmarks
+.codspeed/
Cargo.lock (generated, 184 lines changed)

@@ -58,9 +58,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"

[[package]]
name = "annotate-snippets"
-version = "0.12.9"
+version = "0.12.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a44baf24dd94e781f74dfe67ffee75a09a57971ddf0f615a178b4f6d404b48ff"
+checksum = "15580ece6ea97cbf832d60ba19c021113469480852c6a2a6beb0db28f097bf1f"
dependencies = [
 "anstyle",
 "unicode-width 0.2.2",

@@ -228,12 +228,6 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"

-[[package]]
-name = "bitflags"
-version = "1.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
-
[[package]]
name = "bitflags"
version = "2.10.0"

@@ -313,18 +307,18 @@ dependencies = [

[[package]]
name = "camino"
-version = "1.2.1"
+version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609"
+checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
dependencies = [
 "serde_core",
]

[[package]]
name = "cc"
-version = "1.2.41"
+version = "1.2.49"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7"
+checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215"
dependencies = [
 "find-msvc-tools",
 "shlex",

@@ -344,9 +338,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"

[[package]]
name = "clap"
-version = "4.5.51"
+version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
+checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
 "clap_builder",
 "clap_derive",

@@ -364,9 +358,9 @@ dependencies = [

[[package]]
name = "clap_builder"
-version = "4.5.51"
+version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
+checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
 "anstream",
 "anstyle",

@@ -376,9 +370,9 @@ dependencies = [

[[package]]
name = "clap_complete"
-version = "4.5.60"
+version = "4.5.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e602857739c5a4291dfa33b5a298aeac9006185229a700e5810a3ef7272d971"
+checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992"
dependencies = [
 "clap",
]

@@ -564,6 +558,12 @@ dependencies = [
 "parking_lot_core",
]

+[[package]]
+name = "data-encoding"
+version = "2.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
+
[[package]]
name = "deranged"
version = "0.5.4"

@@ -705,9 +705,9 @@ dependencies = [

[[package]]
name = "find-msvc-tools"
-version = "0.1.4"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
+checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"

[[package]]
name = "flate2"

@@ -721,11 +721,12 @@ dependencies = [

[[package]]
name = "fluent-uri"
-version = "0.1.4"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d"
+checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
dependencies = [
- "bitflags 1.3.2",
+ "borrow-or-share",
+ "ref-cast",
]

[[package]]

@@ -923,7 +924,7 @@ dependencies = [

[[package]]
name = "github-actions-models"
-version = "0.40.0"
+version = "0.42.0"
dependencies = [
 "indexmap",
 "insta",

@@ -954,9 +955,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"

[[package]]
name = "hashbrown"
-version = "0.16.0"
+version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
+checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
dependencies = [
 "allocator-api2",
 "equivalent",

@@ -977,12 +978,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "http"
-version = "1.3.1"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
dependencies = [
 "bytes",
- "fnv",
 "itoa",
]

@@ -1291,12 +1291,12 @@ dependencies = [

[[package]]
name = "indexmap"
-version = "2.12.0"
+version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
+checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [
 "equivalent",
- "hashbrown 0.16.0",
+ "hashbrown 0.16.1",
 "serde",
 "serde_core",
]

@@ -1317,9 +1317,9 @@ dependencies = [

[[package]]
name = "insta"
-version = "1.43.2"
+version = "1.44.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
+checksum = "b5c943d4415edd8153251b6f197de5eb1640e56d84e8d9159bea190421c73698"
dependencies = [
 "console 0.15.11",
 "once_cell",

@@ -1384,13 +1384,13 @@ dependencies = [

[[package]]
name = "jsonschema"
-version = "0.35.0"
+version = "0.37.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0303b14f91cbac17c64aaf2ef60ab71fe5f34c3867cedcbca72c9dd15f5040fe"
+checksum = "73c9ffb2b5c56d58030e1b532d8e8389da94590515f118cf35b5cb68e4764a7e"
dependencies = [
 "ahash",
 "base64 0.22.1",
 "bytecount",
 "data-encoding",
 "email_address",
 "fancy-regex",
 "fraction",

@@ -1428,7 +1428,7 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
dependencies = [
- "bitflags 2.10.0",
+ "bitflags",
 "libc",
 "redox_syscall",
]

@@ -1477,16 +1477,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"

[[package]]
-name = "lsp-types"
-version = "0.97.0"
+name = "ls-types"
+version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071"
+checksum = "7a7deb98ef9daaa7500324351a5bab7c80c644cfb86b4be0c4433b582af93510"
dependencies = [
- "bitflags 1.3.2",
- "fluent-uri 0.1.4",
+ "bitflags",
+ "fluent-uri 0.3.2",
 "percent-encoding",
 "serde",
 "serde_json",
 "serde_repr",
]

[[package]]

@@ -1782,9 +1782,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"

[[package]]
name = "pest"
-version = "2.8.3"
+version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4"
+checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22"
dependencies = [
 "memchr",
 "ucd-trie",

@@ -1792,9 +1792,9 @@ dependencies = [

[[package]]
name = "pest_derive"
-version = "2.8.3"
+version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de"
+checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f"
dependencies = [
 "pest",
 "pest_generator",

@@ -1802,9 +1802,9 @@ dependencies = [

[[package]]
name = "pest_generator"
-version = "2.8.3"
+version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843"
+checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625"
dependencies = [
 "pest",
 "pest_meta",

@@ -1815,9 +1815,9 @@ dependencies = [

[[package]]
name = "pest_meta"
-version = "2.8.3"
+version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a"
+checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82"
dependencies = [
 "pest",
 "sha2",

@@ -2048,7 +2048,7 @@ version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
dependencies = [
- "bitflags 2.10.0",
+ "bitflags",
]

[[package]]

@@ -2073,14 +2073,14 @@ dependencies = [

[[package]]
name = "referencing"
-version = "0.35.0"
+version = "0.37.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22d0d0665043906aacf1d83bea9d61e5134f8f437815b84320e7facf8ff4e9c2"
+checksum = "4283168a506f0dcbdce31c9f9cce3129c924da4c6bca46e46707fcb746d2d70c"
dependencies = [
 "ahash",
 "fluent-uri 0.4.1",
 "getrandom 0.3.4",
- "hashbrown 0.16.0",
+ "hashbrown 0.16.1",
 "parking_lot",
 "percent-encoding",
 "serde_json",

@@ -2129,9 +2129,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"

[[package]]
name = "reqwest"
-version = "0.12.24"
+version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
+checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
 "base64 0.22.1",
 "bytes",

@@ -2224,7 +2224,7 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
dependencies = [
- "bitflags 2.10.0",
+ "bitflags",
 "errno",
 "libc",
 "linux-raw-sys",

@@ -2347,7 +2347,7 @@ version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c"
dependencies = [
- "bitflags 2.10.0",
+ "bitflags",
 "core-foundation",
 "core-foundation-sys",
 "libc",

@@ -2490,17 +2490,6 @@ dependencies = [
 "syn 2.0.108",
]

-[[package]]
-name = "serde_repr"
-version = "0.1.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.108",
-]
-
[[package]]
name = "serde_spanned"
version = "1.0.3"

@@ -3004,11 +2993,11 @@ dependencies = [

[[package]]
name = "tower-http"
-version = "0.6.6"
+version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
+checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
dependencies = [
- "bitflags 2.10.0",
+ "bitflags",
 "bytes",
 "futures-util",
 "http",

@@ -3028,17 +3017,16 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"

[[package]]
name = "tower-lsp-server"
-version = "0.22.1"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88f3f8ec0dcfdda4d908bad2882fe0f89cf2b606e78d16491323e918dfa95765"
+checksum = "2f0e711655c89181a6bc6a2cc348131fcd9680085f5b06b6af13427a393a6e72"
dependencies = [
 "bytes",
 "dashmap",
 "futures",
 "httparse",
- "lsp-types",
+ "ls-types",
 "memchr",
 "percent-encoding",
 "serde",
 "serde_json",
 "tokio",

@@ -3055,9 +3043,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"

[[package]]
name = "tracing"
-version = "0.1.41"
+version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
+checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
 "pin-project-lite",
 "tracing-attributes",

@@ -3066,9 +3054,9 @@ dependencies = [

[[package]]
name = "tracing-attributes"
-version = "0.1.30"
+version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
+checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
 "proc-macro2",
 "quote",

@@ -3077,9 +3065,9 @@ dependencies = [

[[package]]
name = "tracing-core"
-version = "0.1.34"
+version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
+checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
 "once_cell",
 "valuable",

@@ -3087,9 +3075,9 @@ dependencies = [

[[package]]
name = "tracing-indicatif"
-version = "0.3.13"
+version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04d4e11e0e27acef25a47f27e9435355fecdc488867fa2bc90e75b0700d2823d"
+checksum = "e1ef6990e0438749f0080573248e96631171a0b5ddfddde119aa5ba8c3a9c47e"
dependencies = [
 "indicatif",
 "tracing",

@@ -3110,9 +3098,9 @@ dependencies = [

[[package]]
name = "tracing-subscriber"
-version = "0.3.20"
+version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
+checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [
 "matchers",
 "nu-ansi-term",

@@ -3128,9 +3116,9 @@ dependencies = [

[[package]]
name = "tree-sitter"
-version = "0.25.10"
+version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87"
+checksum = "974d205cc395652cfa8b37daa053fe56eebd429acf8dc055503fee648dae981e"
dependencies = [
 "cc",
 "regex",

@@ -3142,9 +3130,9 @@ dependencies = [

[[package]]
name = "tree-sitter-bash"
-version = "0.25.0"
+version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6"
+checksum = "9e5ec769279cc91b561d3df0d8a5deb26b0ad40d183127f409494d6d8fc53062"
dependencies = [
 "cc",
 "tree-sitter-language",

@@ -3152,7 +3140,7 @@ dependencies = [

[[package]]
name = "tree-sitter-iter"
-version = "0.0.2"
+version = "0.0.3"
dependencies = [
 "tree-sitter",
 "tree-sitter-yaml",

@@ -3166,9 +3154,9 @@ checksum = "c4013970217383f67b18aef68f6fb2e8d409bc5755227092d32efb0422ba24b8"

[[package]]
name = "tree-sitter-powershell"
-version = "0.25.9"
+version = "0.25.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae0e37101b110badaf99aa40460915a8797ceba15fc0ed22773280377a8dffb6"
+checksum = "415ec6251d133d26b4f62c60721149fe36c315334f47812450187d6ea59cffdf"
dependencies = [
 "cc",
 "tree-sitter-language",

@@ -3816,7 +3804,7 @@ checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3"

[[package]]
name = "yamlpatch"
-version = "0.7.0"
+version = "0.8.0"
dependencies = [
 "indexmap",
 "insta",

@@ -3832,7 +3820,7 @@ dependencies = [

[[package]]
name = "yamlpath"
-version = "0.29.0"
+version = "0.31.0"
dependencies = [
 "line-index",
 "self_cell",

@@ -3956,7 +3944,7 @@ dependencies = [

[[package]]
name = "zizmor"
-version = "1.18.0"
+version = "1.19.0"
dependencies = [
 "annotate-snippets",
 "anstream",
Cargo.toml (44 lines changed)

@@ -21,38 +21,38 @@ rust-version = "1.88.0"
[workspace.dependencies]
anyhow = "1.0.100"
github-actions-expressions = { path = "crates/github-actions-expressions", version = "0.0.11" }
-github-actions-models = { path = "crates/github-actions-models", version = "0.40.0" }
+github-actions-models = { path = "crates/github-actions-models", version = "0.42.0" }
itertools = "0.14.0"
-pest = "2.8.3"
-pest_derive = "2.8.3"
+pest = "2.8.4"
+pest_derive = "2.8.4"
pretty_assertions = "1.4.1"
-annotate-snippets = "0.12.9"
+annotate-snippets = "0.12.10"
anstream = "0.6.21"
assert_cmd = "2.1.1"
async-trait = "0.1.89"
-camino = "1.2.1"
-clap = "4.5.51"
+camino = "1.2.2"
+clap = "4.5.53"
clap-verbosity-flag = { version = "3.0.4", default-features = false }
-clap_complete = "4.5.60"
+clap_complete = "4.5.61"
clap_complete_nushell = "4.5.10"
csv = "1.3.1"
etcetera = "0.11.0"
flate2 = "1.1.5"
fst = "0.4.7"
futures = "0.3"
-http = "1.3.1"
+http = "1.4.0"
http-cache-reqwest = { version = "1.0.0-alpha.2", features = ["manager-moka"] }
human-panic = "2.0.4"
ignore = "0.4.25"
-indexmap = { version = "2.11.4", features = ["serde"] }
+indexmap = { version = "2.12.1", features = ["serde"] }
indicatif = "0.18"
-insta = "1.43.2"
-jsonschema = "0.35.0"
+insta = "1.44.3"
+jsonschema = "0.37.4"
line-index = "0.1.2"
memchr = "2.7.6"
owo-colors = "4.2.3"
regex = "1.12.1"
-reqwest = { version = "0.12.23", default-features = false }
+reqwest = { version = "0.12.25", default-features = false }
reqwest-middleware = "0.4.2"
self_cell = "1"
serde = { version = "1.0.228", features = ["derive"] }

@@ -65,16 +65,18 @@ tar = "0.4.44"
terminal-link = "0.1.0"
thiserror = "2.0.17"
tokio = { version = "1.47.1", features = ["rt-multi-thread", "io-std"] }
-tower-lsp-server = "0.22"
-tracing = "0.1.41"
-tracing-indicatif = "0.3.13"
+tower-lsp-server = "0.23"
+tracing = "0.1.43"
+tracing-indicatif = "0.3.14"
tracing-subscriber = "0.3.20"
-tree-sitter = "0.25.10"
-tree-sitter-bash = "0.25.0"
-tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.2" }
-tree-sitter-powershell = "0.25.9"
-yamlpath = { path = "crates/yamlpath", version = "0.29.0" }
-yamlpatch = { path = "crates/yamlpatch", version = "0.7.0" }
+tree-sitter = "0.26.3"
+tree-sitter-bash = "0.25.1"
+tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.3" }
+# Exact version since the upstream performed a breaking change outside of semver.
+# See: https://github.com/zizmorcore/zizmor/pull/1427
+tree-sitter-powershell = "=0.25.10"
+yamlpath = { path = "crates/yamlpath", version = "0.31.0" }
+yamlpatch = { path = "crates/yamlpatch", version = "0.8.0" }
tree-sitter-yaml = "0.7.2"
tikv-jemallocator = "0.6"
Makefile (2 lines changed)

@@ -53,4 +53,4 @@ pinact:

.PHONY: bench
bench:
-	uv run bench/benchmark.py --offline
+	uv run --only-group=bench pytest bench/ --codspeed
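For context, this `bench` target is what the new CodSpeed workflow earlier in this compare invokes via `make bench`. A rough local equivalent, assuming `uv` is installed and that the repository's `bench` dependency group provides pytest plus the CodSpeed pytest plugin, would be:

# Build the release binary that bench/common.py expects at target/release/zizmor
cargo build --release -p zizmor

# Run the pytest-based benchmarks (the same command as the Makefile's `bench` target)
uv run --only-group=bench pytest bench/ --codspeed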
bench/.gitignore (vendored, deleted)

@@ -1 +0,0 @@
-results/
bench/__init__.py (new, empty file)

@@ -1,242 +0,0 @@
# /// script
# requires-python = ">=3.12"
# ///

import argparse
import hashlib
import json
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, NoReturn, TypedDict

_DEPS = ["hyperfine", "curl", "unzip"]

_HERE = Path(__file__).parent
_PROJECT_ROOT = _HERE.parent
_ZIZMOR = _PROJECT_ROOT / "target" / "release" / "zizmor"

assert (_PROJECT_ROOT / "Cargo.toml").is_file(), "Missing project root?"

_BENCHMARKS = _HERE / "benchmarks.json"
_RESULTS = _HERE / "results"

assert _BENCHMARKS.is_file(), f"Benchmarks file not found: {_BENCHMARKS}"
_RESULTS.mkdir(exist_ok=True)

_CACHE_DIR = Path(tempfile.gettempdir()) / "zizmor-benchmark-cache"
_CACHE_DIR.mkdir(exist_ok=True)

_GH_TOKEN = os.getenv("GH_TOKEN")


class Log:
    def __init__(self, scope: str | None) -> None:
        self.scopes = [scope] if scope else []

    def info(self, message: str) -> None:
        scopes = " ".join(f"[{s}]" for s in self.scopes)
        print(f"[+] {scopes} {message}", file=sys.stderr)

    def warn(self, message: str) -> None:
        scopes = " ".join(f"[{s}]" for s in self.scopes)
        print(f"[!] {scopes} {message}", file=sys.stderr)

    def error(self, message: str) -> NoReturn:
        self.warn(message)
        sys.exit(1)

    @contextmanager
    def scope(self, new_scope: str) -> Iterator[None]:
        """Create a new logging scope."""
        self.scopes.append(new_scope)
        try:
            yield None
        finally:
            self.scopes.pop()


LOG = Log("benchmarks")


def _curl(url: str, expected_sha256: str) -> Path:
    """Download a URL and cache it using content addressing with SHA256."""
    cached_file = _CACHE_DIR / expected_sha256
    if cached_file.exists():
        LOG.info("Using cached file")
        return cached_file

    result = subprocess.run(
        ["curl", "-fsSL", url],
        capture_output=True,
        check=True,
    )

    content = result.stdout
    content_hash = hashlib.sha256(content).hexdigest()

    if content_hash != expected_sha256:
        LOG.error(f"Hash mismatch: {expected_sha256} != {content_hash}")

    cached_file.write_bytes(content)

    return cached_file


def _unzip(archive_path: Path, extract_name: str) -> Path:
    """Extract an archive to a directory in the cache."""
    extract_dir = _CACHE_DIR / extract_name

    if extract_dir.exists():
        LOG.info("Using cached extraction")
        return extract_dir

    extract_dir.mkdir(exist_ok=True)

    subprocess.run(
        ["unzip", "-q", str(archive_path), "-d", str(extract_dir)],
        check=True,
    )

    LOG.info(f"Extracted {archive_path.name} to {extract_dir}")
    return extract_dir


class Benchmark(TypedDict):
    name: str
    source_type: str
    source: str
    source_sha256: str
    stencil: str
    online: bool | None


Plan = list[str]


class Bench:
    def __init__(self, benchmark: Benchmark) -> None:
        self.benchmark = benchmark

    def plan(self) -> Plan:
        match self.benchmark["source_type"]:
            case "archive-url":
                url = self.benchmark["source"]
                sha256 = self.benchmark["source_sha256"]
                archive = _curl(url, sha256)
                inputs = [str(_unzip(archive, self.benchmark["name"]))]
            case _:
                LOG.error(f"Unknown source type: {self.benchmark['source_type']}")

        if self.benchmark.get("online", False):
            if not _GH_TOKEN:
                LOG.error("Benchmark requires online access but GH_TOKEN is not set")

        stencil = self.benchmark["stencil"]
        command = stencil.replace("$ZIZMOR", str(_ZIZMOR)).replace(
            "$INPUTS", " ".join(inputs)
        )
        return shlex.split(command)

    def run(self, plan: Plan, *, dry_run: bool) -> None:
        command = shlex.join(plan)

        result_file = _RESULTS / f"{self.benchmark['name']}.json"
        if result_file.exists() and not dry_run:
            LOG.warn("clobbering existing result file")

        hyperfine_command = [
            "hyperfine",
            "--warmup",
            "3",
            # NOTE: not needed because we use --no-exit-codes in the stencil
            # "--ignore-failure",
            "--export-json",
            str(result_file),
            command,
        ]

        if dry_run:
            LOG.warn(f"would have run: {shlex.join(hyperfine_command)}")
            return

        try:
            subprocess.run(
                hyperfine_command,
                check=True,
            )
        except subprocess.CalledProcessError:
            LOG.error("run failed, see above for details")

        # Stupid hack: fixup each result file's results[0].command
        # to be a more useful benchmark identifier, since bencher
        # apparently keys on these.
        result_json = json.loads(result_file.read_bytes())
        result_json["results"][0]["command"] = f"zizmor::{self.benchmark['name']}"
        result_file.write_text(json.dumps(result_json))

        LOG.info(f"run written to {result_file}")


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--dry-run", action="store_true", help="Show plans without running them"
    )
    parser.add_argument(
        "--offline", action="store_true", help="Run only offline benchmarks"
    )

    args = parser.parse_args()

    missing = []
    for dep in _DEPS:
        if not shutil.which(dep):
            missing.append(dep)

    if missing:
        LOG.error(
            f"Missing dependencies: {', '.join(missing)}. "
            "Please install them before running benchmarks."
        )

    LOG.info("ensuring we have a benchable zizmor build")
    subprocess.run(
        ["cargo", "build", "--release", "-p", "zizmor"],
        check=True,
        cwd=_PROJECT_ROOT,
    )

    if not _ZIZMOR.is_file():
        LOG.error("zizmor build presumably failed, see above for details")

    LOG.info(f"using cache dir: {_CACHE_DIR}")

    benchmarks: list[Benchmark] = json.loads(_BENCHMARKS.read_text(encoding="utf-8"))
    LOG.info(f"found {len(benchmarks)} benchmarks in {_BENCHMARKS.name}")

    if args.offline:
        benchmarks = [b for b in benchmarks if not b.get("online", False)]
        LOG.info(f"filtered to {len(benchmarks)} offline benchmarks")

    benches = [Bench(benchmark) for benchmark in benchmarks]
    plans = []
    with LOG.scope("plan"):
        for bench in benches:
            with LOG.scope(bench.benchmark["name"]):
                LOG.info("beginning plan")
                plans.append(bench.plan())

    with LOG.scope("run"):
        for bench, plan in zip(benches, plans):
            with LOG.scope(bench.benchmark["name"]):
                bench.run(plan, dry_run=args.dry_run)


if __name__ == "__main__":
    main()
@@ -1,24 +0,0 @@
[
  {
    "name": "grafana-9f212d11d0ac",
    "source_type": "archive-url",
    "source": "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip",
    "source_sha256": "c6d42b52c8d912db2698d8b06f227de46f0c2d04cc757841792ed6567f0c56c7",
    "stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
  },
  {
    "name": "cpython-48f88310044c",
    "source_type": "archive-url",
    "source": "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip",
    "source_sha256": "a52a67f1dd9cfa67c7d1305d5b9639629abe247b2c32f01b77f790ddf8b49503",
    "stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
  },
  {
    "name": "gha-hazmat-da3c3cd-online",
    "source_type": "archive-url",
    "source": "https://github.com/woodruffw/gha-hazmat/archive/da3c3cd.zip",
    "source_sha256": "f0aa224c5203218ad26e9f104d8dc3eaf8b322c97056add04d79f4a0d53c8f1f",
    "stencil": "$ZIZMOR --format=plain --no-exit-codes --no-config $INPUTS",
    "online": true
  }
]
bench/common.py (new file, +12)

@@ -0,0 +1,12 @@
import subprocess
from pathlib import Path

_HERE = Path(__file__).parent
_ZIZMOR = _HERE.parent / "target" / "release" / "zizmor"


def zizmor(args: list[str], *, check: bool = False) -> None:
    assert _ZIZMOR.is_file(), (
        f"zizmor binary not found at {_ZIZMOR}, run prepare() first"
    )
    subprocess.run([str(_ZIZMOR), *args], check=check)
bench/conftest.py (new, empty file)
bench/test_bench_basic.py (new file, +13)

@@ -0,0 +1,13 @@
import pytest

from .common import zizmor


@pytest.mark.benchmark
def test_zizmor_startup():
    zizmor(["--version"])


@pytest.mark.benchmark
def test_zizmor_help():
    zizmor(["--help"])
bench/test_bench_offline.py (new file, +68)

@@ -0,0 +1,68 @@
import io
import zipfile
from pathlib import Path

import pytest
import urllib3

from bench.common import zizmor


@pytest.fixture(scope="session")
def grafana(tmp_path_factory) -> Path:
    archive = "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip"
    raw_zip = urllib3.PoolManager().request("GET", archive).data

    path = tmp_path_factory.mktemp("grafana")

    zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path)

    return path


@pytest.fixture(scope="session")
def cpython(tmp_path_factory) -> Path:
    archive = "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip"
    raw_zip = urllib3.PoolManager().request("GET", archive).data

    path = tmp_path_factory.mktemp("cpython")

    zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path)

    return path


@pytest.mark.benchmark
def test_zizmor_offline_grafana_9f212d11d0(grafana: Path):
    """
    Runs `zizmor --offline --format=plain --no-exit-codes --no-config <path-to-grafana-source>`
    """

    zizmor(
        [
            "--offline",
            "--format=plain",
            "--no-exit-codes",
            "--no-config",
            str(grafana),
        ],
        check=True,
    )


@pytest.mark.benchmark
def test_zizmor_offline_cpython_48f88310044c(cpython: Path):
    """
    Runs `zizmor --offline --format=plain --no-exit-codes --no-config <path-to-cpython-source>`
    """

    zizmor(
        [
            "--offline",
            "--format=plain",
            "--no-exit-codes",
            "--no-config",
            str(cpython),
        ],
        check=True,
    )
bench/test_bench_online.py (new file, +47)

@@ -0,0 +1,47 @@
import os

import pytest

from bench.common import zizmor


@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set")
def test_zizmor_online_gha_hazmat_da3c3cd(benchmark):
    """
    Runs `zizmor --format=plain --no-exit-codes --no-config woodruffw/gha-hazmat@da3c3cd`
    """

    benchmark.pedantic(
        zizmor,
        args=(
            [
                "--format=plain",
                "--no-exit-codes",
                "--no-config",
                "woodruffw/gha-hazmat@da3c3cd",
            ],
        ),
        warmup_rounds=2,
        iterations=10,
    )


@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set")
def test_zizmor_online_cpython_48f88310044c(benchmark):
    """
    Runs `zizmor --format=plain --no-exit-codes --no-config python/cpython@48f88310044c`
    """

    benchmark.pedantic(
        zizmor,
        args=(
            [
                "--format=plain",
                "--no-exit-codes",
                "--no-config",
                "python/cpython@48f88310044c",
            ],
        ),
        warmup_rounds=2,
        iterations=10,
    )
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "github-actions-models"
|
||||
version = "0.40.0"
|
||||
version = "0.42.0"
|
||||
description = "Unofficial, high-quality data models for GitHub Actions workflows, actions, and related components"
|
||||
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/github-actions-models"
|
||||
keywords = ["github", "ci"]
|
||||
|
|
|
|||
|
|
@@ -212,6 +212,15 @@ impl Uses {
        RepositoryUses::parse(uses).map(Self::Repository)
    }

    /// Returns the original raw `uses:` clause.
    pub fn raw(&self) -> &str {
        match self {
            Uses::Local(local) => &local.path,
            Uses::Repository(repo) => repo.raw(),
            Uses::Docker(docker) => docker.raw(),
        }
    }
}

/// A `uses: ./some/path` clause.

@@ -288,6 +297,12 @@ self_cell!(
    impl {Debug, PartialEq}
);

impl Display for RepositoryUses {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.raw())
    }
}

impl RepositoryUses {
    /// Parse a `uses: some/repo` clause.
    pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
@@ -349,6 +349,8 @@ pub enum AllowDeny {
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum PackageEcosystem {
    /// `bazel`
    Bazel,
    /// `bun`
    Bun,
    /// `bundler`

@@ -369,6 +371,8 @@ pub enum PackageEcosystem {
    DotnetSdk,
    /// `helm`
    Helm,
    /// `julia`
    Julia,
    /// `elm`
    Elm,
    /// `gitsubmodule`

@@ -387,6 +391,8 @@ pub enum PackageEcosystem {
    Npm,
    /// `nuget`
    Nuget,
    /// `opentofu`
    Opentofu,
    /// `pip`
    Pip,
    /// `pub`
@@ -0,0 +1,23 @@
# https://github.com/zizmorcore/zizmor/issues/1451

version: 2
enable-beta-ecosystems: true

updates:
  - package-ecosystem: "opentofu"
    directories:
      - "/stack"
      - "/modules/default-branch-protection"
    schedule:
      interval: "cron"
      cronjob: "30 7 * * *"
      timezone: "Europe/London"
    target-branch: "main"
    groups:
      terraform:
        applies-to: "version-updates"
        patterns:
          - "*"
        update-types:
          - "patch"
          - "minor"
@@ -1,7 +1,7 @@
[package]
name = "tree-sitter-iter"
description = "A very simple pre-order iterator for tree-sitter CSTs"
-version = "0.0.2"
+version = "0.0.3"
authors.workspace = true
homepage.workspace = true
edition.workspace = true

@@ -1,6 +1,6 @@
[package]
name = "yamlpatch"
-version = "0.7.0"
+version = "0.8.0"
description = "Comment and format-preserving YAML patch operations"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpatch"
keywords = ["yaml", "patch"]
@ -903,21 +903,21 @@ normal:
|
|||
let end = find_content_end(&feature, &doc);
|
||||
|
||||
insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r"
|
||||
bar: baz
|
||||
abc: def # comment
|
||||
");
|
||||
bar: baz
|
||||
abc: def # comment
|
||||
");
|
||||
|
||||
let feature = route_to_feature_exact(&route!("interior-spaces"), &doc)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let end = find_content_end(&feature, &doc);
|
||||
insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r"
|
||||
- foo
|
||||
- foo
|
||||
|
||||
- bar
|
||||
# hello
|
||||
- baz # hello
|
||||
");
|
||||
- bar
|
||||
# hello
|
||||
- baz # hello
|
||||
");
|
||||
|
||||
let feature = route_to_feature_exact(&route!("normal"), &doc)
|
||||
.unwrap()
|
||||
|
|
@ -2775,11 +2775,12 @@ items:
|
|||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
items:
|
||||
- first
|
||||
- second
|
||||
- third
|
||||
");
|
||||
|
||||
items:
|
||||
- first
|
||||
- second
|
||||
- third
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -2822,19 +2823,20 @@ databases:
|
|||
let result =
|
||||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r#"
|
||||
databases:
|
||||
- name: primary
|
||||
host: db1.example.com
|
||||
port: 5432
|
||||
max_connections: 100
|
||||
ssl: true
|
||||
readonly: false
|
||||
- name: analytics
|
||||
host: db2.example.com
|
||||
port: 5433
|
||||
readonly: true
|
||||
"#);
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
|
||||
databases:
|
||||
- name: primary
|
||||
host: db1.example.com
|
||||
port: 5432
|
||||
max_connections: 100
|
||||
ssl: true
|
||||
readonly: false
|
||||
- name: analytics
|
||||
host: db2.example.com
|
||||
port: 5433
|
||||
readonly: true
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -2870,16 +2872,17 @@ jobs:
|
|||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r#"
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- name: First step
|
||||
run: echo "first"
|
||||
- name: Second step
|
||||
run: echo "second"
|
||||
- name: Third step
|
||||
run: echo "third"
|
||||
"#);
|
||||
|
||||
jobs:
|
||||
test:
|
||||
steps:
|
||||
- name: First step
|
||||
run: echo "first"
|
||||
- name: Second step
|
||||
run: echo "second"
|
||||
- name: Third step
|
||||
run: echo "third"
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -2924,20 +2927,21 @@ servers:
|
|||
assert!(result.source().contains("# Staging server"));
|
||||
assert!(result.source().contains("# internal only"));
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r#"
|
||||
servers:
|
||||
# Production server
|
||||
- name: prod
|
||||
host: prod.example.com
|
||||
port: 443
|
||||
# Staging server
|
||||
- name: staging
|
||||
host: staging.example.com # internal only
|
||||
port: 8443
|
||||
- name: dev
|
||||
host: localhost
|
||||
port: 8080
|
||||
"#);
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
|
||||
servers:
|
||||
# Production server
|
||||
- name: prod
|
||||
host: prod.example.com
|
||||
port: 443
|
||||
# Staging server
|
||||
- name: staging
|
||||
host: staging.example.com # internal only
|
||||
port: 8443
|
||||
- name: dev
|
||||
host: localhost
|
||||
port: 8080
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -2959,11 +2963,12 @@ ports:
|
|||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
ports:
|
||||
- 8080
|
||||
- 8081
|
||||
- 8082
|
||||
");
|
||||
|
||||
ports:
|
||||
- 8080
|
||||
- 8081
|
||||
- 8082
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -2985,11 +2990,12 @@ configs:
|
|||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
configs:
|
||||
- name: config1
|
||||
value: 123
|
||||
- {}
|
||||
");
|
||||
|
||||
configs:
|
||||
- name: config1
|
||||
value: 123
|
||||
- {}
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -3031,15 +3037,16 @@ services:
|
|||
let result =
|
||||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r#"
|
||||
services:
|
||||
- name: api
|
||||
port: 8080
|
||||
- name: worker
|
||||
port: 9090
|
||||
config:
|
||||
replicas: 3
|
||||
"#);
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
|
||||
services:
|
||||
- name: api
|
||||
port: 8080
|
||||
- name: worker
|
||||
port: 9090
|
||||
config:
|
||||
replicas: 3
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -3096,11 +3103,12 @@ tasks:
|
|||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
tasks:
|
||||
- task1
|
||||
- task2
|
||||
- task3
|
||||
");
|
||||
|
||||
tasks:
|
||||
- task1
|
||||
- task2
|
||||
- task3
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -3138,20 +3146,21 @@ jobs:
|
|||
let result =
|
||||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r#"
|
||||
name: CI
|
||||
on: push
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
- name: Upload coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
"#);
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
|
||||
name: CI
|
||||
on: push
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Run tests
|
||||
run: npm test
|
||||
- name: Upload coverage
|
||||
uses: codecov/codecov-action@v3
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
@ -3176,9 +3185,10 @@ foo:
|
|||
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
|
||||
|
||||
insta::assert_snapshot!(result.source(), @r"
|
||||
foo:
|
||||
- abc
|
||||
- - def
|
||||
- ghi
|
||||
");
|
||||
|
||||
foo:
|
||||
- abc
|
||||
- - def
|
||||
- ghi
|
||||
");
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
[package]
name = "yamlpath"
-version = "0.29.0"
+version = "0.31.0"
description = "Format-preserving YAML feature extraction"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpath"
readme = "README.md"

@@ -1,7 +1,7 @@
[package]
name = "zizmor"
description = "Static analysis for GitHub Actions"
-version = "1.18.0"
+version = "1.19.0"
repository = "https://github.com/zizmorcore/zizmor"
documentation = "https://docs.zizmor.sh"
keywords = ["cli", "github-actions", "static-analysis", "security"]
@ -272,6 +272,17 @@ github.event.changes.new_issue.draft,fixed
|
|||
github.event.changes.new_issue.events_url,structured
|
||||
github.event.changes.new_issue.html_url,structured
|
||||
github.event.changes.new_issue.id,fixed
|
||||
github.event.changes.new_issue.issue_dependencies_summary.blocked_by,fixed
|
||||
github.event.changes.new_issue.issue_dependencies_summary.blocking,fixed
|
||||
github.event.changes.new_issue.issue_dependencies_summary.total_blocked_by,fixed
|
||||
github.event.changes.new_issue.issue_dependencies_summary.total_blocking,fixed
|
||||
github.event.changes.new_issue.issue_field_values.*.data_type,fixed
|
||||
github.event.changes.new_issue.issue_field_values.*.issue_field_id,fixed
|
||||
github.event.changes.new_issue.issue_field_values.*.node_id,arbitrary
|
||||
github.event.changes.new_issue.issue_field_values.*.single_select_option.color,arbitrary
|
||||
github.event.changes.new_issue.issue_field_values.*.single_select_option.id,fixed
|
||||
github.event.changes.new_issue.issue_field_values.*.single_select_option.name,arbitrary
|
||||
github.event.changes.new_issue.issue_field_values.*.value,arbitrary
|
||||
github.event.changes.new_issue.labels.*.color,arbitrary
|
||||
github.event.changes.new_issue.labels.*.default,fixed
|
||||
github.event.changes.new_issue.labels.*.description,arbitrary
|
||||
|
|
@ -762,6 +773,17 @@ github.event.changes.old_issue.draft,fixed
|
|||
github.event.changes.old_issue.events_url,structured
|
||||
github.event.changes.old_issue.html_url,structured
|
||||
github.event.changes.old_issue.id,fixed
|
||||
github.event.changes.old_issue.issue_dependencies_summary.blocked_by,fixed
|
||||
github.event.changes.old_issue.issue_dependencies_summary.blocking,fixed
|
||||
github.event.changes.old_issue.issue_dependencies_summary.total_blocked_by,fixed
|
||||
github.event.changes.old_issue.issue_dependencies_summary.total_blocking,fixed
|
||||
github.event.changes.old_issue.issue_field_values.*.data_type,fixed
|
||||
github.event.changes.old_issue.issue_field_values.*.issue_field_id,fixed
|
||||
github.event.changes.old_issue.issue_field_values.*.node_id,arbitrary
|
||||
github.event.changes.old_issue.issue_field_values.*.single_select_option.color,arbitrary
|
||||
github.event.changes.old_issue.issue_field_values.*.single_select_option.id,fixed
|
||||
github.event.changes.old_issue.issue_field_values.*.single_select_option.name,arbitrary
|
||||
github.event.changes.old_issue.issue_field_values.*.value,arbitrary
|
||||
github.event.changes.old_issue.labels.*.color,arbitrary
|
||||
github.event.changes.old_issue.labels.*.default,fixed
|
||||
github.event.changes.old_issue.labels.*.description,arbitrary
|
||||
|
|
@ -1060,7 +1082,6 @@ github.event.changes.required_status_checks.from.*,arbitrary
|
|||
github.event.changes.required_status_checks_enforcement_level.from,fixed
|
||||
github.event.changes.tag_name.from,arbitrary
|
||||
github.event.changes.title.from,arbitrary
|
||||
github.event.check_run.app,fixed
|
||||
github.event.check_run.app.client_id,arbitrary
|
||||
github.event.check_run.app.created_at,fixed
|
||||
github.event.check_run.app.description,arbitrary
|
||||
|
|
@ -1191,6 +1212,8 @@ github.event.check_run.check_suite.repository.compare_url,arbitrary
|
|||
github.event.check_run.check_suite.repository.contents_url,arbitrary
|
||||
github.event.check_run.check_suite.repository.contributors_url,structured
|
||||
github.event.check_run.check_suite.repository.created_at,fixed
|
||||
github.event.check_run.check_suite.repository.custom_properties,arbitrary
|
||||
github.event.check_run.check_suite.repository.custom_properties.*,arbitrary
|
||||
github.event.check_run.check_suite.repository.default_branch,arbitrary
|
||||
github.event.check_run.check_suite.repository.delete_branch_on_merge,fixed
|
||||
github.event.check_run.check_suite.repository.deployments_url,structured
|
||||
|
|
@ -2207,6 +2230,17 @@ github.event.issue.draft,fixed
|
|||
github.event.issue.events_url,arbitrary
|
||||
github.event.issue.html_url,arbitrary
|
||||
github.event.issue.id,fixed
|
||||
github.event.issue.issue_dependencies_summary.blocked_by,fixed
|
||||
github.event.issue.issue_dependencies_summary.blocking,fixed
|
||||
github.event.issue.issue_dependencies_summary.total_blocked_by,fixed
|
||||
github.event.issue.issue_dependencies_summary.total_blocking,fixed
|
||||
github.event.issue.issue_field_values.*.data_type,fixed
|
||||
github.event.issue.issue_field_values.*.issue_field_id,fixed
|
||||
github.event.issue.issue_field_values.*.node_id,arbitrary
|
||||
github.event.issue.issue_field_values.*.single_select_option.color,arbitrary
|
||||
github.event.issue.issue_field_values.*.single_select_option.id,fixed
|
||||
github.event.issue.issue_field_values.*.single_select_option.name,arbitrary
|
||||
github.event.issue.issue_field_values.*.value,arbitrary
|
||||
github.event.issue.labels.*,arbitrary
|
||||
github.event.issue.labels.*.color,arbitrary
|
||||
github.event.issue.labels.*.default,fixed
|
||||
|
|
@ -3047,6 +3081,7 @@ github.event.pull_request.requested_reviewers.*.url,structured
|
|||
github.event.pull_request.requested_reviewers.*.user_view_type,arbitrary
|
||||
github.event.pull_request.requested_teams.*.deleted,fixed
|
||||
github.event.pull_request.requested_teams.*.description,arbitrary
|
||||
github.event.pull_request.requested_teams.*.enterprise_id,fixed
|
||||
github.event.pull_request.requested_teams.*.html_url,structured
|
||||
github.event.pull_request.requested_teams.*.id,fixed
|
||||
github.event.pull_request.requested_teams.*.ldap_dn,arbitrary
|
||||
|
|
@ -3054,6 +3089,7 @@ github.event.pull_request.requested_teams.*.members_url,arbitrary
|
|||
github.event.pull_request.requested_teams.*.name,arbitrary
|
||||
github.event.pull_request.requested_teams.*.node_id,arbitrary
|
||||
github.event.pull_request.requested_teams.*.notification_setting,arbitrary
|
||||
github.event.pull_request.requested_teams.*.organization_id,fixed
|
||||
github.event.pull_request.requested_teams.*.parent.description,arbitrary
|
||||
github.event.pull_request.requested_teams.*.parent.html_url,structured
|
||||
github.event.pull_request.requested_teams.*.parent.id,fixed
|
||||
|
|
@ -3069,6 +3105,7 @@ github.event.pull_request.requested_teams.*.permission,arbitrary
|
|||
github.event.pull_request.requested_teams.*.privacy,arbitrary
|
||||
github.event.pull_request.requested_teams.*.repositories_url,structured
|
||||
github.event.pull_request.requested_teams.*.slug,arbitrary
|
||||
github.event.pull_request.requested_teams.*.type,fixed
|
||||
github.event.pull_request.requested_teams.*.url,structured
|
||||
github.event.pull_request.review_comment_url,arbitrary
|
||||
github.event.pull_request.review_comments,fixed
|
||||
|
|
@ -3349,6 +3386,7 @@ github.event.release.discussion_url,structured
|
|||
github.event.release.draft,fixed
|
||||
github.event.release.html_url,structured
|
||||
github.event.release.id,fixed
|
||||
github.event.release.immutable,fixed
|
||||
github.event.release.name,arbitrary
|
||||
github.event.release.node_id,arbitrary
|
||||
github.event.release.prerelease,fixed
|
||||
|
|
@ -3366,6 +3404,7 @@ github.event.release.reactions.url,structured
|
|||
github.event.release.tag_name,arbitrary
|
||||
github.event.release.tarball_url,structured
|
||||
github.event.release.target_commitish,arbitrary
|
||||
github.event.release.updated_at,fixed
|
||||
github.event.release.upload_url,structured
|
||||
github.event.release.url,structured
|
||||
github.event.release.zipball_url,structured
|
||||
|
|
@ -3691,6 +3730,7 @@ github.event.review.node_id,arbitrary
|
|||
github.event.review.pull_request_url,structured
|
||||
github.event.review.state,arbitrary
|
||||
github.event.review.submitted_at,fixed
|
||||
github.event.review.updated_at,fixed
|
||||
github.event.review.user.avatar_url,structured
|
||||
github.event.review.user.deleted,fixed
|
||||
github.event.review.user.email,arbitrary
|
||||
|
|
|
|||
|
|
|
@@ -39,6 +39,7 @@ impl Audit for AnonymousDefinition {
                    .confidence(Confidence::High)
                    .persona(Persona::Pedantic)
                    .add_location(workflow.location().primary())
                    .tip("use 'name: ...' to give this workflow a name")
                    .build(workflow)?,
            );
        }

@@ -47,14 +48,13 @@ impl Audit for AnonymousDefinition {
        match job {
            Job::NormalJob(normal) => {
                if normal.name.is_none() {
-                    let location = normal.location().primary();

                    findings.push(
                        Self::finding()
                            .severity(ANONYMOUS_DEFINITION_JOB_SEVERITY)
                            .confidence(Confidence::High)
                            .persona(Persona::Pedantic)
-                            .add_location(location)
+                            .add_location(normal.location_with_grip().primary())
                            .tip("use 'name: ...' to give this job a name")
                            .build(workflow)?,
                    );
                }
|
|
|||
|
|
@ -69,7 +69,7 @@ impl Audit for ArchivedUses {
|
|||
{
|
||||
findings.push(
|
||||
finding
|
||||
.with_step(step)
|
||||
.add_location(step.location_with_grip())
|
||||
.add_location(
|
||||
step.location()
|
||||
.with_keys(["uses".into()])
|
||||
|
|
@ -96,10 +96,11 @@ impl Audit for ArchivedUses {
|
|||
{
|
||||
findings.push(
|
||||
finding
|
||||
.with_step(step)
|
||||
.add_location(step.location_with_grip())
|
||||
.add_location(
|
||||
step.location()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.slug()))
|
||||
.annotated("repository is archived")
|
||||
.primary(),
|
||||
)
|
||||
|
|
@ -122,10 +123,11 @@ impl Audit for ArchivedUses {
|
|||
{
|
||||
findings.push(
|
||||
finding
|
||||
.with_job(job)
|
||||
.add_location(job.location_with_grip())
|
||||
.add_location(
|
||||
job.location()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.slug()))
|
||||
.annotated("repository is archived")
|
||||
.primary(),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -524,6 +524,7 @@ jobs:
|
|||
|workflow: &Workflow, findings| {
|
||||
let fixed = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed.source(), @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -571,6 +572,7 @@ jobs:
|
|||
|workflow: &Workflow, findings| {
|
||||
let fixed = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed.source(), @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: push
|
||||
jobs:
|
||||
|
|
|
|||
|
|
@ -81,7 +81,7 @@ impl Audit for BotConditions {
|
|||
if let Some(If::Expr(expr)) = &job.r#if {
|
||||
conds.push((
|
||||
expr,
|
||||
job.location_with_name(),
|
||||
job.location_with_grip(),
|
||||
job.location().with_keys(["if".into()]),
|
||||
));
|
||||
}
|
||||
|
|
@ -91,7 +91,7 @@ impl Audit for BotConditions {
|
|||
if let Some(If::Expr(expr)) = &step.r#if {
|
||||
conds.push((
|
||||
expr,
|
||||
step.location_with_name(),
|
||||
step.location_with_grip(),
|
||||
step.location().with_keys(["if".into()]),
|
||||
));
|
||||
}
|
||||
|
|
@ -130,7 +130,7 @@ impl Audit for BotConditions {
|
|||
finding_builder = finding_builder.fix(fix);
|
||||
}
|
||||
|
||||
findings.push(finding_builder.build(job)?);
|
||||
findings.push(finding_builder.build(job.parent())?);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -538,6 +538,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Workflow
|
||||
on:
|
||||
pull_request_target:
|
||||
|
|
@ -588,6 +589,7 @@ jobs:
|
|||
}
|
||||
}
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Workflow
|
||||
on:
|
||||
pull_request_target:
|
||||
|
|
@ -641,6 +643,7 @@ jobs:
|
|||
|
||||
// Verify it suggests comment.user.login for issue_comment events
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Issue Comment
|
||||
on: issue_comment
|
||||
|
||||
|
|
@ -690,6 +693,7 @@ jobs:
|
|||
|
||||
// Verify it suggests review.user.login for pull_request_review events
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test PR Review
|
||||
on: pull_request_review
|
||||
|
||||
|
|
@ -739,6 +743,7 @@ jobs:
|
|||
|
||||
// Verify it suggests issue.user.login for issues events
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Issues
|
||||
on: issues
|
||||
|
||||
|
|
@ -788,6 +793,7 @@ jobs:
|
|||
|
||||
// Verify it suggests release.author.login for release events
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Release
|
||||
on: release
|
||||
|
||||
|
|
@ -836,6 +842,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Create
|
||||
on: create
|
||||
|
||||
|
|
@ -885,6 +892,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test Workflow
|
||||
on:
|
||||
pull_request_target:
|
||||
|
|
|
|||
|
|
@ -555,6 +555,7 @@ jobs:
|
|||
|findings: Vec<Finding>| {
|
||||
let fixed_content = apply_fix_for_snapshot(workflow_content, findings);
|
||||
insta::assert_snapshot!(fixed_content, @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: release
|
||||
|
||||
|
|
@ -599,6 +600,7 @@ jobs:
|
|||
|findings: Vec<Finding>| {
|
||||
let fixed_content = apply_fix_for_snapshot(workflow_content, findings);
|
||||
insta::assert_snapshot!(fixed_content, @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: release
|
||||
|
||||
|
|
|
|||
|
|
@ -135,7 +135,7 @@ impl Audit for DependabotCooldown {
|
|||
Self::finding()
|
||||
.add_location(
|
||||
update
|
||||
.location_with_name()
|
||||
.location_with_grip()
|
||||
.primary()
|
||||
.annotated("missing cooldown configuration"),
|
||||
)
|
||||
|
|
@ -202,6 +202,7 @@ updates:
|
|||
let fix = &finding.fixes[0];
|
||||
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -243,6 +244,7 @@ updates:
|
|||
let fix = &finding.fixes[0];
|
||||
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -284,6 +286,7 @@ updates:
|
|||
let fix = &finding.fixes[0];
|
||||
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -335,6 +338,7 @@ updates:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(document.source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -379,6 +383,7 @@ updates:
|
|||
|
||||
// Verify the document remains unchanged
|
||||
insta::assert_snapshot!(dependabot.as_document().source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ impl Audit for DependabotExecution {
|
|||
.primary()
|
||||
.annotated("enabled here"),
|
||||
)
|
||||
.add_location(update.location_with_name())
|
||||
.add_location(update.location_with_grip())
|
||||
.fix(Self::create_set_deny_fix(update))
|
||||
.build(dependabot)?,
|
||||
);
|
||||
|
|
@ -123,6 +123,7 @@ updates:
|
|||
let fix = &finding.fixes[0];
|
||||
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -158,6 +159,7 @@ updates:
|
|||
|
||||
// Verify the document remains unchanged
|
||||
insta::assert_snapshot!(dependabot.as_document().source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -192,6 +194,7 @@ updates:
|
|||
|
||||
// Verify the document remains unchanged
|
||||
insta::assert_snapshot!(dependabot.as_document().source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
@ -240,6 +243,7 @@ updates:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(document.source(), @r"
|
||||
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
|
|
|
|||
|
|
@@ -14,6 +14,7 @@ use crate::{
static KNOWN_PERMISSIONS: LazyLock<HashMap<&str, Severity>> = LazyLock::new(|| {
    [
        ("actions", Severity::High),
        ("artifact-metadata", Severity::Medium),
        ("attestations", Severity::High),
        ("checks", Severity::Medium),
        ("contents", Severity::High),

@@ -21,6 +22,8 @@ static KNOWN_PERMISSIONS: LazyLock<HashMap<&str, Severity>> = LazyLock::new(|| {
        ("discussions", Severity::Medium),
        ("id-token", Severity::High),
        ("issues", Severity::High),
        // What does the write permission even do here?
        ("models", Severity::Low),
        ("packages", Severity::High),
        ("pages", Severity::High),
        ("pull-requests", Severity::High),
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
use github_actions_models::common::Uses;
|
||||
use subfeature::Subfeature;
|
||||
|
||||
use super::{Audit, AuditLoadError, AuditState, audit_meta};
|
||||
use crate::audit::AuditError;
|
||||
|
|
@ -59,6 +60,7 @@ impl ForbiddenUses {
|
|||
step.location()
|
||||
.primary()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.raw()))
|
||||
.annotated("use of this action is forbidden"),
|
||||
)
|
||||
.build(step)?,
|
||||
|
|
|
|||
|
|
@ -88,20 +88,22 @@ const PWSH_REDIRECT_QUERY: &str = r#"
|
|||
|
||||
const PWSH_PIPELINE_QUERY: &str = r#"
|
||||
(pipeline
|
||||
(command
|
||||
command_name: (command_name) @cmd
|
||||
command_elements: (command_elements
|
||||
(_)*
|
||||
(array_literal_expression
|
||||
(unary_expression [
|
||||
(string_literal
|
||||
(expandable_string_literal (variable) @destination))
|
||||
(variable) @destination
|
||||
])
|
||||
)
|
||||
(_)*))
|
||||
(#match? @cmd "(?i)out-file|add-content|set-content|tee-object")
|
||||
(#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH")
|
||||
(pipeline_chain
|
||||
(command
|
||||
command_name: (command_name) @cmd
|
||||
command_elements: (command_elements
|
||||
(_)*
|
||||
(array_literal_expression
|
||||
(unary_expression [
|
||||
(string_literal
|
||||
(expandable_string_literal (variable) @destination))
|
||||
(variable) @destination
|
||||
])
|
||||
)
|
||||
(_)*))
|
||||
(#match? @cmd "(?i)out-file|add-content|set-content|tee-object")
|
||||
(#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH")
|
||||
)
|
||||
) @span
|
||||
"#;
|
||||
|
||||
|
|
@ -392,7 +394,7 @@ impl Audit for GitHubEnv {
|
|||
}
|
||||
|
||||
if let StepBody::Run { run, .. } = &step.deref().body {
|
||||
let shell = step.shell().unwrap_or_else(|| {
|
||||
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
|
||||
tracing::warn!(
|
||||
"github-env: couldn't determine shell type for {workflow}:{job} step {stepno}; assuming bash",
|
||||
workflow = step.workflow().key.presentation_path(),
|
||||
|
|
@ -438,7 +440,7 @@ impl Audit for GitHubEnv {
|
|||
return Ok(findings);
|
||||
};
|
||||
|
||||
let shell = step.shell().unwrap_or_else(|| {
|
||||
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
|
||||
tracing::warn!(
|
||||
"github-env: couldn't determine shell type for {action} step {stepno}; assuming bash",
|
||||
action = step.action().key.presentation_path(),
|
||||
|
|
|
|||
|
|
@@ -7,6 +7,7 @@

use anyhow::anyhow;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;

use super::{Audit, AuditLoadError, Job, audit_meta};
use crate::{

@@ -76,7 +77,7 @@ impl ImpostorCommit {
            return Ok(false);
        };

-        // Fast path: almost all commit refs will be at the tip of
+        // Fastest path: almost all commit refs will be at the tip of
        // the branch or tag's history, so check those first.
        // Check tags before branches, since in practice version tags
        // are more commonly pinned.

@@ -104,6 +105,21 @@ impl ImpostorCommit {
            }
        }

        // Fast path: attempt to use GitHub's undocumented `branch_commits`
        // API to see if the commit is present in any branch/tag.
        // There are no stability guarantees for this API, so we fall back
        // to the slow(er) paths if it fails.
        match self
            .client
            .branch_commits(uses.owner(), uses.repo(), head_ref)
            .await
        {
            Ok(branch_commits) => return Ok(branch_commits.is_empty()),
            Err(e) => tracing::warn!("fast path impostor check failed for {uses}: {e}"),
        }

        // Slow path: use GitHub's comparison API to check each branch and tag's
        // history for presence of the commit.
        for branch in &branches {
            if self
                .named_ref_contains_commit(uses, &format!("refs/heads/{}", &branch.name), head_ref)
@ -256,8 +272,13 @@ impl Audit for ImpostorCommit {
|
|||
let mut finding_builder = Self::finding()
|
||||
.severity(Severity::High)
|
||||
.confidence(Confidence::High)
|
||||
.add_location(step.location_with_grip())
|
||||
.add_location(
|
||||
step.location().primary().annotated(IMPOSTOR_ANNOTATION),
|
||||
step.location()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.raw()))
|
||||
.primary()
|
||||
.annotated(IMPOSTOR_ANNOTATION),
|
||||
);
|
||||
|
||||
if let Some(fix) = self.create_impostor_fix(uses, &step).await {
|
||||
|
|
@ -279,8 +300,14 @@ impl Audit for ImpostorCommit {
|
|||
let mut finding_builder = Self::finding()
|
||||
.severity(Severity::High)
|
||||
.confidence(Confidence::High)
|
||||
.add_location(reusable.location_with_grip())
|
||||
.add_location(
|
||||
reusable.location().primary().annotated(IMPOSTOR_ANNOTATION),
|
||||
reusable
|
||||
.location()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.raw()))
|
||||
.primary()
|
||||
.annotated(IMPOSTOR_ANNOTATION),
|
||||
);
|
||||
|
||||
if let Some(fix) = self.create_reusable_fix(uses, &reusable).await {
|
||||
|
|
@ -310,7 +337,14 @@ impl Audit for ImpostorCommit {
|
|||
let mut finding_builder = Self::finding()
|
||||
.severity(Severity::High)
|
||||
.confidence(Confidence::High)
|
||||
.add_location(step.location().primary().annotated(IMPOSTOR_ANNOTATION));
|
||||
.add_location(step.location_with_grip())
|
||||
.add_location(
|
||||
step.location()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.raw()))
|
||||
.primary()
|
||||
.annotated(IMPOSTOR_ANNOTATION),
|
||||
);
|
||||
|
||||
if let Some(fix) = self.create_impostor_fix(uses, step).await {
|
||||
finding_builder = finding_builder.fix(fix);
|
||||
|
|
@ -381,6 +415,7 @@ jobs:
|
|||
// Apply the fix and snapshot test the result
|
||||
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
|
||||
assert_snapshot!(new_doc.source(), @r"
|
||||
|
||||
name: Test Impostor Commit Fix
|
||||
on: push
|
||||
jobs:
|
||||
|
|
|
|||
|
|
@ -295,6 +295,7 @@ jobs:
|
|||
assert!(fixed_document.source().contains("ANOTHER_VAR: also-keep"));
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
on: push
|
||||
|
||||
jobs:
|
||||
|
|
@ -349,6 +350,7 @@ jobs:
|
|||
assert!(fixed_document.source().contains("GLOBAL_VAR: keep-me"));
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
on: push
|
||||
|
||||
env:
|
||||
|
|
@ -403,6 +405,7 @@ jobs:
|
|||
assert!(fixed_document.source().contains("STEP_VAR: keep-me"));
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
on: push
|
||||
|
||||
jobs:
|
||||
|
|
@ -446,6 +449,7 @@ jobs:
|
|||
let fixed_document = fix.apply(workflow.as_document()).unwrap();
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
on: push
|
||||
|
||||
jobs:
|
||||
|
|
|
|||
|
|
@ -372,6 +372,7 @@ jobs:
|
|||
let fixed_document = fix.apply(workflow.as_document()).unwrap();
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
name: Test Vulnerable Actions
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -421,7 +422,8 @@ jobs:
|
|||
.unwrap();
|
||||
let fixed_document = fix.apply(workflow.as_document()).unwrap();
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
name: Test Node Setup
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -434,7 +436,7 @@ jobs:
|
|||
node-version: '18'
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
"#);
|
||||
");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
@ -474,6 +476,7 @@ jobs:
|
|||
let fixed_document = fix.apply(workflow.as_document()).unwrap();
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
name: Test Third Party Action
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -549,7 +552,8 @@ jobs:
|
|||
.unwrap();
|
||||
current_document = fix_cache.apply(¤t_document).unwrap();
|
||||
|
||||
insta::assert_snapshot!(current_document.source(), @r#"
|
||||
insta::assert_snapshot!(current_document.source(), @r"
|
||||
|
||||
name: Test Multiple Vulnerable Actions
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -569,7 +573,7 @@ jobs:
|
|||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
"#);
|
||||
");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
@ -607,6 +611,7 @@ jobs:
|
|||
let fixed_document = fix.apply(workflow.as_document()).unwrap();
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
name: Test Action with Subpath
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -655,7 +660,8 @@ jobs:
|
|||
.apply(workflow.as_document())
|
||||
.unwrap();
|
||||
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
name: Test First Patched Version Priority
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -664,7 +670,7 @@ jobs:
|
|||
steps:
|
||||
- name: Vulnerable action
|
||||
uses: actions/checkout@v3.1.0
|
||||
"#);
|
||||
");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
@ -700,6 +706,7 @@ jobs:
|
|||
let new_doc = fix.apply(workflow.as_document()).unwrap();
|
||||
|
||||
assert_snapshot!(new_doc.source(), @r"
|
||||
|
||||
name: Test Non-Commit Ref
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -763,6 +770,7 @@ jobs:
|
|||
|
||||
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
|
||||
assert_snapshot!(new_doc.source(), @r"
|
||||
|
||||
name: Test Commit Hash Pinning Real API
|
||||
on: push
|
||||
permissions: {}
|
||||
|
|
@ -819,6 +827,7 @@ jobs:
|
|||
|
||||
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
|
||||
assert_snapshot!(new_doc.source(), @r"
|
||||
|
||||
name: Test Commit Hash Pinning Real API
|
||||
on: push
|
||||
permissions: {}
|
||||
|
|
|
|||
|
|
@ -189,10 +189,9 @@ pub(crate) enum AuditLoadError {
|
|||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
#[error("error in {ident}")]
|
||||
#[error("error in '{ident}' audit")]
|
||||
pub(crate) struct AuditError {
|
||||
ident: &'static str,
|
||||
#[source]
|
||||
source: anyhow::Error,
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -225,7 +225,10 @@ impl Obfuscation {
|
|||
}
|
||||
}
|
||||
crate::models::StepBodyCommon::Run { .. } => {
|
||||
if let Some("cmd" | "cmd.exe") = step.shell().map(utils::normalize_shell) {
|
||||
if let Some(("cmd" | "cmd.exe", shell_loc)) = step
|
||||
.shell()
|
||||
.map(|(shell, loc)| (utils::normalize_shell(shell), loc))
|
||||
{
|
||||
// `shell: cmd` is basically impossible to analyze: it has no formal
|
||||
// grammar and has several line continuation mechanisms that stymie
|
||||
// naive matching. It also hasn't been the default shell on Windows
|
||||
|
|
@ -235,11 +238,10 @@ impl Obfuscation {
|
|||
.confidence(Confidence::High)
|
||||
.severity(Severity::Low)
|
||||
.add_location(
|
||||
step.location()
|
||||
.primary()
|
||||
.with_keys(["shell".into()])
|
||||
step.location_with_grip()
|
||||
.annotated("Windows CMD shell limits analysis"),
|
||||
)
|
||||
.add_location(shell_loc.primary())
|
||||
.tip("use 'shell: pwsh' or 'shell: bash' for improved analysis")
|
||||
.build(step)
|
||||
.map_err(Self::err)?,
|
||||
|
|
@ -405,7 +407,8 @@ jobs:
|
|||
"#;
|
||||
|
||||
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
|
||||
insta::assert_snapshot!(result, @r#"
|
||||
insta::assert_snapshot!(result, @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: push
|
||||
|
||||
|
|
@ -414,7 +417,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
"#);
|
||||
");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
@ -431,7 +434,8 @@ jobs:
|
|||
"#;
|
||||
|
||||
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
|
||||
insta::assert_snapshot!(result, @r#"
|
||||
insta::assert_snapshot!(result, @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: push
|
||||
|
||||
|
|
@ -440,7 +444,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: github/codeql-action/init@v2
|
||||
"#);
|
||||
");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
@ -457,7 +461,8 @@ jobs:
|
|||
"#;
|
||||
|
||||
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
|
||||
insta::assert_snapshot!(result, @r#"
|
||||
insta::assert_snapshot!(result, @r"
|
||||
|
||||
name: Test Workflow
|
||||
on: push
|
||||
|
||||
|
|
@ -466,6 +471,6 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/cache/save@v4
|
||||
"#);
|
||||
");
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
use github_actions_models::workflow::job::Secrets;
|
||||
use subfeature::Subfeature;
|
||||
|
||||
use super::{Audit, AuditLoadError, AuditState, audit_meta};
|
||||
use crate::{
|
||||
|
|
@ -37,6 +38,7 @@ impl Audit for SecretsInherit {
|
|||
job.location()
|
||||
.primary()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, job.uses.raw()))
|
||||
.annotated("this reusable workflow"),
|
||||
)
|
||||
.add_location(
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
use anyhow::anyhow;
|
||||
use github_actions_models::common::{RepositoryUses, Uses};
|
||||
use subfeature::Subfeature;
|
||||
|
||||
use super::{Audit, AuditLoadError, audit_meta};
|
||||
use crate::{
|
||||
|
|
@ -53,7 +54,12 @@ impl StaleActionRefs {
|
|||
.confidence(Confidence::High)
|
||||
.severity(Severity::Low)
|
||||
.persona(Persona::Pedantic)
|
||||
.add_location(step.location().primary().with_keys(["uses".into()]))
|
||||
.add_location(
|
||||
step.location()
|
||||
.primary()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.raw())),
|
||||
)
|
||||
.build(step)?,
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -185,7 +185,7 @@ impl TemplateInjection {
            return None;
        }

-        let shell = utils::normalize_shell(step.shell()?);
+        let shell = utils::normalize_shell(step.shell()?.0);

        match shell {
            "bash" | "sh" | "zsh" => Some(format!("${{{env_var}}}")),
|
|
@ -707,6 +707,7 @@ jobs:
|
|||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -758,6 +759,7 @@ jobs:
|
|||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -811,6 +813,7 @@ jobs:
|
|||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -920,6 +923,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(current_document.source(), @r#"
|
||||
|
||||
name: Test Multiple Template Injections
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -986,6 +990,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(current_document.source(), @r#"
|
||||
|
||||
name: Test Duplicate Template Injections
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1046,6 +1051,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(current_document.source(), @r#"
|
||||
|
||||
name: Test Duplicate Template Injections
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1109,6 +1115,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(current_document.source(), @r#"
|
||||
|
||||
name: Test Duplicate Template Injections
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1225,6 +1232,7 @@ jobs:
|
|||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection - Bash
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1271,6 +1279,7 @@ jobs:
|
|||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection - Bash
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1316,7 +1325,8 @@ jobs:
|
|||
finding,
|
||||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
insta::assert_snapshot!(fixed_content.source(), @r"
|
||||
|
||||
name: Test Template Injection - CMD
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1326,7 +1336,7 @@ jobs:
|
|||
- name: Vulnerable step with cmd shell
|
||||
shell: cmd
|
||||
run: echo User is %GITHUB_ACTOR%
|
||||
"#);
|
||||
");
|
||||
}
|
||||
}
|
||||
);
|
||||
|
|
@ -1363,6 +1373,7 @@ jobs:
|
|||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection - PowerShell
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1409,6 +1420,7 @@ jobs:
|
|||
);
|
||||
// Ubuntu default shell is bash, so should use ${VAR} syntax
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection - Default Shell Ubuntu
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1454,6 +1466,7 @@ jobs:
|
|||
);
|
||||
// Windows default shell is pwsh, so should use $env:VAR syntax
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
|
||||
name: Test Template Injection - Default Shell Windows
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1498,7 +1511,8 @@ jobs:
|
|||
finding,
|
||||
"replace expression with environment variable",
|
||||
);
|
||||
insta::assert_snapshot!(fixed_content.source(), @r#"
|
||||
insta::assert_snapshot!(fixed_content.source(), @r"
|
||||
|
||||
name: Test Template Injection - CMD with Custom Env
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -1510,7 +1524,7 @@ jobs:
|
|||
run: echo PR title is %GITHUB_EVENT_PULL_REQUEST_TITLE%
|
||||
env:
|
||||
GITHUB_EVENT_PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }}
|
||||
"#);
|
||||
");
|
||||
}
|
||||
}
|
||||
);
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
use github_actions_models::common::Uses;
|
||||
use subfeature::Subfeature;
|
||||
|
||||
use super::{Audit, AuditLoadError, AuditState, audit_meta};
|
||||
use crate::audit::AuditError;
|
||||
|
|
@ -31,7 +32,7 @@ impl UnpinnedUses {
|
|||
Uses::Docker(_) => {
|
||||
if uses.unpinned() {
|
||||
Some((
|
||||
"action is not pinned to a tag, branch, or hash ref".into(),
|
||||
"image is not pinned to a tag, branch, or hash ref".into(),
|
||||
Severity::Medium,
|
||||
Persona::default(),
|
||||
))
|
||||
|
|
@ -108,6 +109,7 @@ impl UnpinnedUses {
|
|||
step.location()
|
||||
.primary()
|
||||
.with_keys(["uses".into()])
|
||||
.subfeature(Subfeature::new(0, uses.raw()))
|
||||
.annotated(annotation),
|
||||
)
|
||||
.build(step)?,
|
||||
|
|
|
|||
|
|
@ -244,6 +244,7 @@ jobs:
|
|||
|
||||
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
name: Test
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -283,6 +284,7 @@ jobs:
|
|||
|
||||
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
name: Test
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -323,6 +325,7 @@ jobs:
|
|||
|
||||
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
name: Test
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -367,6 +370,7 @@ jobs:
|
|||
|
||||
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
|
||||
name: Test
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -406,7 +410,8 @@ jobs:
|
|||
assert_eq!(findings.len(), 1);
|
||||
|
||||
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
|
||||
insta::assert_snapshot!(fixed_document.source(), @r#"
|
||||
insta::assert_snapshot!(fixed_document.source(), @r"
|
||||
|
||||
name: Test
|
||||
on: push
|
||||
jobs:
|
||||
|
|
@ -414,7 +419,7 @@ jobs:
|
|||
if: |-
|
||||
${{ github.event_name == 'pull_request' }}
|
||||
uses: ./.github/workflows/reusable.yml
|
||||
"#);
|
||||
");
|
||||
}
|
||||
);
|
||||
}
|
||||
|
|
@ -463,6 +468,7 @@ jobs:
|
|||
}
|
||||
|
||||
insta::assert_snapshot!(document.source(), @r#"
|
||||
|
||||
name: Test
|
||||
on: push
|
||||
jobs:
|
||||
|
|
|
|||
|
|
@ -430,7 +430,7 @@ impl Audit for UseTrustedPublishing {
|
|||
if let StepBodyCommon::Run { run, .. } = step.body()
|
||||
&& !step.parent.has_id_token()
|
||||
{
|
||||
let shell = step.shell().unwrap_or_else(|| {
|
||||
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
|
||||
tracing::debug!(
|
||||
"use-trusted-publishing: couldn't determine shell type for {workflow}:{job} step {stepno}",
|
||||
workflow = step.workflow().key.filename(),
|
||||
|
|
|
|||
|
|
@@ -21,7 +21,12 @@ use crate::{
    registry::input::RepoSlug,
};

-const CONFIG_CANDIDATES: &[&str] = &[".github/zizmor.yml", "zizmor.yml"];
+const CONFIG_CANDIDATES: &[&str] = &[
+    ".github/zizmor.yml",
+    ".github/zizmor.yaml",
+    "zizmor.yml",
+    "zizmor.yaml",
+];

#[derive(Error, Debug)]
#[error("configuration error in {path}")]
|
|
|
|||
|
|
@@ -647,24 +647,28 @@
    },
    "package-ecosystem-values": {
      "enum": [
        "bazel",
        "bun",
        "bundler",
        "cargo",
        "composer",
        "conda",
        "devcontainers",
        "docker",
        "docker-compose",
        "dotnet-sdk",
        "elm",
        "gitsubmodule",
        "github-actions",
        "gitsubmodule",
        "gomod",
        "gradle",
        "helm",
        "julia",
        "maven",
        "mix",
        "npm",
        "nuget",
        "opentofu",
        "pip",
        "pub",
        "rust-toolchain",
|
|
|
|||
|
|
@ -6,9 +6,7 @@ use serde::{Deserialize, Serialize};
|
|||
|
||||
use self::location::{Location, SymbolicLocation};
|
||||
use crate::{
|
||||
InputKey,
|
||||
audit::AuditError,
|
||||
models::{AsDocument, StepCommon, workflow::JobCommon},
|
||||
InputKey, audit::AuditError, finding::location::LocationKind, models::AsDocument,
|
||||
registry::input::Group,
|
||||
};
|
||||
use yamlpatch::{self, Patch};
|
||||
|
|
@ -228,44 +226,6 @@ impl<'doc> FindingBuilder<'doc> {
|
|||
self
|
||||
}
|
||||
|
||||
/// Add a "useful" location for the given step, if it has a name or ID.
|
||||
pub(crate) fn with_step(mut self, step: &impl StepCommon<'doc>) -> Self {
|
||||
if step.name().is_some() {
|
||||
self.locations.push(
|
||||
step.location()
|
||||
.with_keys(["name".into()])
|
||||
.annotated("this step"),
|
||||
);
|
||||
} else if step.id().is_some() {
|
||||
self.locations.push(
|
||||
step.location()
|
||||
.with_keys(["id".into()])
|
||||
.annotated("this step"),
|
||||
);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn with_job(mut self, job: &impl JobCommon<'doc>) -> Self {
|
||||
if job.name().is_some() {
|
||||
self.locations.push(
|
||||
job.location()
|
||||
.with_keys(["name".into()])
|
||||
.annotated("this job"),
|
||||
);
|
||||
} else {
|
||||
self.locations.push(
|
||||
job.parent()
|
||||
.location()
|
||||
.with_keys(["jobs".into(), job.id().into()])
|
||||
.annotated("this job"),
|
||||
);
|
||||
}
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub(crate) fn tip(mut self, tip: impl Into<String>) -> Self {
|
||||
self.tip = Some(tip.into());
|
||||
self
|
||||
|
|
@@ -289,7 +249,12 @@ impl<'doc> FindingBuilder<'doc> {
        locations.extend(self.raw_locations);

-        if !locations.iter().any(|l| l.symbolic.is_primary()) {
+        if locations.len() == 1
+            && let Some(location) = locations.get_mut(0)
+        {
+            // If there's only one location, then it's primary by definition.
+            location.symbolic.kind = LocationKind::Primary;
+        } else if !locations.iter().any(|l| l.symbolic.is_primary()) {
            return Err(AuditError::new(
                self.ident,
                anyhow!("API misuse: at least one location must be marked with primary()"),
|
|
|
|||
|
|
@@ -208,12 +208,12 @@ pub(crate) trait Locatable<'doc> {
    fn location(&self) -> SymbolicLocation<'doc>;

    /// Returns an "enriched" symbolic location of this model,
-    /// when the model is of a type that has a name. Otherwise,
-    /// returns the same symbolic location as `location()`.
+    /// when the model has one or more "grip" fields that are
+    /// visually useful to key off of (like a `name` or `id` field).
    ///
    /// For example, a GitHub Actions workflow step has an optional name,
    /// which is included in this symbolic location if present.
-    fn location_with_name(&self) -> SymbolicLocation<'doc> {
+    fn location_with_grip(&self) -> SymbolicLocation<'doc> {
        self.location()
    }
}
|
|
|
|||
|
|
@@ -575,6 +575,29 @@ impl Client {
            .max_by_key(|t| t.name.len()))
    }

    #[instrument(skip(self))]
    pub(crate) async fn branch_commits(
        &self,
        owner: &str,
        repo: &str,
        commit: &str,
    ) -> Result<BranchCommits, ClientError> {
        // NOTE(ww): This API is undocumented.
        // See: https://github.com/orgs/community/discussions/78161
        let url = format!("https://github.com/{owner}/{repo}/branch_commits/{commit}");

        // We ask GitHub for JSON, because it sends HTML by default for this endpoint.
        self.base_client
            .get(&url)
            .header(ACCEPT, "application/json")
            .send()
            .await?
            .error_for_status()?
            .json()
            .await
            .map_err(Into::into)
    }

    #[instrument(skip(self))]
    pub(crate) async fn compare_commits(
        &self,

@@ -859,6 +882,23 @@ pub(crate) struct Commit {
    pub(crate) sha: String,
}

/// The response structure from GitHub's undocumented `branch_commits` API.
///
/// This model is intentionally incomplete.
#[derive(Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
#[non_exhaustive]
pub(crate) struct BranchCommits {
    branches: Vec<serde_json::Value>,
    tags: Vec<String>,
}

impl BranchCommits {
    pub(crate) fn is_empty(&self) -> bool {
        self.branches.is_empty() && self.tags.is_empty()
    }
}

#[derive(Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum ComparisonStatus {
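The `branch_commits` helper above leans on an endpoint that GitHub does not document, so it can be worth probing by hand before trusting it. A small stand-alone sketch of such a probe (Python; the owner/repo/commit triple reuses the CPython commit referenced by the benchmarks, and the response is only assumed to contain the `branches` and `tags` arrays modeled above):

# Probe GitHub's undocumented branch_commits endpoint for one commit.
import json
import urllib.request

req = urllib.request.Request(
    "https://github.com/python/cpython/branch_commits/48f88310044c6ef877f3b0761cf7afece2f8fb3a",
    headers={"Accept": "application/json"},  # the endpoint returns HTML unless JSON is requested
)
with urllib.request.urlopen(req) as resp:
    listing = json.load(resp)

# An empty branches/tags pair is what the impostor-commit audit treats as a red flag.
print(listing.get("branches", []), listing.get("tags", []))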
@ -4,7 +4,7 @@ use std::str::FromStr;
|
|||
|
||||
use camino::Utf8Path;
|
||||
use thiserror::Error;
|
||||
use tower_lsp_server::lsp_types::{self, TextDocumentSyncKind};
|
||||
use tower_lsp_server::ls_types::{self, TextDocumentSyncKind};
|
||||
use tower_lsp_server::{Client, LanguageServer, LspService, Server};
|
||||
|
||||
use crate::audit::AuditInput;
|
||||
|
|
@ -25,7 +25,7 @@ pub(crate) struct Error {
|
|||
}
|
||||
|
||||
struct LspDocumentCommon {
|
||||
uri: lsp_types::Uri,
|
||||
uri: ls_types::Uri,
|
||||
text: String,
|
||||
version: Option<i32>,
|
||||
}
|
||||
|
|
@ -39,35 +39,35 @@ struct Backend {
|
|||
impl LanguageServer for Backend {
|
||||
async fn initialize(
|
||||
&self,
|
||||
_: lsp_types::InitializeParams,
|
||||
) -> tower_lsp_server::jsonrpc::Result<lsp_types::InitializeResult> {
|
||||
Ok(lsp_types::InitializeResult {
|
||||
server_info: Some(lsp_types::ServerInfo {
|
||||
_: ls_types::InitializeParams,
|
||||
) -> tower_lsp_server::jsonrpc::Result<ls_types::InitializeResult> {
|
||||
Ok(ls_types::InitializeResult {
|
||||
server_info: Some(ls_types::ServerInfo {
|
||||
name: "zizmor (LSP)".into(),
|
||||
version: Some(env!("CARGO_PKG_VERSION").into()),
|
||||
}),
|
||||
capabilities: lsp_types::ServerCapabilities {
|
||||
text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind(
|
||||
lsp_types::TextDocumentSyncKind::FULL,
|
||||
capabilities: ls_types::ServerCapabilities {
|
||||
text_document_sync: Some(ls_types::TextDocumentSyncCapability::Kind(
|
||||
ls_types::TextDocumentSyncKind::FULL,
|
||||
)),
|
||||
..Default::default()
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
async fn initialized(&self, _: lsp_types::InitializedParams) {
|
||||
async fn initialized(&self, _: ls_types::InitializedParams) {
|
||||
let selectors = vec![
|
||||
lsp_types::DocumentFilter {
|
||||
ls_types::DocumentFilter {
|
||||
language: Some("yaml".into()),
|
||||
scheme: None,
|
||||
pattern: Some("**/.github/workflows/*.{yml,yaml}".into()),
|
||||
},
|
||||
lsp_types::DocumentFilter {
|
||||
ls_types::DocumentFilter {
|
||||
language: Some("yaml".into()),
|
||||
scheme: None,
|
||||
pattern: Some("**/action.{yml,yaml}".into()),
|
||||
},
|
||||
lsp_types::DocumentFilter {
|
||||
ls_types::DocumentFilter {
|
||||
language: Some("yaml".into()),
|
||||
scheme: None,
|
||||
pattern: Some("**/.github/dependabot.{yml,yaml}".into()),
|
||||
|
|
@ -80,46 +80,46 @@ impl LanguageServer for Backend {
        // neglects to.
        self.client
            .register_capability(vec![
                lsp_types::Registration {
                ls_types::Registration {
                    id: "zizmor-didopen".into(),
                    method: "textDocument/didOpen".into(),
                    register_options: Some(
                        serde_json::to_value(lsp_types::TextDocumentRegistrationOptions {
                        serde_json::to_value(ls_types::TextDocumentRegistrationOptions {
                            document_selector: Some(selectors.clone()),
                        })
                        .expect("failed to serialize LSP document registration options"),
                    ),
                },
                lsp_types::Registration {
                ls_types::Registration {
                    id: "zizmor-didchange".into(),
                    method: "textDocument/didChange".into(),
                    register_options: Some(
                        serde_json::to_value(lsp_types::TextDocumentChangeRegistrationOptions {
                        serde_json::to_value(ls_types::TextDocumentChangeRegistrationOptions {
                            document_selector: Some(selectors.clone()),
                            sync_kind: TextDocumentSyncKind::FULL,
                        })
                        .expect("failed to serialize LSP document registration options"),
                    ),
                },
                lsp_types::Registration {
                ls_types::Registration {
                    id: "zizmor-didsave".into(),
                    method: "textDocument/didSave".into(),
                    register_options: Some(
                        serde_json::to_value(lsp_types::TextDocumentSaveRegistrationOptions {
                        serde_json::to_value(ls_types::TextDocumentSaveRegistrationOptions {
                            include_text: Some(true),
                            text_document_registration_options:
                                lsp_types::TextDocumentRegistrationOptions {
                                ls_types::TextDocumentRegistrationOptions {
                                    document_selector: Some(selectors.clone()),
                                },
                        })
                        .expect("failed to serialize LSP document registration options"),
                    ),
                },
                lsp_types::Registration {
                ls_types::Registration {
                    id: "zizmor-didclose".into(),
                    method: "textDocument/didClose".into(),
                    register_options: Some(
                        serde_json::to_value(lsp_types::TextDocumentRegistrationOptions {
                        serde_json::to_value(ls_types::TextDocumentRegistrationOptions {
                            document_selector: Some(selectors),
                        })
                        .expect("failed to serialize LSP document registration options"),
@ -130,7 +130,7 @@ impl LanguageServer for Backend {
            .expect("failed to register text document capabilities with the LSP client");

        self.client
            .log_message(lsp_types::MessageType::INFO, "server initialized!")
            .log_message(ls_types::MessageType::INFO, "server initialized!")
            .await;
    }

@ -139,7 +139,7 @@ impl LanguageServer for Backend {
        Ok(())
    }

    async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) {
    async fn did_open(&self, params: ls_types::DidOpenTextDocumentParams) {
        tracing::debug!("did_open: {:?}", params);
        self.audit(LspDocumentCommon {
            uri: params.text_document.uri,

@ -149,7 +149,7 @@ impl LanguageServer for Backend {
            .await;
    }

    async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) {
    async fn did_change(&self, params: ls_types::DidChangeTextDocumentParams) {
        tracing::debug!("did_change: {:?}", params);
        let mut params = params;
        let Some(change) = params.content_changes.pop() else {

@ -164,7 +164,7 @@ impl LanguageServer for Backend {
            .await;
    }

    async fn did_save(&self, params: lsp_types::DidSaveTextDocumentParams) {
    async fn did_save(&self, params: ls_types::DidSaveTextDocumentParams) {
        tracing::debug!("did_save: {:?}", params);
        if let Some(text) = params.text {
            self.audit(LspDocumentCommon {

@ -217,15 +217,15 @@ impl Backend {
            .iter()
            .map(|finding| {
                let primary = finding.primary_location();
                lsp_types::Diagnostic {
                    range: lsp_types::Range {
                ls_types::Diagnostic {
                    range: ls_types::Range {
                        start: primary.concrete.location.start_point.into(),
                        end: primary.concrete.location.end_point.into(),
                    },
                    severity: Some(finding.determinations.severity.into()),
                    code: Some(lsp_types::NumberOrString::String(finding.ident.into())),
                    code_description: Some(lsp_types::CodeDescription {
                        href: lsp_types::Uri::from_str(finding.url)
                    code: Some(ls_types::NumberOrString::String(finding.ident.into())),
                    code_description: Some(ls_types::CodeDescription {
                        href: ls_types::Uri::from_str(finding.url)
                            .expect("finding contains an invalid URL somehow"),
                    }),
                    source: Some("zizmor".into()),

@ -248,25 +248,25 @@ impl Backend {
    async fn audit(&self, params: LspDocumentCommon) {
        if let Err(e) = self.audit_inner(params).await {
            self.client
                .log_message(lsp_types::MessageType::ERROR, format!("audit failed: {e}"))
                .log_message(ls_types::MessageType::ERROR, format!("audit failed: {e}"))
                .await;
        }
    }
}

impl From<Severity> for lsp_types::DiagnosticSeverity {
impl From<Severity> for ls_types::DiagnosticSeverity {
    fn from(value: Severity) -> Self {
        // TODO: Does this mapping make sense?
        match value {
            Severity::Informational => lsp_types::DiagnosticSeverity::INFORMATION,
            Severity::Low => lsp_types::DiagnosticSeverity::WARNING,
            Severity::Medium => lsp_types::DiagnosticSeverity::WARNING,
            Severity::High => lsp_types::DiagnosticSeverity::ERROR,
            Severity::Informational => ls_types::DiagnosticSeverity::INFORMATION,
            Severity::Low => ls_types::DiagnosticSeverity::WARNING,
            Severity::Medium => ls_types::DiagnosticSeverity::WARNING,
            Severity::High => ls_types::DiagnosticSeverity::ERROR,
        }
    }
}

impl From<Point> for lsp_types::Position {
impl From<Point> for ls_types::Position {
    fn from(value: Point) -> Self {
        Self {
            line: value.row as u32,
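The `From<Point>` impl above is cut off by the hunk boundary. As a standalone sketch (not taken from this diff), the row/column-to-line/character mapping it implies looks like the following; the `character` field and the simplified `Point`/`Position` structs are assumptions, since only the `line: value.row as u32` line is visible above.

// Standalone sketch: both tree-sitter-style points and LSP positions are
// zero-based, so the conversion is a plain field-by-field cast.
#[derive(Debug, Clone, Copy)]
struct Point {
    row: usize,
    column: usize,
}

#[derive(Debug, Clone, Copy, PartialEq)]
struct Position {
    line: u32,
    character: u32,
}

impl From<Point> for Position {
    fn from(value: Point) -> Self {
        Self {
            line: value.row as u32,
            character: value.column as u32,
        }
    }
}

fn main() {
    let p = Point { row: 20, column: 8 };
    assert_eq!(Position::from(p), Position { line: 20, character: 8 });
    println!("{:?}", Position::from(p));
}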
@ -30,6 +30,7 @@ use tracing_indicatif::{IndicatifLayer, span_ext::IndicatifSpanExt};
use tracing_subscriber::{EnvFilter, layer::SubscriberExt as _, util::SubscriberInitExt as _};

use crate::{
    audit::AuditError,
    config::{Config, ConfigError, ConfigErrorInner},
    github::Client,
    models::AsDocument,

@ -117,6 +118,15 @@ struct App {
    #[arg(long, value_enum, default_value_t)]
    format: OutputFormat,

    /// Whether to render OSC 8 links in the output.
    ///
    /// This affects links under audit IDs, as well as any links
    /// produced by audit rules.
    ///
    /// Only affects `--format=plain` (the default).
    #[arg(long, value_enum, default_value_t, env = "ZIZMOR_RENDER_LINKS")]
    render_links: CliRenderLinks,

    /// Whether to render audit URLs in the output, separately from any URLs
    /// embedded in OSC 8 links.
    ///

@ -324,6 +334,44 @@ pub(crate) enum OutputFormat {
    Github,
}

#[derive(Debug, Default, Copy, Clone, ValueEnum)]
pub(crate) enum CliRenderLinks {
    /// Render OSC 8 links in output if support is detected.
    #[default]
    Auto,
    /// Always render OSC 8 links in output.
    Always,
    /// Never render OSC 8 links in output.
    Never,
}

#[derive(Debug, Copy, Clone)]
pub(crate) enum RenderLinks {
    Always,
    Never,
}

impl From<CliRenderLinks> for RenderLinks {
    fn from(value: CliRenderLinks) -> Self {
        match value {
            CliRenderLinks::Auto => {
                // We render links if stdout is a terminal. This is assumed
                // to preclude CI environments and log files.
                //
                // TODO: Switch this to the support-hyperlinks crate?
                // See: https://github.com/zkat/supports-hyperlinks/pull/8
                if stdout().is_terminal() {
                    RenderLinks::Always
                } else {
                    RenderLinks::Never
                }
            }
            CliRenderLinks::Always => RenderLinks::Always,
            CliRenderLinks::Never => RenderLinks::Never,
        }
    }
}

#[derive(Debug, Default, Copy, Clone, ValueEnum)]
pub(crate) enum CliShowAuditUrls {
    /// Render audit URLs in output automatically based on output format and runtime context.

@ -594,10 +642,10 @@ enum Error {
    #[error("failed to load audit rules")]
    AuditLoad(#[source] anyhow::Error),
    /// An error while running an audit.
    #[error("{ident} failed on {input}")]
    #[error("'{ident}' audit failed on {input}")]
    Audit {
        ident: &'static str,
        source: anyhow::Error,
        source: AuditError,
        input: String,
    },
    /// An error while rendering output.

@ -640,6 +688,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
        ColorMode::Never
    } else if std::env::var("FORCE_COLOR").is_ok()
        || std::env::var("CLICOLOR_FORCE").is_ok()
        || utils::is_ci()
    {
        ColorMode::Always
    } else {

@ -794,7 +843,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
    while let Some(findings) = completion_stream.next().await {
        let findings = findings.map_err(|err| Error::Audit {
            ident: err.ident(),
            source: err.into(),
            source: err,
            input: input.key().to_string(),
        })?;

@ -815,6 +864,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
            &registry,
            &results,
            &app.show_audit_urls.into(),
            &app.render_links.into(),
            app.naches,
        ),
        OutputFormat::Json | OutputFormat::JsonV1 => {

@ -919,6 +969,16 @@ async fn main() -> ExitCode {
            Some(report)
        }
        Error::Collection(err) => match err.inner() {
            CollectionError::NoInputs => {
                let group = Group::with_title(Level::ERROR.primary_title(err.to_string()))
                    .element(Level::HELP.message("collection yielded no auditable inputs"))
                    .element(Level::HELP.message("inputs must contain at least one valid workflow, action, or Dependabot config"));

                let renderer = Renderer::styled();
                let report = renderer.render(&[group]);

                Some(report)
            }
            CollectionError::DuplicateInput(..) => {
                let group = Group::with_title(Level::ERROR.primary_title(err.to_string()))
                    .element(Level::HELP.message(format!(
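For context on the `CliRenderLinks::Auto` arm above, here is a standalone sketch (not part of the diff) of terminal-gated OSC 8 hyperlink rendering. It assumes only `std::io::IsTerminal` and the generic OSC 8 escape form; the URL and label are placeholders, not zizmor's actual output.

use std::io::{stdout, IsTerminal};

// Build an OSC 8 hyperlink: ESC ] 8 ; ; URL ST text ESC ] 8 ; ; ST
fn osc8(url: &str, text: &str) -> String {
    format!("\x1b]8;;{url}\x1b\\{text}\x1b]8;;\x1b\\")
}

fn main() {
    let label = "example finding docs";
    let url = "https://example.com/audits"; // placeholder URL

    if stdout().is_terminal() {
        // Interactive terminal: emit a clickable link.
        println!("{}", osc8(url, label));
    } else {
        // Pipes, files, CI logs: fall back to plain text.
        println!("{label} ({url})");
    }
}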
@ -7,7 +7,7 @@ use github_actions_models::common::Env;
use github_actions_models::common::expr::LoE;
use github_actions_models::workflow::job::Strategy;

use crate::finding::location::Locatable;
use crate::finding::location::{Locatable, SymbolicLocation};
use crate::models::inputs::HasInputs;

pub(crate) mod action;

@ -37,12 +37,6 @@ pub(crate) enum StepBodyCommon<'s> {

/// Common interfaces between workflow and action steps.
pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs {
    /// Returns the step's name, if present.
    fn name(&self) -> Option<&'doc str>;

    /// Returns the step's ID, if present.
    fn id(&self) -> Option<&'doc str>;

    /// Returns the step's index within its parent job or action.
    fn index(&self) -> usize;

@ -70,7 +64,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs {
    ///
    /// Returns `None` if the shell cannot be statically determined, including
    /// if the shell is specified via an expression.
    fn shell(&self) -> Option<&str>;
    fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)>;
}

impl<'a, 'doc, T: StepCommon<'doc>> AsDocument<'a, 'doc> for T {
@ -175,10 +175,13 @@ impl<'doc> Locatable<'doc> for CompositeStep<'doc> {
        ])
    }

    fn location_with_name(&self) -> SymbolicLocation<'doc> {
        match self.inner.name {
            Some(_) => self.location().with_keys(["name".into()]),
            None => self.location(),
    fn location_with_grip(&self) -> SymbolicLocation<'doc> {
        if self.inner.name.is_some() {
            self.location().with_keys(["name".into()])
        } else if self.inner.id.is_some() {
            self.location().with_keys(["id".into()])
        } else {
            self.location()
        }
    }
}

@ -190,14 +193,6 @@ impl HasInputs for CompositeStep<'_> {
}

impl<'doc> StepCommon<'doc> for CompositeStep<'doc> {
    fn name(&self) -> Option<&'doc str> {
        self.name.as_deref()
    }

    fn id(&self) -> Option<&'doc str> {
        self.id.as_deref()
    }

    fn index(&self) -> usize {
        self.index
    }

@ -237,14 +232,19 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> {
        self.action().as_document()
    }

    fn shell(&self) -> Option<&str> {
    fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
        // For composite action steps, shell is always explicitly specified in the YAML.
        if let action::StepBody::Run {
            shell: LoE::Literal(shell),
            ..
        } = &self.inner.body
        {
            Some(shell)
            Some((
                shell,
                self.location()
                    .with_keys(["shell".into()])
                    .annotated("shell defined here"),
            ))
        } else {
            None
        }
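The `location_with_grip` changes above prefer a step's `name`, then its `id`, then the whole step as the annotation anchor. A minimal standalone model of that selection order (not from this diff), with labels standing in for the real `SymbolicLocation` values:

#[derive(Debug, PartialEq)]
enum Grip {
    Name,
    Id,
    WholeStep,
}

// Pick the most specific human-meaningful anchor that exists.
fn grip(name: Option<&str>, id: Option<&str>) -> Grip {
    if name.is_some() {
        Grip::Name
    } else if id.is_some() {
        Grip::Id
    } else {
        Grip::WholeStep
    }
}

fn main() {
    assert_eq!(grip(Some("build"), None), Grip::Name);
    assert_eq!(grip(None, Some("step-1")), Grip::Id);
    assert_eq!(grip(None, None), Grip::WholeStep);
}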
@ -131,7 +131,7 @@ impl<'doc> Locatable<'doc> for Update<'doc> {
            .annotated("this update rule")
    }

    fn location_with_name(&self) -> SymbolicLocation<'doc> {
    fn location_with_grip(&self) -> SymbolicLocation<'doc> {
        self.location()
            .with_keys(["package-ecosystem".into()])
            .annotated("this ecosystem")
@ -384,10 +384,15 @@ impl<'doc, T: JobCommon<'doc>> Locatable<'doc> for T {
            .with_keys(["jobs".into(), self.id().into()])
    }

    fn location_with_name(&self) -> SymbolicLocation<'doc> {
        match self.name() {
            Some(_) => self.location().with_keys(["name".into()]),
            None => self.location(),
    fn location_with_grip(&self) -> SymbolicLocation<'doc> {
        if self.name().is_some() {
            self.location().with_keys(["name".into()])
        } else {
            self.parent()
                .location()
                .annotated("this job")
                .with_keys(["jobs".into(), self.id().into()])
                .key_only()
        }
    }
}

@ -629,10 +634,13 @@ impl<'doc> Locatable<'doc> for Step<'doc> {
            .annotated("this step")
    }

    fn location_with_name(&self) -> SymbolicLocation<'doc> {
        match self.inner.name {
            Some(_) => self.location().with_keys(["name".into()]),
            None => self.location(),
    fn location_with_grip(&self) -> SymbolicLocation<'doc> {
        if self.inner.name.is_some() {
            self.location().with_keys(["name".into()])
        } else if self.inner.id.is_some() {
            self.location().with_keys(["id".into()])
        } else {
            self.location()
        }
    }
}

@ -644,14 +652,6 @@ impl HasInputs for Step<'_> {
}

impl<'doc> StepCommon<'doc> for Step<'doc> {
    fn name(&self) -> Option<&'doc str> {
        self.inner.name.as_deref()
    }

    fn id(&self) -> Option<&'doc str> {
        self.inner.id.as_deref()
    }

    fn index(&self) -> usize {
        self.index
    }

@ -691,7 +691,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> {
        self.workflow().as_document()
    }

    fn shell(&self) -> Option<&str> {
    fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
        // For workflow steps, we can use the existing shell() method
        self.shell()
    }

@ -720,7 +720,7 @@ impl<'doc> Step<'doc> {
    /// if the shell can't be statically inferred.
    ///
    /// Invariant: panics if the step is not a `run:` step.
    pub(crate) fn shell(&self) -> Option<&str> {
    pub(crate) fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
        let StepBody::Run {
            run: _,
            working_directory: _,

@ -736,7 +736,12 @@ impl<'doc> Step<'doc> {
        // If any of these is an expression, we can't infer the shell
        // statically, so we terminate early with `None`.
        let shell = match shell {
            Some(LoE::Literal(shell)) => Some(shell.as_str()),
            Some(LoE::Literal(shell)) => Some((
                shell.as_str(),
                self.location()
                    .with_keys(["shell".into()])
                    .annotated("shell defined here"),
            )),
            Some(LoE::Expr(_)) => return None,
            None => match self
                .job()

@ -744,7 +749,13 @@ impl<'doc> Step<'doc> {
                .as_ref()
                .and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref()))
            {
                Some(LoE::Literal(shell)) => Some(shell.as_str()),
                Some(LoE::Literal(shell)) => Some((
                    shell.as_str(),
                    self.job()
                        .location()
                        .with_keys(["defaults".into(), "run".into(), "shell".into()])
                        .annotated("job default shell defined here"),
                )),
                Some(LoE::Expr(_)) => return None,
                None => match self
                    .workflow()

@ -752,14 +763,30 @@ impl<'doc> Step<'doc> {
                    .as_ref()
                    .and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref()))
                {
                    Some(LoE::Literal(shell)) => Some(shell.as_str()),
                    Some(LoE::Literal(shell)) => Some((
                        shell.as_str(),
                        self.workflow()
                            .location()
                            .with_keys(["defaults".into(), "run".into(), "shell".into()])
                            .annotated("workflow default shell defined here"),
                    )),
                    Some(LoE::Expr(_)) => return None,
                    None => None,
                },
            },
        };

        shell.or_else(|| self.parent.runner_default_shell())
        shell.or_else(|| {
            self.parent.runner_default_shell().map(|shell| {
                (
                    shell,
                    self.job()
                        .location()
                        .with_keys(["runs-on".into()])
                        .annotated("shell implied by runner"),
                )
            })
        })
    }
}

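The reworked `Step::shell` above resolves the shell in a fixed order: the step's own `shell:`, then the job's `defaults.run.shell`, then the workflow's `defaults.run.shell`, then the runner default, and it now returns the definition site alongside the value (returning `None` whenever an expression is involved). A simplified standalone model of just that precedence, with expressions left out, is sketched below; the types here are placeholders, not zizmor's:

#[derive(Debug, Clone, Copy, PartialEq)]
enum Source {
    Step,
    JobDefault,
    WorkflowDefault,
    Runner,
}

// First non-empty layer wins, paired with where it came from.
fn resolve_shell<'a>(
    step: Option<&'a str>,
    job_default: Option<&'a str>,
    workflow_default: Option<&'a str>,
    runner_default: Option<&'a str>,
) -> Option<(&'a str, Source)> {
    step.map(|s| (s, Source::Step))
        .or_else(|| job_default.map(|s| (s, Source::JobDefault)))
        .or_else(|| workflow_default.map(|s| (s, Source::WorkflowDefault)))
        .or_else(|| runner_default.map(|s| (s, Source::Runner)))
}

fn main() {
    // A job-level `defaults.run.shell: cmd` wins when the step says nothing.
    assert_eq!(
        resolve_shell(None, Some("cmd"), Some("bash"), Some("bash")),
        Some(("cmd", Source::JobDefault))
    );
    // With no explicit shell anywhere, the runner default applies.
    assert_eq!(
        resolve_shell(None, None, None, Some("bash")),
        Some(("bash", Source::Runner))
    );
}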
@ -7,7 +7,7 @@ use anstream::{eprintln, print, println};
use owo_colors::OwoColorize;

use crate::{
    ShowAuditUrls,
    RenderLinks, ShowAuditUrls,
    finding::{
        Finding, Severity,
        location::{Location, LocationKind},

@ -44,6 +44,7 @@ impl From<&Severity> for Level<'_> {
pub(crate) fn finding_snippets<'doc>(
    registry: &'doc InputRegistry,
    finding: &'doc Finding<'doc>,
    render_links_mode: &RenderLinks,
) -> Vec<Snippet<'doc, Annotation<'doc>>> {
    // Our finding might span multiple workflows, so we need to group locations
    // by their enclosing workflow to generate each snippet correctly.

@ -68,15 +69,20 @@ pub(crate) fn finding_snippets<'doc>(
    for (input_key, locations) in locations_by_workflow {
        let input = registry.get_input(input_key);

        let path = match render_links_mode {
            RenderLinks::Always => input.link().unwrap_or(input_key.presentation_path()),
            RenderLinks::Never => input_key.presentation_path(),
        };

        snippets.push(
            Snippet::source(input.as_document().source())
                .fold(true)
                .line_start(1)
                .path(input.link().unwrap_or(input_key.presentation_path()))
                .path(path)
                .annotations(locations.iter().map(|loc| {
                    let annotation = match loc.symbolic.link {
                        Some(ref link) => link,
                        None => &loc.symbolic.annotation,
                    let annotation = match (loc.symbolic.link.as_deref(), render_links_mode) {
                        (Some(link), RenderLinks::Always) => link,
                        _ => &loc.symbolic.annotation,
                    };

                    AnnotationKind::from(loc.symbolic.kind)

@ -96,10 +102,11 @@ pub(crate) fn render_findings(
    registry: &InputRegistry,
    findings: &FindingRegistry,
    show_urls_mode: &ShowAuditUrls,
    render_links_mode: &RenderLinks,
    naches_mode: bool,
) {
    for finding in findings.findings() {
        render_finding(registry, finding, show_urls_mode);
        render_finding(registry, finding, show_urls_mode, render_links_mode);
        println!();
    }

@ -192,11 +199,19 @@ pub(crate) fn render_findings(
    }
}

fn render_finding(registry: &InputRegistry, finding: &Finding, show_urls_mode: &ShowAuditUrls) {
    let title = Level::from(&finding.determinations.severity)
fn render_finding(
    registry: &InputRegistry,
    finding: &Finding,
    show_urls_mode: &ShowAuditUrls,
    render_links_mode: &RenderLinks,
) {
    let mut title = Level::from(&finding.determinations.severity)
        .primary_title(finding.desc)
        .id(finding.ident)
        .id_url(finding.url);
        .id(finding.ident);

    if matches!(render_links_mode, RenderLinks::Always) {
        title = title.id_url(finding.url);
    }

    let confidence = format!(
        "audit confidence → {:?}",

@ -204,7 +219,7 @@ fn render_finding(registry: &InputRegistry, finding: &Finding, show_urls_mode: &
    );

    let mut group = Group::with_title(title)
        .elements(finding_snippets(registry, finding))
        .elements(finding_snippets(registry, finding, render_links_mode))
        .element(Level::NOTE.message(confidence));

    if let Some(tip) = &finding.tip {
@ -4,12 +4,8 @@ use anyhow::{Error, anyhow};
use camino::Utf8Path;
use github_actions_expressions::context::{Context, ContextPattern};
use github_actions_models::common::{Env, expr::LoE};
use jsonschema::{
    BasicOutput::{Invalid, Valid},
    Validator,
    output::{ErrorDescription, OutputUnit},
    validator_for,
};
use jsonschema::ErrorEntry;
use jsonschema::{Validator, validator_for};
use std::ops::{Deref, Range};
use std::{fmt::Write, sync::LazyLock};

@ -307,11 +303,11 @@ pub(crate) static DEFAULT_ENVIRONMENT_VARIABLES: &[(
    ),
];

fn parse_validation_errors(errors: Vec<OutputUnit<ErrorDescription>>) -> Error {
fn parse_validation_errors(errors: Vec<ErrorEntry<'_>>) -> Error {
    let mut message = String::new();

    for error in errors {
        let description = error.error_description().to_string();
        let description = error.error.to_string();
        // HACK: error descriptions are sometimes a long rats' nest
        // of JSON objects. We should render this in a palatable way
        // but doing so is nontrivial, so we just skip them for now.

@ -319,7 +315,7 @@ fn parse_validation_errors(errors: Vec<OutputUnit<ErrorDescription>>) -> Error {
        // the error for an unmatched "oneOf", so these errors are
        // typically less useful anyways.
        if !description.starts_with("{") {
            let location = error.instance_location().as_str();
            let location = error.instance_location.as_str();
            if location.is_empty() {
                writeln!(message, "{description}").expect("I/O on a String failed");
            } else {

@ -379,18 +375,19 @@
            // 2. The input is semantically invalid, and the user
            //    needs to fix it.
            // We use the JSON schema `validator` to separate these.
            Ok(raw_value) => match validator
                .apply(
            Ok(raw_value) => {
                let evaluation = validator.evaluate(
                    &serde_json::to_value(&raw_value)
                        .map_err(|e| CollectionError::Syntax(e.into()))?,
                )
                .basic()
            {
                Valid(_) => Err(e.into()),
                Invalid(errors) => {
                );

                if evaluation.flag().valid {
                    Err(e.into())
                } else {
                    let errors = evaluation.iter_errors().collect::<Vec<_>>();
                    Err(CollectionError::Schema(parse_validation_errors(errors)))
                }
                },
            }
            // Syntax error.
            Err(e) => Err(CollectionError::Syntax(e.into())),
        }
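The collection change above distinguishes inputs that deserialize but violate the schema from inputs that are schema-valid, using the newer `jsonschema` evaluation API (`evaluate`, `flag().valid`, `iter_errors`). A rough standalone sketch of the same split follows; it assumes a recent `jsonschema` release exposing `validator_for`, `is_valid`, and `iter_errors`, which may not match the exact API used in the diff, and the schema/instance values are invented for illustration.

use serde_json::json;

fn main() {
    // Toy schema: an object that must at least have an `on` key.
    let schema = json!({
        "type": "object",
        "properties": { "on": { "type": ["object", "string", "array"] } },
        "required": ["on"]
    });
    // Parses fine as YAML/JSON, but is missing `on`, so it is schema-invalid.
    let instance = json!({ "name": "ci" });

    let validator = jsonschema::validator_for(&schema).expect("invalid schema");

    if validator.is_valid(&instance) {
        println!("schema-valid");
    } else {
        // Collect human-readable messages, much like parse_validation_errors.
        for error in validator.iter_errors(&instance) {
            println!("{} (at `{}`)", error, error.instance_path);
        }
    }
}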
@ -196,15 +196,11 @@ fn audit_unpinned_uses() -> anyhow::Result<()> {

    assert_value_match(&findings, "$[0].determinations.confidence", "High");
    assert_value_match(&findings, "$[0].determinations.severity", "Medium");
    assert_value_match(
        &findings,
        "$[0].locations[0].concrete.feature",
        "uses: docker://ubuntu",
    );
    assert_value_match(&findings, "$[0].locations[0].concrete.feature", "ubuntu");
    assert_value_match(
        &findings,
        "$[1].locations[0].concrete.feature",
        "uses: docker://ghcr.io/pypa/gh-action-pypi-publish",
        "ghcr.io/pypa/gh-action-pypi-publish",
    );

    Ok(())
@ -7,7 +7,7 @@ fn test_regular_persona() -> anyhow::Result<()> {
|
|||
zizmor()
|
||||
.input(input_under_test("anonymous-definition.yml"))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (2 suppressed)"
|
||||
@"No findings to report. Good job! (2 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -34,17 +34,16 @@ fn test_pedantic_persona() -> anyhow::Result<()> {
|
|||
| |__________________________________________^ this workflow
|
||||
|
|
||||
= note: audit confidence → High
|
||||
= tip: use 'name: ...' to give this workflow a name
|
||||
|
||||
info[anonymous-definition]: workflow or action definition without a name
|
||||
--> @@INPUT@@:21:3
|
||||
|
|
||||
21 | / will-trigger:
|
||||
22 | | runs-on: ubuntu-latest
|
||||
23 | | steps:
|
||||
24 | | - run: "echo this job will trigger"
|
||||
| |__________________________________________^ this job
|
||||
21 | will-trigger:
|
||||
| ^^^^^^^^^^^^ this job
|
||||
|
|
||||
= note: audit confidence → High
|
||||
= tip: use 'name: ...' to give this job a name
|
||||
|
||||
2 findings: 1 informational, 1 low, 0 medium, 0 high
|
||||
"#
|
||||
|
|
|
|||
|
|
@ -26,12 +26,12 @@ fn test_regular_persona() -> anyhow::Result<()> {
|
|||
= note: audit confidence → High
|
||||
|
||||
warning[archived-uses]: action or reusable workflow from archived repository
|
||||
--> @@INPUT@@:24:5
|
||||
--> @@INPUT@@:24:11
|
||||
|
|
||||
23 | name: archived-uses-reusable
|
||||
| ---------------------------- this job
|
||||
24 | uses: actions/setup-ruby/.github/workflows/notreal.yml@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ repository is archived
|
||||
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -50,12 +50,12 @@ fn test_composite_action() -> anyhow::Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
warning[archived-uses]: action or reusable workflow from archived repository
|
||||
--> @@INPUT@@action.yml:9:7
|
||||
--> @@INPUT@@action.yml:9:13
|
||||
|
|
||||
8 | - name: setup ruby
|
||||
| ---------------- this step
|
||||
9 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ repository is archived
|
||||
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
|
|||
|
|
@ -80,28 +80,28 @@ fn test_jobs_missing_no_cancel() -> anyhow::Result<()> {
|
|||
.args(["--persona=pedantic"])
|
||||
.run()?,
|
||||
@r"
|
||||
help[concurrency-limits]: insufficient job-level concurrency limits
|
||||
--> @@INPUT@@:9:5
|
||||
|
|
||||
9 | concurrency: group
|
||||
| ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress
|
||||
|
|
||||
= note: audit confidence → High
|
||||
help[concurrency-limits]: insufficient job-level concurrency limits
|
||||
--> @@INPUT@@:9:5
|
||||
|
|
||||
9 | concurrency: group
|
||||
| ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
help[concurrency-limits]: insufficient job-level concurrency limits
|
||||
--> @@INPUT@@:1:1
|
||||
|
|
||||
1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency
|
||||
2 | | on: push
|
||||
3 | | permissions: {}
|
||||
... |
|
||||
17 | | - name: 2-ok
|
||||
18 | | run: echo ok
|
||||
| |___________________^ missing concurrency setting
|
||||
|
|
||||
= note: audit confidence → High
|
||||
help[concurrency-limits]: insufficient job-level concurrency limits
|
||||
--> @@INPUT@@:1:1
|
||||
|
|
||||
1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency
|
||||
2 | | on: push
|
||||
3 | | permissions: {}
|
||||
... |
|
||||
17 | | - name: 2-ok
|
||||
18 | | run: echo ok
|
||||
| |___________________^ missing concurrency setting
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
2 findings: 0 informational, 2 low, 0 medium, 0 high
|
||||
2 findings: 0 informational, 2 low, 0 medium, 0 high
|
||||
"
|
||||
);
|
||||
|
||||
|
|
|
|||
|
|
@ -165,7 +165,7 @@ fn test_config_short_cooldown_permitted() -> anyhow::Result<()> {
|
|||
.input(input_under_test("dependabot-cooldown/default-days-too-short/dependabot.yml"))
|
||||
.config(input_under_test("dependabot-cooldown/configs/cooldown-one-day.yml"))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job!"
|
||||
@"No findings to report. Good job!"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -27,26 +27,26 @@ fn test_deny_all() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:13:9
|
||||
--> @@INPUT@@:13:15
|
||||
|
|
||||
13 | - uses: actions/setup-python@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:14:9
|
||||
--> @@INPUT@@:14:15
|
||||
|
|
||||
14 | - uses: pypa/gh-action-pypi-publish@release/v1
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:15:9
|
||||
--> @@INPUT@@:15:15
|
||||
|
|
||||
15 | - uses: actions/checkout@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -68,10 +68,10 @@ fn test_allow_some() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:13:9
|
||||
--> @@INPUT@@:13:15
|
||||
|
|
||||
13 | - uses: actions/setup-python@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -93,18 +93,18 @@ fn test_deny_some() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:14:9
|
||||
--> @@INPUT@@:14:15
|
||||
|
|
||||
14 | - uses: pypa/gh-action-pypi-publish@release/v1
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:15:9
|
||||
--> @@INPUT@@:15:15
|
||||
|
|
||||
15 | - uses: actions/checkout@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -126,18 +126,18 @@ fn test_deny_some_refs() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:13:9
|
||||
--> @@INPUT@@:13:15
|
||||
|
|
||||
13 | - uses: actions/setup-python@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:14:9
|
||||
--> @@INPUT@@:14:15
|
||||
|
|
||||
14 | - uses: pypa/gh-action-pypi-publish@release/v1
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -159,10 +159,10 @@ fn test_allow_some_refs() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[forbidden-uses]: forbidden action used
|
||||
--> @@INPUT@@:15:9
|
||||
--> @@INPUT@@:15:15
|
||||
|
|
||||
15 | - uses: actions/checkout@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
|
|||
31 crates/zizmor/tests/integration/audit/impostor_commit.rs Normal file

@ -0,0 +1,31 @@
use crate::common::{input_under_test, zizmor};

#[cfg_attr(not(feature = "gh-token-tests"), ignore)]
#[test]
fn test_regular_persona() -> anyhow::Result<()> {
    insta::assert_snapshot!(
        zizmor()
            .input(input_under_test("impostor-commit.yml"))
            .offline(false)
            .run()?,
        @r"
    error[impostor-commit]: commit with no history in referenced repository
      --> @@INPUT@@:29:15
       |
    29 |   - uses: actions/checkout@c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e
       |  -       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ uses a commit that doesn't belong to the specified org/repo
       |  _________|
       | |
    30 | |   with:
    31 | |     persist-credentials: false
       | |____________________________________- this step
       |
       = note: audit confidence → High
       = note: this finding has an auto-fix

    4 findings (3 suppressed, 1 fixable): 0 informational, 0 low, 0 medium, 1 high
    "
    );

    Ok(())
}
@ -13,7 +13,7 @@ mod excessive_permissions;
mod forbidden_uses;
mod github_env;
// mod hardcoded_container_credentials; // TODO
// mod impostor_commit; // TODO
mod impostor_commit;
mod insecure_commands;
// mod known_vulnerable_actions; // TODO
mod obfuscation;
@ -237,3 +237,56 @@ fn test_issue_1177_repro_pedantic() -> Result<()> {

    Ok(())
}

/// Reproduces issue #1414: the obfuscation audit should not crash if the
/// user has `shell: cmd` defined as a job or workflow default rather than
/// at the step level.
///
/// See: https://github.com/zizmorcore/zizmor/issues/1414
#[test]
fn test_issue_1414_repro() -> Result<()> {
    insta::assert_snapshot!(
        zizmor()
            .input(input_under_test("obfuscation/issue-1414-repro.yml"))
            .run()?,
        @r"
    help[obfuscation]: obfuscated usage of GitHub Actions features
      --> @@INPUT@@:13:9
       |
    13 |     shell: cmd
       |     ^^^^^^^^^^ job default shell defined here
    14 |   steps:
    15 |   - name: say hi
       |     ------------ Windows CMD shell limits analysis
       |
       = note: audit confidence → High
       = tip: use 'shell: pwsh' or 'shell: bash' for improved analysis

    3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high
    "
    );

    // Like #1414, but with `shell: cmd` defined at the workflow level.
    insta::assert_snapshot!(
        zizmor()
            .input(input_under_test("obfuscation/workflow-cmd-default-shell.yml"))
            .run()?,
        @r"
    help[obfuscation]: obfuscated usage of GitHub Actions features
      --> @@INPUT@@:10:5
       |
    10 |   shell: cmd
       |   ^^^^^^^^^^ workflow default shell defined here
    ...
    16 |   - name: say hi
       |     ------------ Windows CMD shell limits analysis
       |
       = note: audit confidence → High
       = tip: use 'shell: pwsh' or 'shell: bash' for improved analysis

    3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high
    "
    );

    Ok(())
}
@ -33,7 +33,7 @@ fn test_issue_518_repro() -> Result<()> {
|
|||
.input(input_under_test("ref-confusion/issue-518-repro.yml"))
|
||||
.offline(false)
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (1 ignored, 1 suppressed)"
|
||||
@"No findings to report. Good job! (1 ignored, 1 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ fn test_nested_annotated_tags() -> Result<()> {
|
|||
"ref-version-mismatch/nested-annotated-tags.yml"
|
||||
))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (1 suppressed)"
|
||||
@"No findings to report. Good job! (1 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -8,10 +8,10 @@ fn secrets_inherit() -> anyhow::Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
warning[secrets-inherit]: secrets unconditionally inherited by called workflow
|
||||
--> @@INPUT@@:10:5
|
||||
--> @@INPUT@@:10:11
|
||||
|
|
||||
10 | uses: octo-org/example-repo/.github/workflows/called-workflow.yml@main
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
|
||||
11 | # NOT OK: unconditionally inherits
|
||||
12 | secrets: inherit
|
||||
| ---------------- inherits all parent secrets
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ fn test_self_hosted_default() -> Result<()> {
|
|||
zizmor()
|
||||
.input(input_under_test("self-hosted.yml"))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (1 suppressed)"
|
||||
@"No findings to report. Good job! (1 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -11,10 +11,10 @@ fn test_pedantic_persona() -> anyhow::Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
help[stale-action-refs]: commit hash does not point to a Git tag
|
||||
--> @@INPUT@@:34:7
|
||||
--> @@INPUT@@:34:13
|
||||
|
|
||||
34 | - uses: actions/checkout@009b9ae9e446ad8d9b8c809870b0fbcc5e03573e
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
|
|||
|
|
@ -183,7 +183,7 @@ fn test_issue_418_repro() -> Result<()> {
|
|||
zizmor()
|
||||
.input(input_under_test("template-injection/issue-418-repro.yml"))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (3 suppressed)"
|
||||
@"No findings to report. Good job! (3 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -247,10 +247,10 @@ fn test_pr_425_backstop_action() -> Result<()> {
|
|||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:29:7
|
||||
--> @@INPUT@@:29:13
|
||||
|
|
||||
29 | uses: azure/powershell@whatever
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
|
|||
|
|
@ -64,7 +64,7 @@ fn test_undocumented_permissions_default() -> Result<()> {
|
|||
zizmor()
|
||||
.input(input_under_test("undocumented-permissions.yml"))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (5 suppressed)"
|
||||
@"No findings to report. Good job! (5 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -78,7 +78,7 @@ fn test_documented_permissions_pedantic() -> Result<()> {
|
|||
.input(input_under_test("undocumented-permissions/documented.yml"))
|
||||
.args(["--persona=pedantic"])
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (1 ignored)"
|
||||
@"No findings to report. Good job! (1 ignored)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -94,7 +94,7 @@ fn test_contents_read_only_pedantic() -> Result<()> {
|
|||
))
|
||||
.args(["--persona=pedantic"])
|
||||
.run()?,
|
||||
@r"No findings to report. Good job!"
|
||||
@"No findings to report. Good job!"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -110,7 +110,7 @@ fn test_empty_permissions_pedantic() -> Result<()> {
|
|||
))
|
||||
.args(["--persona=pedantic"])
|
||||
.run()?,
|
||||
@r"No findings to report. Good job!"
|
||||
@"No findings to report. Good job!"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -10,18 +10,18 @@ fn test_unpinned_uses_pedantic() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
warning[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:21:9
|
||||
--> @@INPUT@@:21:24
|
||||
|
|
||||
21 | - uses: docker://ubuntu
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
||||
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
warning[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:27:9
|
||||
--> @@INPUT@@:27:24
|
||||
|
|
||||
27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -40,18 +40,18 @@ fn test_unpinned_uses_default() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
warning[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:21:9
|
||||
--> @@INPUT@@:21:24
|
||||
|
|
||||
21 | - uses: docker://ubuntu
|
||||
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
||||
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
warning[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:27:9
|
||||
--> @@INPUT@@:27:24
|
||||
|
|
||||
27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -71,18 +71,18 @@ fn test_action_pedantic() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:12:7
|
||||
--> @@INPUT@@:12:13
|
||||
|
|
||||
12 | uses: asdf-vm/actions/setup@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:15:7
|
||||
--> @@INPUT@@:15:13
|
||||
|
|
||||
15 | uses: asdf-vm/actions/setup@main
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -114,7 +114,7 @@ fn test_issue_659_repro() -> Result<()> {
|
|||
.input(input_under_test("unpinned-uses/issue-659-repro.yml"))
|
||||
.args(["--pedantic"])
|
||||
.run()?,
|
||||
@r"No findings to report. Good job!"
|
||||
@"No findings to report. Good job!"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -129,10 +129,10 @@ fn test_default_config() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:22:9
|
||||
--> @@INPUT@@:22:15
|
||||
|
|
||||
22 | - uses: pypa/gh-action-pypi-publish@release/v1
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -155,42 +155,42 @@ fn test_hash_pin_everything_config() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:12:9
|
||||
--> @@INPUT@@:12:15
|
||||
|
|
||||
12 | - uses: actions/setup-python@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:14:9
|
||||
--> @@INPUT@@:14:15
|
||||
|
|
||||
14 | - uses: actions/checkout@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:22:9
|
||||
--> @@INPUT@@:22:15
|
||||
|
|
||||
22 | - uses: pypa/gh-action-pypi-publish@release/v1
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:24:9
|
||||
--> @@INPUT@@:24:15
|
||||
|
|
||||
24 | - uses: github/codeql-action/init@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:26:9
|
||||
--> @@INPUT@@:26:15
|
||||
|
|
||||
26 | - uses: github/codeql-action/upload-sarif@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -226,26 +226,26 @@ fn test_composite_config() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:12:9
|
||||
--> @@INPUT@@:12:15
|
||||
|
|
||||
12 | - uses: actions/setup-python@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:24:9
|
||||
--> @@INPUT@@:24:15
|
||||
|
|
||||
24 | - uses: github/codeql-action/init@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:26:9
|
||||
--> @@INPUT@@:26:15
|
||||
|
|
||||
26 | - uses: github/codeql-action/upload-sarif@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -265,18 +265,18 @@ fn test_composite_config_2() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:24:9
|
||||
--> @@INPUT@@:24:15
|
||||
|
|
||||
24 | - uses: github/codeql-action/init@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:26:9
|
||||
--> @@INPUT@@:26:15
|
||||
|
|
||||
26 | - uses: github/codeql-action/upload-sarif@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -296,42 +296,42 @@ fn test_empty_config() -> Result<()> {
|
|||
.run()?,
|
||||
@r"
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:12:9
|
||||
--> @@INPUT@@:12:15
|
||||
|
|
||||
12 | - uses: actions/setup-python@v4
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:14:9
|
||||
--> @@INPUT@@:14:15
|
||||
|
|
||||
14 | - uses: actions/checkout@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:22:9
|
||||
--> @@INPUT@@:22:15
|
||||
|
|
||||
22 | - uses: pypa/gh-action-pypi-publish@release/v1
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:24:9
|
||||
--> @@INPUT@@:24:15
|
||||
|
|
||||
24 | - uses: github/codeql-action/init@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:26:9
|
||||
--> @@INPUT@@:26:15
|
||||
|
|
||||
26 | - uses: github/codeql-action/upload-sarif@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
|
|||
|
|
@ -357,7 +357,7 @@ fn test_issue_1191_repro() -> Result<()> {
|
|||
"use-trusted-publishing/issue-1191-repro.yml"
|
||||
))
|
||||
.run()?,
|
||||
@r"No findings to report. Good job! (3 suppressed)"
|
||||
@"No findings to report. Good job! (3 suppressed)"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -42,6 +42,7 @@ pub struct Zizmor {
|
|||
stdin: Option<String>,
|
||||
unbuffer: bool,
|
||||
offline: bool,
|
||||
gh_token: bool,
|
||||
inputs: Vec<String>,
|
||||
config: Option<String>,
|
||||
no_config: bool,
|
||||
|
|
@ -53,13 +54,19 @@ pub struct Zizmor {
|
|||
impl Zizmor {
|
||||
/// Create a new zizmor runner.
|
||||
pub fn new() -> Self {
|
||||
let cmd = Command::new(cargo::cargo_bin!());
|
||||
let mut cmd = Command::new(cargo::cargo_bin!());
|
||||
|
||||
// Our child `zizmor` process starts with a clean environment, to
|
||||
// ensure we explicitly test interactions with things like `CI`
|
||||
// and `GH_TOKEN`.
|
||||
cmd.env_clear();
|
||||
|
||||
Self {
|
||||
cmd,
|
||||
stdin: None,
|
||||
unbuffer: false,
|
||||
offline: true,
|
||||
gh_token: true,
|
||||
inputs: vec![],
|
||||
config: None,
|
||||
no_config: false,
|
||||
|
|
@ -84,11 +91,6 @@ impl Zizmor {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn unsetenv(mut self, key: &str) -> Self {
|
||||
self.cmd.env_remove(key);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn input(mut self, input: impl Into<String>) -> Self {
|
||||
self.inputs.push(input.into());
|
||||
self
|
||||
|
|
@ -114,6 +116,11 @@ impl Zizmor {
|
|||
self
|
||||
}
|
||||
|
||||
pub fn gh_token(mut self, flag: bool) -> Self {
|
||||
self.gh_token = flag;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn output(mut self, output: OutputMode) -> Self {
|
||||
self.output = output;
|
||||
self
|
||||
|
|
@ -147,7 +154,12 @@ impl Zizmor {
|
|||
} else {
|
||||
// If we're running in online mode, we pre-assert the
|
||||
// presence of GH_TOKEN to make configuration failures more obvious.
|
||||
std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?;
|
||||
let token =
|
||||
std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?;
|
||||
|
||||
if self.gh_token {
|
||||
self.cmd.env("GH_TOKEN", token);
|
||||
}
|
||||
}
|
||||
|
||||
if self.no_config && self.config.is_some() {
|
||||
|
|
|
|||
|
|
@ -164,6 +164,31 @@ fn test_discovers_config_in_dotgithub() -> anyhow::Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensures we correctly discover a `zizmor.yaml` configuration file in a `.github`
|
||||
/// subdirectory of a given input directory, i.e.
|
||||
/// `config-in-dotgithub/.github/zizmor.yaml` in this case.
|
||||
///
|
||||
/// This tests that both `.yml` and `.yaml` extensions are supported.
|
||||
#[test]
|
||||
fn test_discovers_dotyaml_config_in_dotgithub() -> anyhow::Result<()> {
|
||||
insta::assert_snapshot!(
|
||||
zizmor()
|
||||
.input(input_under_test("config-scenarios/dotyaml-config-in-dotgithub"))
|
||||
.setenv("RUST_LOG", "zizmor::config=debug")
|
||||
.output(OutputMode::Both)
|
||||
.run()?,
|
||||
@r"
|
||||
🌈 zizmor v@@VERSION@@
|
||||
DEBUG zizmor::config: discovering config for local input `@@INPUT@@`
|
||||
DEBUG zizmor::config: attempting config discovery in `@@INPUT@@`
|
||||
DEBUG zizmor::config: found config candidate at `@@INPUT@@/.github/zizmor.yaml`
|
||||
No findings to report. Good job! (1 ignored, 2 suppressed)
|
||||
",
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensures we correctly discover a configuration file in a `.github`
|
||||
/// subdirectory from an input filename, i.e. going from
|
||||
/// `config-in-dotgithub/.github/workflows/hackme.yml`
|
||||
|
|
|
|||
|
|
@ -83,13 +83,21 @@ fn menagerie() -> Result<()> {
|
|||
|
||||
#[test]
|
||||
fn color_control_basic() -> Result<()> {
|
||||
// No terminal, so no color by default.
|
||||
// No terminal and not CI, so no color by default.
|
||||
let no_color_default_output = zizmor()
|
||||
.output(OutputMode::Both)
|
||||
.input(input_under_test("e2e-menagerie"))
|
||||
.run()?;
|
||||
assert!(!no_color_default_output.contains("\x1b["));
|
||||
|
||||
// No terminal but CI, so color by default.
|
||||
let color_default_ci_output = zizmor()
|
||||
.setenv("CI", "true")
|
||||
.output(OutputMode::Both)
|
||||
.input(input_under_test("e2e-menagerie"))
|
||||
.run()?;
|
||||
assert!(color_default_ci_output.contains("\x1b["));
|
||||
|
||||
// Force color via --color=always.
|
||||
let forced_color_via_arg_output = zizmor()
|
||||
.output(OutputMode::Both)
|
||||
|
|
@ -251,6 +259,25 @@ fn invalid_inputs() -> Result<()> {
|
|||
);
|
||||
}
|
||||
|
||||
insta::assert_snapshot!(
|
||||
zizmor()
|
||||
.expects_failure(true)
|
||||
.input(input_under_test("invalid/empty/"))
|
||||
.args(["--strict-collection"])
|
||||
.run()?,
|
||||
@r"
|
||||
🌈 zizmor v@@VERSION@@
|
||||
fatal: no audit was performed
|
||||
error: no inputs collected
|
||||
|
|
||||
= help: collection yielded no auditable inputs
|
||||
= help: inputs must contain at least one valid workflow, action, or Dependabot config
|
||||
|
||||
Caused by:
|
||||
no inputs collected
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@ -291,7 +318,13 @@ fn test_issue_1394() -> Result<()> {
|
|||
🌈 zizmor v@@VERSION@@
|
||||
WARN collect_inputs: zizmor::registry::input: failed to parse input: jobs.demo.steps[0]: duplicate entry with key "env" at line 10 column 9
|
||||
fatal: no audit was performed
|
||||
no inputs collected
|
||||
error: no inputs collected
|
||||
|
|
||||
= help: collection yielded no auditable inputs
|
||||
= help: inputs must contain at least one valid workflow, action, or Dependabot config
|
||||
|
||||
Caused by:
|
||||
no inputs collected
|
||||
"#
|
||||
);
|
||||
|
||||
|
|
@ -386,10 +419,10 @@ fn issue_1065() -> Result<()> {
|
|||
= note: audit confidence → Medium
|
||||
|
||||
error[unpinned-uses]: unpinned action reference
|
||||
--> @@INPUT@@:16:9
|
||||
--> @@INPUT@@:16:15
|
||||
|
|
||||
16 | uses: thollander/actions-comment-pull-request@v3
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
||||
|
|
||||
= note: audit confidence → High
|
||||
|
||||
|
|
@ -481,10 +514,10 @@ fn issue_1286() -> Result<()> {
|
|||
@r"
|
||||
🌈 zizmor v@@VERSION@@
|
||||
fatal: no audit was performed
|
||||
ref-confusion failed on file://@@INPUT@@
|
||||
'ref-confusion' audit failed on file://@@INPUT@@
|
||||
|
||||
Caused by:
|
||||
0: error in ref-confusion
|
||||
0: error in 'ref-confusion' audit
|
||||
1: couldn't list branches for woodruffw-experiments/this-does-not-exist
|
||||
2: can't access woodruffw-experiments/this-does-not-exist: missing or you have no access
|
||||
",
|
||||
|
|
@ -575,7 +608,6 @@ fn test_cant_retrieve_offline() -> Result<()> {
|
|||
zizmor()
|
||||
.expects_failure(true)
|
||||
.offline(true)
|
||||
.unsetenv("GH_TOKEN")
|
||||
.args(["pypa/sampleproject"])
|
||||
.run()?,
|
||||
@r"
|
||||
|
|
@@ -601,7 +633,7 @@ fn test_cant_retrieve_no_gh_token() -> Result<()> {
zizmor()
.expects_failure(true)
.offline(false)
.unsetenv("GH_TOKEN")
.gh_token(false)
.args(["pypa/sampleproject"])
.run()?,
@r"
@@ -1,6 +1,5 @@
---
source: crates/zizmor/tests/integration/e2e/json_v1.rs
assertion_line: 17
expression: output
---
[
@@ -249,7 +248,7 @@ expression: output
"given_path": "@@INPUT@@"
}
},
"annotation": "action is not pinned to a tag, branch, or hash ref",
"annotation": "image is not pinned to a tag, branch, or hash ref",
"route": {
"route": [
{
@@ -269,25 +268,32 @@ expression: output
}
]
},
"feature_kind": "Normal",
"feature_kind": {
"Subfeature": {
"after": 0,
"fragment": {
"Raw": "ubuntu"
}
}
},
"kind": "Primary"
},
"concrete": {
"location": {
"start_point": {
"row": 20,
"column": 8
"column": 23
},
"end_point": {
"row": 20,
"column": 29
},
"offset_span": {
"start": 406,
"start": 421,
"end": 427
}
},
"feature": "uses: docker://ubuntu",
"feature": "docker://ubuntu",
"comments": []
}
}
@@ -312,7 +318,7 @@ expression: output
"given_path": "@@INPUT@@"
}
},
"annotation": "action is not pinned to a tag, branch, or hash ref",
"annotation": "image is not pinned to a tag, branch, or hash ref",
"route": {
"route": [
{
@@ -332,25 +338,32 @@ expression: output
}
]
},
"feature_kind": "Normal",
"feature_kind": {
"Subfeature": {
"after": 0,
"fragment": {
"Raw": "ghcr.io/pypa/gh-action-pypi-publish"
}
}
},
"kind": "Primary"
},
"concrete": {
"location": {
"start_point": {
"row": 26,
"column": 8
"column": 23
},
"end_point": {
"row": 26,
"column": 58
},
"offset_span": {
"start": 531,
"start": 546,
"end": 581
}
},
"feature": "uses: docker://ghcr.io/pypa/gh-action-pypi-publish",
"feature": "docker://ghcr.io/pypa/gh-action-pypi-publish",
"comments": []
}
}
@@ -156,15 +156,11 @@ error[dangerous-triggers]: use of fundamentally insecure workflow trigger
error[bot-conditions]: spoofable bot actor check
--> .github/workflows/bot-conditions.yml:18:9
|
16 | / hackme:
17 | | runs-on: ubuntu-latest
18 | | if: github.actor == 'dependabot[bot]'
| | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ actor context may be spoofable
19 | | steps:
... |
33 | | run: echo hello
34 | | if: github.actor == 'notabot'
| |______________________________________- this job
16 | hackme:
| ------ this job
17 | runs-on: ubuntu-latest
18 | if: github.actor == 'dependabot[bot]'
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ actor context may be spoofable
|
= note: audit confidence → High
= note: this finding has an auto-fix
@@ -488,26 +484,26 @@ warning[excessive-permissions]: overly broad permissions
= note: audit confidence → Medium

error[unpinned-uses]: unpinned action reference
--> .github/workflows/known-vulnerable-actions.yml:19:9
--> .github/workflows/known-vulnerable-actions.yml:19:15
|
19 | - uses: atlassian/gajira-create@v1.0.1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/known-vulnerable-actions.yml:25:9
--> .github/workflows/known-vulnerable-actions.yml:25:15
|
25 | - uses: rlespinasse/github-slug-action@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/known-vulnerable-actions.yml:28:9
--> .github/workflows/known-vulnerable-actions.yml:28:15
|
28 | - uses: rlespinasse/github-slug-action@4.0.1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

@@ -610,90 +606,90 @@ info[use-trusted-publishing]: prefer trusted publishing for authentication
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:23:9
--> .github/workflows/pypi-manual-credential.yml:23:15
|
23 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:27:9
--> .github/workflows/pypi-manual-credential.yml:27:15
|
27 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:33:9
--> .github/workflows/pypi-manual-credential.yml:33:15
|
33 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:39:9
--> .github/workflows/pypi-manual-credential.yml:39:15
|
39 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:45:9
--> .github/workflows/pypi-manual-credential.yml:45:15
|
45 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:51:9
--> .github/workflows/pypi-manual-credential.yml:51:15
|
51 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:58:9
--> .github/workflows/pypi-manual-credential.yml:58:15
|
58 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:66:9
--> .github/workflows/pypi-manual-credential.yml:66:15
|
66 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:73:9
--> .github/workflows/pypi-manual-credential.yml:73:15
|
73 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/pypi-manual-credential.yml:81:9
--> .github/workflows/pypi-manual-credential.yml:81:15
|
81 | uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/ref-confusion.yml:26:9
--> .github/workflows/ref-confusion.yml:26:15
|
26 | - uses: woodruffw/gha-hazmat/ref-confusion@confusable
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

@@ -768,10 +764,10 @@ warning[excessive-permissions]: overly broad permissions
= note: audit confidence → Medium

warning[secrets-inherit]: secrets unconditionally inherited by called workflow
--> .github/workflows/secrets-inherit.yml:16:5
--> .github/workflows/secrets-inherit.yml:16:11
|
16 | uses: octo-org/example-repo/.github/workflows/called-workflow.yml@main
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
17 | # NOT OK: unconditionally inherits
18 | secrets: inherit
| ---------------- inherits all parent secrets
@@ -896,18 +892,18 @@ warning[excessive-permissions]: overly broad permissions
= note: audit confidence → Medium

warning[unpinned-uses]: unpinned action reference
--> .github/workflows/unpinned.yml:20:9
--> .github/workflows/unpinned.yml:20:24
|
20 | - uses: docker://ubuntu
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
|
= note: audit confidence → High

warning[unpinned-uses]: unpinned action reference
--> .github/workflows/unpinned.yml:26:9
--> .github/workflows/unpinned.yml:26:24
|
26 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
|
= note: audit confidence → High

@@ -5,4 +5,10 @@ expression: "zizmor().expects_failure(true).input(input_under_test(&format!(\"in
🌈 zizmor v@@VERSION@@
WARN collect_inputs: zizmor::registry::input: failed to validate input as action: input does not match expected validation schema
fatal: no audit was performed
no inputs collected
error: no inputs collected
|
= help: collection yielded no auditable inputs
= help: inputs must contain at least one valid workflow, action, or Dependabot config

Caused by:
no inputs collected
@@ -5,4 +5,10 @@ expression: "zizmor().expects_failure(true).input(input_under_test(&format!(\"in
🌈 zizmor v@@VERSION@@
WARN collect_inputs: zizmor::registry::input: failed to validate input as workflow: input does not match expected validation schema
fatal: no audit was performed
no inputs collected
error: no inputs collected
|
= help: collection yielded no auditable inputs
= help: inputs must contain at least one valid workflow, action, or Dependabot config

Caused by:
no inputs collected
@@ -8,5 +8,5 @@ failed to load file://@@INPUT@@ as workflow

Caused by:
0: input does not match expected validation schema
1: on.workflow_call.inputs.input: "type" is a required property
Additional properties are not allowed ('boom' was unexpected)
1: Additional properties are not allowed ('boom' was unexpected)
on.workflow_call.inputs.input: "type" is a required property
@@ -24,58 +24,58 @@ expression: "zizmor().offline(false).output(OutputMode::Both).args([\"--no-onlin
INFO audit: zizmor: 🌈 completed .github/workflows/tail-call.yml
INFO audit: zizmor: 🌈 completed .github/workflows/verify-ensurepip-wheels.yml
error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:117:9
--> .github/workflows/build.yml:117:15
|
117 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:313:7
--> .github/workflows/build.yml:313:13
|
313 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:368:7
--> .github/workflows/build.yml:368:13
|
368 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:477:7
--> .github/workflows/build.yml:477:13
|
477 | uses: egor-tensin/setup-gcc@v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:498:7
--> .github/workflows/build.yml:498:13
|
498 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:583:9
--> .github/workflows/build.yml:583:15
|
583 | uses: google/oss-fuzz/infra/cifuzz/actions/build_fuzzers@master
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/build.yml:588:9
--> .github/workflows/build.yml:588:15
|
588 | uses: google/oss-fuzz/infra/cifuzz/actions/run_fuzzers@master
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

@@ -89,82 +89,82 @@ help[obfuscation]: obfuscated usage of GitHub Actions features
= note: this finding has an auto-fix

error[unpinned-uses]: unpinned action reference
--> .github/workflows/documentation-links.yml:25:9
--> .github/workflows/documentation-links.yml:25:15
|
25 | - uses: readthedocs/actions/preview@v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/lint.yml:28:9
--> .github/workflows/lint.yml:28:15
|
28 | - uses: pre-commit/action@v3.0.1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/require-pr-label.yml:19:9
--> .github/workflows/require-pr-label.yml:19:15
|
19 | uses: mheap/github-action-required-labels@v5
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/require-pr-label.yml:38:9
--> .github/workflows/require-pr-label.yml:38:15
|
38 | uses: mheap/github-action-required-labels@v5
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/require-pr-label.yml:47:9
--> .github/workflows/require-pr-label.yml:47:15
|
47 | uses: mheap/github-action-required-labels@v5
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/require-pr-label.yml:58:9
--> .github/workflows/require-pr-label.yml:58:15
|
58 | uses: mheap/github-action-required-labels@v5
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/reusable-tsan.yml:60:7
--> .github/workflows/reusable-tsan.yml:60:13
|
60 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/reusable-ubuntu.yml:68:7
--> .github/workflows/reusable-ubuntu.yml:68:13
|
68 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/reusable-wasi.yml:30:7
--> .github/workflows/reusable-wasi.yml:30:13
|
30 | uses: bytecodealliance/actions/wasmtime/setup@v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

error[unpinned-uses]: unpinned action reference
--> .github/workflows/reusable-wasi.yml:46:7
--> .github/workflows/reusable-wasi.yml:46:13
|
46 | uses: hendrikmuhs/ccache-action@v1.2
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence → High

@@ -0,0 +1,16 @@
name: hackme
on:
  issues:

permissions: {}

jobs:
  inject-me:
    name: inject-me
    runs-on: ubuntu-latest

    steps:
      - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # tag=v7.0.1
        with:
          script: |
            return "doing a thing: ${{ github.event.issue.title }}"
@@ -0,0 +1,4 @@
rules:
  template-injection:
    ignore:
      - hackme.yml
@@ -0,0 +1,34 @@
# impostor-commit.yml
#
# what:
# an example of commit impersonation. GitHub repositories and their forks
# share a "network" of commit objects, meaning that GitHub's backend
# can't easily distinguish between them. as a result, workflows that
# appear to be pinned to a hash on a specific repo may actually be pinned
# to a hash within a different fork repo, which can be malicious.
#
# how:
# a user is unlikely to accidentally write a malicious commit, but may merge
# an otherwise innocent-looking change from a third party without realizing
# that the commits pinned by that party are actually references to a malicious
# fork.
#
# see also: https://www.chainguard.dev/unchained/what-the-fork-imposter-commits-in-github-actions-and-ci-cd

name: example
on: [push]

permissions: {}

jobs:
  commit:
    runs-on: ubuntu-latest
    permissions: {}
    steps:
      # NOT OK: c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e is an impostor
      - uses: actions/checkout@c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e
        with:
          persist-credentials: false
      - shell: bash
        run: |
          echo 'hello world!'
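Aside (not part of the diff): the comment block in this fixture describes the property such a check relies on. A pin of the form owner/repo@SHA is only trustworthy if the SHA is actually reachable from a branch or tag of that repository; a SHA that resolves only through a fork in the same commit network is an impostor. A minimal, hypothetical Rust sketch of that reachability test follows. It shells out to git, the repository URL and SHA are illustrative assumptions taken from the fixture, and it is not zizmor's actual implementation.

// Sketch only: requires `git` on PATH; repo and SHA below are assumptions.
use std::process::Command;

// True if `sha` is reachable from any branch or tag of the bare clone at `clone_dir`.
fn reachable(clone_dir: &std::path::Path, sha: &str) -> bool {
    [vec!["branch", "-a", "--contains", sha], vec!["tag", "--contains", sha]]
        .iter()
        .any(|args| {
            Command::new("git")
                .arg("-C")
                .arg(clone_dir)
                .args(args)
                .output()
                .map(|out| out.status.success() && !out.stdout.is_empty())
                .unwrap_or(false)
        })
}

fn main() -> std::io::Result<()> {
    let repo = "https://github.com/actions/checkout";
    let sha = "c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e";

    let clone_dir = std::env::temp_dir().join("impostor-check");
    let _ = std::fs::remove_dir_all(&clone_dir);

    // A bare, blob-less clone is enough: only refs and commit history matter here.
    let status = Command::new("git")
        .args(["clone", "--bare", "--filter=blob:none", repo])
        .arg(&clone_dir)
        .status()?;
    assert!(status.success(), "git clone failed");

    if reachable(&clone_dir, sha) {
        println!("ok: {sha} is reachable from {repo}");
    } else {
        println!("suspicious: {sha} is not reachable from any branch or tag of {repo}");
    }
    Ok(())
}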
@@ -0,0 +1,3 @@
# empty

This is an empty directory, except for this README file.
Some files were not shown because too many files have changed in this diff.