Compare commits

...

53 commits

Author SHA1 Message Date
github-actions[bot]
ff76e6c96e
[BOT] update JSON schemas from SchemaStore (#1462)
Some checks failed
CI / Lint (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
CodSpeed Benchmarks / Run benchmarks (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
Co-authored-by: woodruffw <3059210+woodruffw@users.noreply.github.com>
Co-authored-by: William Woodruff <william@yossarian.net>
2025-12-22 17:20:59 -05:00
dependabot[bot]
8a138d4d7f
chore(deps): bump the cargo group with 3 updates (#1463)
Some checks are pending
CodSpeed Benchmarks / Run benchmarks (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-22 08:44:33 -05:00
dependabot[bot]
27041c58c9
chore(deps): bump the github-actions group with 4 updates (#1464) 2025-12-22 08:43:38 -05:00
William Woodruff
c3913e7eff
excessive-permissions: add missing known permissions (#1461)
Some checks failed
CI / Lint (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
CodSpeed Benchmarks / Run benchmarks (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
2025-12-18 23:19:35 -05:00
William Woodruff
1a6a008951
Fix links to impostor-commit (#1459)
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-18 23:01:34 +00:00
William Woodruff
a5e304f536
Prep zizmor 1.19.0 (#1458) 2025-12-18 17:48:37 -05:00
William Woodruff
2942f11dc2
Bump all tree-sitter dependent crates (#1457)
Some checks are pending
CodSpeed Benchmarks / Run benchmarks (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Avoids dep hell.

Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-18 22:36:14 +00:00
William Woodruff
9d61a10dc1
Bump yamlpath to 0.30.0 (#1456) 2025-12-18 22:30:30 +00:00
William Woodruff
de6f9d6042
ci: add plain presentation test (#1454) 2025-12-18 17:25:07 -05:00
William Woodruff
cc76e2b93f
chore: bump github-actions-models to 0.42.0 (#1453)
Some checks failed
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
CodSpeed Benchmarks / Run benchmarks (push) Has been cancelled
CI / Lint (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-16 23:34:11 +00:00
William Woodruff
7e1c93b760
fix: add OpenTofu to Dependabot package ecosystems (#1452) 2025-12-16 18:27:24 -05:00
William Woodruff
081f844760
docs: bump trophies (#1450)
Some checks are pending
CodSpeed Benchmarks / Run benchmarks (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-16 16:44:33 +00:00
William Woodruff
abdfe5d9b6
docs: bump trophies (#1449)
Some checks are pending
CodSpeed Benchmarks / Run benchmarks (push) Waiting to run
CI / Lint (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-15 23:11:47 +00:00
dependabot[bot]
32d6831a1f
chore(deps): bump tower-lsp-server from 0.22.1 to 0.23.0 in the cargo group (#1447)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: William Woodruff <william@yossarian.net>
2025-12-15 15:10:10 -08:00
dependabot[bot]
eafa9e43fc
chore(deps): bump actions/checkout in the github-actions group (#1448)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-15 15:00:18 -08:00
William Woodruff
f0525e0f30
ci: bump actions in test-output.yml (#1446)
Some checks failed
CodSpeed Benchmarks / Run benchmarks (push) Has been cancelled
CI / Lint (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
2025-12-14 20:37:02 -08:00
William Woodruff
5b7e8bcd72
bench: offline benchmarks (#1444) 2025-12-14 18:24:23 -08:00
William Woodruff
4a7e4e27bc
chore: bump online test runtimes (#1443)
Some checks are pending
CodSpeed Benchmarks / Run benchmarks (push) Waiting to run
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-14 00:50:22 -08:00
William Woodruff
147bfabc0f
ci: only run benchmarks on labeled PRs (#1442) 2025-12-14 00:06:46 -08:00
William Woodruff
411b74a5a9
chore: online benchmarks (#1441) 2025-12-13 23:22:35 -08:00
William Woodruff
c9fc966d0a
chore: switch to CodSpeed benchmarking (#1440) 2025-12-13 22:42:08 -08:00
William Woodruff
b3e8725791
feat: improve "no inputs" error message (#1439)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-13 20:22:59 -08:00
William Woodruff
5bb11ce251
docs: bump trophies (#1437)
Some checks failed
CI / Lint (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-12 20:37:47 +00:00
William Woodruff
94eea71efd
docs: bump pinned actions (#1436)
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-12 20:34:12 +00:00
William Woodruff
b9b65b0e80
docs: bump trophies (#1435)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-12 04:40:45 +00:00
William Woodruff
c9f0ea9aaf
feat: discover zizmor.yaml config files (#1431)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-10 23:12:36 -08:00
dependabot[bot]
5987ac7503
chore(deps): bump the cargo group across 1 directory with 12 updates (#1430)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: William Woodruff <william@yossarian.net>
2025-12-10 22:54:31 -08:00
William Woodruff
b714997a0c
fix: bump tree-sitter-powershell and fix query (#1427)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-10 09:46:41 -08:00
William Woodruff
b78376a737
feat: add another fast path for impostor commit detection (#1429)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-10 09:20:05 +00:00
Daniel D. Beck
1f71a18100
docs: fix typo in dependabot-execution rule description (#1428) 2025-12-10 09:12:11 +00:00
github-actions[bot]
e0aa73cc7f
[BOT] update context-capabilities from GitHub webhooks (#1420)
Some checks failed
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
CI / Lint (push) Has been cancelled
Co-authored-by: woodruffw <3059210+woodruffw@users.noreply.github.com>
Co-authored-by: William Woodruff <william@yossarian.net>
2025-12-08 15:54:35 +00:00
John Blackbourn
2f9b3da4f5
Update permissions documentation (#1423) 2025-12-08 10:47:31 -05:00
dependabot[bot]
c17d68930c
chore(deps): bump the github-actions group with 3 updates (#1421)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-08 10:47:18 -05:00
William Woodruff
77b58ceec6
docs: bump trophies (#1419)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-08 04:20:12 +00:00
William Woodruff
26a7d434a2
fix: handle non-local shell definitions in obfuscation audit (#1418) 2025-12-07 23:15:27 -05:00
William Woodruff
1e51d1fe9f
feat: use with_job to give anonymous-definition findings a better span (#1416) 2025-12-07 22:25:58 -05:00
William Woodruff
1e25953cdd
perf: eliminate more low-level allocations (#1412)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-07 20:21:55 -05:00
William Woodruff
7f8f24f90c
feat: add an archived-uses audit (#1411)
Some checks failed
CI / Lint (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
2025-12-04 22:15:33 -05:00
William Woodruff
0f386aa3c1
docs: bump trophies (#1409)
Some checks failed
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
CI / Lint (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
CI / Test (push) Has been cancelled
CI / Test site build (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-03 23:38:16 +00:00
William Woodruff
8c377c1406
docs: bump trophies (#1408)
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-03 20:55:49 +00:00
William Woodruff
e64fe2ed2b
docs: bump trophies (#1406)
Some checks failed
CI / Lint (push) Has been cancelled
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Has been cancelled
CI / Test (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Has been cancelled
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Has been cancelled
CI / All tests pass (push) Has been cancelled
zizmor wheel builds for PyPI 🐍 / Release (push) Has been cancelled
CI / Test site build (push) Has been cancelled
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-02 15:29:52 +00:00
William Woodruff
4bf125eef8
chore(ci): allow CI to run on undrafting of PRs (#1404)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-01 22:47:07 -05:00
William Woodruff
8b4ea4c5e5
docs: bump trophies (#1403)
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-02 03:39:10 +00:00
William Woodruff
59abdf21b8
feat: more publish command patterns, avoid more allocations (#1402) 2025-12-02 03:35:42 +00:00
William Woodruff
6d1fd4f7ff
docs: bump trophies (#1401)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Signed-off-by: William Woodruff <william@yossarian.net>
2025-12-01 22:57:16 +00:00
github-actions[bot]
55c619aa0b
[BOT] update JSON schemas from SchemaStore (#1400)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-12-01 08:35:53 -05:00
William Woodruff
e95d8f374a
docs: bump trophies, add linter integration docs (#1398)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
Signed-off-by: William Woodruff <william@yossarian.net>
2025-11-30 21:36:32 +00:00
William Woodruff
494cd3ee07
chore: bump zensical, site deps (#1397) 2025-11-30 16:22:15 -05:00
William Woodruff
bc22ff4147
feat: improve error reporting for invalid YAML inputs (#1396) 2025-11-30 16:09:19 -05:00
William Woodruff
adbf0af1a8
feat: use-trusted-publishing: support more wrapped publish commands (#1394)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-11-29 23:37:12 -05:00
William Woodruff
3b1951b71e
feat: add --show-audit-urls=... for controlling URL rendering (#1391) 2025-11-29 20:05:00 -05:00
William Woodruff
f203b457f6
chore: prep for 1.18.0 (#1390)
Some checks are pending
CI / Lint (push) Waiting to run
CI / Test (push) Waiting to run
CI / Test site build (push) Waiting to run
CI / All tests pass (push) Blocked by required conditions
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (musllinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Linux wheels (manylinux) (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build Windows wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build macOS wheels (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Build source distribution (push) Waiting to run
zizmor wheel builds for PyPI 🐍 / Release (push) Blocked by required conditions
Deploy zizmor documentation site 🌐 / Deploy zizmor documentation to GitHub Pages 🌐 (push) Waiting to run
GitHub Actions Security Analysis with zizmor 🌈 / Run zizmor 🌈 (push) Waiting to run
2025-11-29 14:46:48 -05:00
Mostafa Moradian
aed6f8cae7
Update README (#1389) 2025-11-29 18:52:14 +00:00
139 changed files with 3942 additions and 1461 deletions

51
.github/workflows/benchmark.yml vendored Normal file
View file

@@ -0,0 +1,51 @@
name: CodSpeed Benchmarks
on:
push:
branches:
- "main"
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
- labeled
# `workflow_dispatch` allows CodSpeed to trigger backtest
# performance analysis in order to generate initial data.
workflow_dispatch:
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
benchmarks:
name: Run benchmarks
# PRs only get benchmarked if they have the `run-benchmarks` label.
if: |
contains(github.event.pull_request.labels.*.name, 'run-benchmarks')
|| github.event_name == 'push'
|| github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
cache-all-crates: true
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Build zizmor (release)
run: cargo build --release
- name: Run the benchmarks
uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
with:
mode: walltime
run: make bench

View file

@ -5,6 +5,11 @@ on:
branches:
- main
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
permissions: {}
@ -17,14 +22,14 @@ jobs:
name: Lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Format
run: cargo fmt --check
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: Lint
run: cargo clippy -- --deny warnings
@ -33,13 +38,13 @@ jobs:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Test dependencies
run: |
@ -62,11 +67,11 @@ jobs:
name: Test site build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Test site
run: make site

View file

@ -22,7 +22,7 @@ jobs:
pull-requests: write # for opening PRs
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
@ -31,7 +31,7 @@ jobs:
make refresh-schemas
- name: create PR
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
with:
draft: true
commit-message: "[BOT] update JSON schemas from SchemaStore"
@ -59,18 +59,18 @@ jobs:
pull-requests: write # for opening PRs
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: try to refresh context capabilities
run: |
make webhooks-to-contexts
- name: create PR
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
with:
draft: true
commit-message: "[BOT] update context capabilities"
@ -97,18 +97,18 @@ jobs:
pull-requests: write # for opening PRs
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
- uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: try to refresh CodeQL injection sinks
run: |
make codeql-injection-sinks
- name: create PR
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
with:
draft: true
commit-message: "[BOT] update CodeQL injection sinks"

View file

@ -27,7 +27,7 @@ jobs:
target: x86_64-pc-windows-msvc
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
@ -60,7 +60,7 @@ jobs:
shell: bash
- name: Upload artifact
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: artifacts-${{ matrix.target }}
path: ${{ steps.archive-release.outputs.filename }}
@ -78,7 +78,7 @@ jobs:
steps:
- name: Download artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
pattern: artifacts-*
path: distrib/

View file

@ -43,7 +43,7 @@ jobs:
packages: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
@ -54,7 +54,7 @@ jobs:
- name: Extract Docker metadata
id: docker-metadata
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: "${{ env.ZIZMOR_IMAGE }}"
@ -86,7 +86,7 @@ jobs:
shell: bash
- name: Upload digest
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: digests-${{ matrix.image.platform-pair }}
path: ${{ runner.temp }}/digests/*
@ -107,7 +107,7 @@ jobs:
steps:
- name: Download digests
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
@ -128,7 +128,7 @@ jobs:
- name: Extract Docker metadata
id: docker-metadata
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
env:
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
with:

View file

@ -37,7 +37,7 @@ jobs:
# target: ppc64le
# manylinux: "2_28"
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Build wheels
@ -47,7 +47,7 @@ jobs:
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
manylinux: ${{ matrix.platform.manylinux }}
- name: Upload wheels
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-linux-${{ matrix.platform.target }}
path: dist
@ -67,7 +67,7 @@ jobs:
- runner: ubuntu-24.04
target: armv7
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Build wheels
@ -77,7 +77,7 @@ jobs:
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
manylinux: musllinux_1_2
- name: Upload wheels
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-musllinux-${{ matrix.platform.target }}
path: dist
@ -93,7 +93,7 @@ jobs:
- runner: windows-latest
target: x86
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Build wheels
@ -102,7 +102,7 @@ jobs:
target: ${{ matrix.platform.target }}
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
- name: Upload wheels
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-windows-${{ matrix.platform.target }}
path: dist
@ -118,7 +118,7 @@ jobs:
- runner: macos-15
target: aarch64
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Build wheels
@ -127,7 +127,7 @@ jobs:
target: ${{ matrix.platform.target }}
args: --release --out dist --manifest-path crates/zizmor/Cargo.toml
- name: Upload wheels
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-macos-${{ matrix.platform.target }}
path: dist
@ -136,7 +136,7 @@ jobs:
name: Build source distribution
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Build sdist
@ -145,7 +145,7 @@ jobs:
command: sdist
args: --out dist --manifest-path crates/zizmor/Cargo.toml
- name: Upload sdist
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: wheels-sdist
path: dist
@ -161,7 +161,7 @@ jobs:
permissions:
id-token: write # Trusted Publishing + PEP 740 attestations
steps:
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- name: Attest
uses: astral-sh/attest-action@2c727738cea36d6c97dd85eb133ea0e0e8fe754b # v0.0.4
with:

View file

@ -53,7 +53,7 @@ jobs:
permissions:
id-token: write # for trusted publishing to crates.io
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false

View file

@ -19,7 +19,7 @@ jobs:
permissions:
id-token: write # for trusted publishing to crates.io
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false

View file

@ -26,12 +26,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: build site
run: make site

View file

@ -19,18 +19,18 @@ jobs:
pull-requests: write # for 'Leave comment' step
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: Run zizmor
run: |
cargo run -- --format sarif . > results.sarif
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/upload-sarif@1b168cd39490f61582a9beae412bb7057a6b2c4e # v4.31.8
with:
sarif_file: results.sarif
category: zizmor-test-sarif-presentation
@ -52,11 +52,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: Run zizmor
run: |
@ -67,3 +67,27 @@ jobs:
--no-exit-codes \
--format github \
crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml
test-plain-presentation:
name: Test plain text presentation
runs-on: ubuntu-latest
if: contains(github.event.pull_request.labels.*.name, 'test-plain-presentation')
permissions: {}
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: Run zizmor
run: |
# Normally we'd want a workflow to fail if the audit fails,
# but we're only testing presentation here.
cargo run \
-- \
--no-exit-codes \
--format plain \
crates/zizmor/tests/integration/test-data/several-vulnerabilities.yml

View file

@ -19,7 +19,7 @@ jobs:
issues: write # to create an issue if a new version is found
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
sparse-checkout: support/

View file

@ -5,6 +5,11 @@ on:
branches: ["main"]
pull_request:
branches: ["**"]
types:
- opened
- synchronize
- reopened
- ready_for_review
permissions: {}
@ -16,12 +21,12 @@ jobs:
security-events: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
persist-credentials: false
- name: Run zizmor 🌈
uses: zizmorcore/zizmor-action@e673c3917a1aef3c65c972347ed84ccd013ecda4 # v0.2.0
uses: zizmorcore/zizmor-action@e639db99335bc9038abc0e066dfcd72e23d26fb4 # v0.3.0
with:
# intentionally not scanning the entire repository,
# since it contains integration tests.

3
.gitignore vendored
View file

@ -10,3 +10,6 @@
# pending snapshots
.*.pending-snap
# benchmarks
.codspeed/

186
Cargo.lock generated
View file

@ -58,9 +58,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
[[package]]
name = "annotate-snippets"
version = "0.12.9"
version = "0.12.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a44baf24dd94e781f74dfe67ffee75a09a57971ddf0f615a178b4f6d404b48ff"
checksum = "15580ece6ea97cbf832d60ba19c021113469480852c6a2a6beb0db28f097bf1f"
dependencies = [
"anstyle",
"unicode-width 0.2.2",
@ -228,12 +228,6 @@ version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.10.0"
@ -313,18 +307,18 @@ dependencies = [
[[package]]
name = "camino"
version = "1.2.1"
version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609"
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
dependencies = [
"serde_core",
]
[[package]]
name = "cc"
version = "1.2.41"
version = "1.2.49"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac9fe6cdbb24b6ade63616c0a0688e45bb56732262c158df3c0c4bea4ca47cb7"
checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215"
dependencies = [
"find-msvc-tools",
"shlex",
@ -344,9 +338,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "clap"
version = "4.5.51"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
"clap_builder",
"clap_derive",
@ -364,9 +358,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.51"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
"anstream",
"anstyle",
@ -376,9 +370,9 @@ dependencies = [
[[package]]
name = "clap_complete"
version = "4.5.60"
version = "4.5.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e602857739c5a4291dfa33b5a298aeac9006185229a700e5810a3ef7272d971"
checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992"
dependencies = [
"clap",
]
@ -564,6 +558,12 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "data-encoding"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476"
[[package]]
name = "deranged"
version = "0.5.4"
@ -705,9 +705,9 @@ dependencies = [
[[package]]
name = "find-msvc-tools"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
[[package]]
name = "flate2"
@ -721,11 +721,12 @@ dependencies = [
[[package]]
name = "fluent-uri"
version = "0.1.4"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17c704e9dbe1ddd863da1e6ff3567795087b1eb201ce80d8fa81162e1516500d"
checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5"
dependencies = [
"bitflags 1.3.2",
"borrow-or-share",
"ref-cast",
]
[[package]]
@ -923,9 +924,11 @@ dependencies = [
[[package]]
name = "github-actions-models"
version = "0.39.0"
version = "0.42.0"
dependencies = [
"indexmap",
"insta",
"self_cell",
"serde",
"serde_yaml",
"tracing",
@ -952,9 +955,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
[[package]]
name = "hashbrown"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
dependencies = [
"allocator-api2",
"equivalent",
@ -975,12 +978,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "1.3.1"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
dependencies = [
"bytes",
"fnv",
"itoa",
]
@ -1289,12 +1291,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.12.0"
version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [
"equivalent",
"hashbrown 0.16.0",
"hashbrown 0.16.1",
"serde",
"serde_core",
]
@ -1315,9 +1317,9 @@ dependencies = [
[[package]]
name = "insta"
version = "1.43.2"
version = "1.44.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
checksum = "b5c943d4415edd8153251b6f197de5eb1640e56d84e8d9159bea190421c73698"
dependencies = [
"console 0.15.11",
"once_cell",
@ -1382,13 +1384,13 @@ dependencies = [
[[package]]
name = "jsonschema"
version = "0.35.0"
version = "0.37.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0303b14f91cbac17c64aaf2ef60ab71fe5f34c3867cedcbca72c9dd15f5040fe"
checksum = "73c9ffb2b5c56d58030e1b532d8e8389da94590515f118cf35b5cb68e4764a7e"
dependencies = [
"ahash",
"base64 0.22.1",
"bytecount",
"data-encoding",
"email_address",
"fancy-regex",
"fraction",
@ -1426,7 +1428,7 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
dependencies = [
"bitflags 2.10.0",
"bitflags",
"libc",
"redox_syscall",
]
@ -1475,16 +1477,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "lsp-types"
version = "0.97.0"
name = "ls-types"
version = "0.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53353550a17c04ac46c585feb189c2db82154fc84b79c7a66c96c2c644f66071"
checksum = "7a7deb98ef9daaa7500324351a5bab7c80c644cfb86b4be0c4433b582af93510"
dependencies = [
"bitflags 1.3.2",
"fluent-uri 0.1.4",
"bitflags",
"fluent-uri 0.3.2",
"percent-encoding",
"serde",
"serde_json",
"serde_repr",
]
[[package]]
@ -1780,9 +1782,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
[[package]]
name = "pest"
version = "2.8.3"
version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4"
checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22"
dependencies = [
"memchr",
"ucd-trie",
@ -1790,9 +1792,9 @@ dependencies = [
[[package]]
name = "pest_derive"
version = "2.8.3"
version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "187da9a3030dbafabbbfb20cb323b976dc7b7ce91fcd84f2f74d6e31d378e2de"
checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f"
dependencies = [
"pest",
"pest_generator",
@ -1800,9 +1802,9 @@ dependencies = [
[[package]]
name = "pest_generator"
version = "2.8.3"
version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49b401d98f5757ebe97a26085998d6c0eecec4995cad6ab7fc30ffdf4b052843"
checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625"
dependencies = [
"pest",
"pest_meta",
@ -1813,9 +1815,9 @@ dependencies = [
[[package]]
name = "pest_meta"
version = "2.8.3"
version = "2.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72f27a2cfee9f9039c4d86faa5af122a0ac3851441a34865b8a043b46be0065a"
checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82"
dependencies = [
"pest",
"sha2",
@ -2046,7 +2048,7 @@ version = "0.5.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
dependencies = [
"bitflags 2.10.0",
"bitflags",
]
[[package]]
@ -2071,14 +2073,14 @@ dependencies = [
[[package]]
name = "referencing"
version = "0.35.0"
version = "0.37.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22d0d0665043906aacf1d83bea9d61e5134f8f437815b84320e7facf8ff4e9c2"
checksum = "4283168a506f0dcbdce31c9f9cce3129c924da4c6bca46e46707fcb746d2d70c"
dependencies = [
"ahash",
"fluent-uri 0.4.1",
"getrandom 0.3.4",
"hashbrown 0.16.0",
"hashbrown 0.16.1",
"parking_lot",
"percent-encoding",
"serde_json",
@ -2127,9 +2129,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"
[[package]]
name = "reqwest"
version = "0.12.24"
version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
"base64 0.22.1",
"bytes",
@ -2222,7 +2224,7 @@ version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
dependencies = [
"bitflags 2.10.0",
"bitflags",
"errno",
"libc",
"linux-raw-sys",
@ -2345,7 +2347,7 @@ version = "3.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c"
dependencies = [
"bitflags 2.10.0",
"bitflags",
"core-foundation",
"core-foundation-sys",
"libc",
@ -2488,17 +2490,6 @@ dependencies = [
"syn 2.0.108",
]
[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
]
[[package]]
name = "serde_spanned"
version = "1.0.3"
@ -3002,11 +2993,11 @@ dependencies = [
[[package]]
name = "tower-http"
version = "0.6.6"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
dependencies = [
"bitflags 2.10.0",
"bitflags",
"bytes",
"futures-util",
"http",
@ -3026,17 +3017,16 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-lsp-server"
version = "0.22.1"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88f3f8ec0dcfdda4d908bad2882fe0f89cf2b606e78d16491323e918dfa95765"
checksum = "2f0e711655c89181a6bc6a2cc348131fcd9680085f5b06b6af13427a393a6e72"
dependencies = [
"bytes",
"dashmap",
"futures",
"httparse",
"lsp-types",
"ls-types",
"memchr",
"percent-encoding",
"serde",
"serde_json",
"tokio",
@ -3053,9 +3043,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"
version = "0.1.41"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
"pin-project-lite",
"tracing-attributes",
@ -3064,9 +3054,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.30"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
@ -3075,9 +3065,9 @@ dependencies = [
[[package]]
name = "tracing-core"
version = "0.1.34"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
"once_cell",
"valuable",
@ -3085,9 +3075,9 @@ dependencies = [
[[package]]
name = "tracing-indicatif"
version = "0.3.13"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04d4e11e0e27acef25a47f27e9435355fecdc488867fa2bc90e75b0700d2823d"
checksum = "e1ef6990e0438749f0080573248e96631171a0b5ddfddde119aa5ba8c3a9c47e"
dependencies = [
"indicatif",
"tracing",
@ -3108,9 +3098,9 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
version = "0.3.20"
version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [
"matchers",
"nu-ansi-term",
@ -3126,9 +3116,9 @@ dependencies = [
[[package]]
name = "tree-sitter"
version = "0.25.10"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87"
checksum = "974d205cc395652cfa8b37daa053fe56eebd429acf8dc055503fee648dae981e"
dependencies = [
"cc",
"regex",
@ -3140,9 +3130,9 @@ dependencies = [
[[package]]
name = "tree-sitter-bash"
version = "0.25.0"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "871b0606e667e98a1237ebdc1b0d7056e0aebfdc3141d12b399865d4cb6ed8a6"
checksum = "9e5ec769279cc91b561d3df0d8a5deb26b0ad40d183127f409494d6d8fc53062"
dependencies = [
"cc",
"tree-sitter-language",
@ -3150,7 +3140,7 @@ dependencies = [
[[package]]
name = "tree-sitter-iter"
version = "0.0.2"
version = "0.0.3"
dependencies = [
"tree-sitter",
"tree-sitter-yaml",
@ -3164,9 +3154,9 @@ checksum = "c4013970217383f67b18aef68f6fb2e8d409bc5755227092d32efb0422ba24b8"
[[package]]
name = "tree-sitter-powershell"
version = "0.25.9"
version = "0.25.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae0e37101b110badaf99aa40460915a8797ceba15fc0ed22773280377a8dffb6"
checksum = "415ec6251d133d26b4f62c60721149fe36c315334f47812450187d6ea59cffdf"
dependencies = [
"cc",
"tree-sitter-language",
@ -3814,7 +3804,7 @@ checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3"
[[package]]
name = "yamlpatch"
version = "0.7.0"
version = "0.8.0"
dependencies = [
"indexmap",
"insta",
@ -3830,7 +3820,7 @@ dependencies = [
[[package]]
name = "yamlpath"
version = "0.29.0"
version = "0.31.0"
dependencies = [
"line-index",
"self_cell",
@ -3954,7 +3944,7 @@ dependencies = [
[[package]]
name = "zizmor"
version = "1.18.0-rc3"
version = "1.19.0"
dependencies = [
"annotate-snippets",
"anstream",

View file

@ -21,38 +21,38 @@ rust-version = "1.88.0"
[workspace.dependencies]
anyhow = "1.0.100"
github-actions-expressions = { path = "crates/github-actions-expressions", version = "0.0.11" }
github-actions-models = { path = "crates/github-actions-models", version = "0.39.0" }
github-actions-models = { path = "crates/github-actions-models", version = "0.42.0" }
itertools = "0.14.0"
pest = "2.8.3"
pest_derive = "2.8.3"
pest = "2.8.4"
pest_derive = "2.8.4"
pretty_assertions = "1.4.1"
annotate-snippets = "0.12.9"
annotate-snippets = "0.12.10"
anstream = "0.6.21"
assert_cmd = "2.1.1"
async-trait = "0.1.89"
camino = "1.2.1"
clap = "4.5.51"
camino = "1.2.2"
clap = "4.5.53"
clap-verbosity-flag = { version = "3.0.4", default-features = false }
clap_complete = "4.5.60"
clap_complete = "4.5.61"
clap_complete_nushell = "4.5.10"
csv = "1.3.1"
etcetera = "0.11.0"
flate2 = "1.1.5"
fst = "0.4.7"
futures = "0.3"
http = "1.3.1"
http = "1.4.0"
http-cache-reqwest = { version = "1.0.0-alpha.2", features = ["manager-moka"] }
human-panic = "2.0.4"
ignore = "0.4.25"
indexmap = { version = "2.11.4", features = ["serde"] }
indexmap = { version = "2.12.1", features = ["serde"] }
indicatif = "0.18"
insta = "1.43.2"
jsonschema = "0.35.0"
insta = "1.44.3"
jsonschema = "0.37.4"
line-index = "0.1.2"
memchr = "2.7.6"
owo-colors = "4.2.3"
regex = "1.12.1"
reqwest = { version = "0.12.23", default-features = false }
reqwest = { version = "0.12.25", default-features = false }
reqwest-middleware = "0.4.2"
self_cell = "1"
serde = { version = "1.0.228", features = ["derive"] }
@ -65,16 +65,18 @@ tar = "0.4.44"
terminal-link = "0.1.0"
thiserror = "2.0.17"
tokio = { version = "1.47.1", features = ["rt-multi-thread", "io-std"] }
tower-lsp-server = "0.22"
tracing = "0.1.41"
tracing-indicatif = "0.3.13"
tower-lsp-server = "0.23"
tracing = "0.1.43"
tracing-indicatif = "0.3.14"
tracing-subscriber = "0.3.20"
tree-sitter = "0.25.10"
tree-sitter-bash = "0.25.0"
tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.2" }
tree-sitter-powershell = "0.25.9"
yamlpath = { path = "crates/yamlpath", version = "0.29.0" }
yamlpatch = { path = "crates/yamlpatch", version = "0.7.0" }
tree-sitter = "0.26.3"
tree-sitter-bash = "0.25.1"
tree-sitter-iter = { path = "crates/tree-sitter-iter", version = "0.0.3" }
# Exact version since the upstream performed a breaking change outside of semver.
# See: https://github.com/zizmorcore/zizmor/pull/1427
tree-sitter-powershell = "=0.25.10"
yamlpath = { path = "crates/yamlpath", version = "0.31.0" }
yamlpatch = { path = "crates/yamlpatch", version = "0.8.0" }
tree-sitter-yaml = "0.7.2"
tikv-jemallocator = "0.6"

View file

@ -4,7 +4,7 @@ all:
.PHONY: site
site:
uv run --only-group docs zensical build
uv run --only-group docs zensical build --clean
.PHONY: site-live
site-live:
@ -42,6 +42,10 @@ codeql-injection-sinks: crates/zizmor/data/codeql-injection-sinks.json
crates/zizmor/data/codeql-injection-sinks.json: support/codeql-injection-sinks.py
$< > $@
.PHONY: archived-repos
archived-repos:
support/archived-repos.py
.PHONY: pinact
pinact:
pinact run --update --verify
@ -49,4 +53,4 @@ pinact:
.PHONY: bench
bench:
uv run bench/benchmark.py --offline
uv run --only-group=bench pytest bench/ --codspeed

1
bench/.gitignore vendored
View file

@ -1 +0,0 @@
results/

0
bench/__init__.py Normal file
View file

View file

@ -1,242 +0,0 @@
# /// script
# requires-python = ">=3.12"
# ///
import argparse
import hashlib
import json
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, NoReturn, NotRequired, TypedDict
# External tools the harness shells out to; checked up-front in main().
_DEPS = ["hyperfine", "curl", "unzip"]
_HERE = Path(__file__).parent
_PROJECT_ROOT = _HERE.parent
# Release-build binary produced by `cargo build --release -p zizmor`.
_ZIZMOR = _PROJECT_ROOT / "target" / "release" / "zizmor"
assert (_PROJECT_ROOT / "Cargo.toml").is_file(), "Missing project root?"
_BENCHMARKS = _HERE / "benchmarks.json"
_RESULTS = _HERE / "results"
assert _BENCHMARKS.is_file(), f"Benchmarks file not found: {_BENCHMARKS}"
# NOTE: created eagerly at import time; hyperfine JSON results land here.
_RESULTS.mkdir(exist_ok=True)
# Shared download cache, content-addressed by expected SHA-256 (see _curl).
_CACHE_DIR = Path(tempfile.gettempdir()) / "zizmor-benchmark-cache"
_CACHE_DIR.mkdir(exist_ok=True)
# Optional; only required by benchmarks marked "online" (checked in plan()).
_GH_TOKEN = os.getenv("GH_TOKEN")
class Log:
def __init__(self, scope: str | None) -> None:
self.scopes = [scope] if scope else []
def info(self, message: str) -> None:
scopes = " ".join(f"[{s}]" for s in self.scopes)
print(f"[+] {scopes} {message}", file=sys.stderr)
def warn(self, message: str) -> None:
scopes = " ".join(f"[{s}]" for s in self.scopes)
print(f"[!] {scopes} {message}", file=sys.stderr)
def error(self, message: str) -> NoReturn:
self.warn(message)
sys.exit(1)
@contextmanager
def scope(self, new_scope: str) -> Iterator[None]:
"""Create a new logging scope."""
self.scopes.append(new_scope)
try:
yield None
finally:
self.scopes.pop()
# Module-wide logger; benchmark-specific scopes are pushed in main().
LOG = Log("benchmarks")


def _curl(url: str, expected_sha256: str) -> Path:
    """Download a URL and cache it using content addressing with SHA256.

    The cache key is the *expected* digest, so a cache hit requires no
    network access and no re-hashing. Exits the process (via LOG.error)
    if the downloaded bytes do not match `expected_sha256`.
    """
    cached_file = _CACHE_DIR / expected_sha256
    if cached_file.exists():
        LOG.info("Using cached file")
        return cached_file
    # -f: fail on HTTP errors, -sS: silent but still show errors,
    # -L: follow redirects (GitHub archive URLs redirect to codeload).
    result = subprocess.run(
        ["curl", "-fsSL", url],
        capture_output=True,
        check=True,
    )
    content = result.stdout
    content_hash = hashlib.sha256(content).hexdigest()
    if content_hash != expected_sha256:
        # LOG.error never returns (sys.exit), so a mismatch is never cached.
        LOG.error(f"Hash mismatch: {expected_sha256} != {content_hash}")
    cached_file.write_bytes(content)
    return cached_file
def _unzip(archive_path: Path, extract_name: str) -> Path:
    """Extract an archive to a directory in the cache.

    The extraction directory is keyed by `extract_name` (the benchmark
    name) and reused across runs.
    """
    extract_dir = _CACHE_DIR / extract_name
    if extract_dir.exists():
        # NOTE(review): a partially-extracted directory left by an
        # interrupted run would also be treated as a cache hit here.
        LOG.info("Using cached extraction")
        return extract_dir
    extract_dir.mkdir(exist_ok=True)
    subprocess.run(
        ["unzip", "-q", str(archive_path), "-d", str(extract_dir)],
        check=True,
    )
    LOG.info(f"Extracted {archive_path.name} to {extract_dir}")
    return extract_dir
class Benchmark(TypedDict):
name: str
source_type: str
source: str
source_sha256: str
stencil: str
online: bool | None
Plan = list[str]
class Bench:
    """Wraps one Benchmark: resolves its inputs into a concrete command
    (`plan`) and executes that command under hyperfine (`run`)."""

    def __init__(self, benchmark: Benchmark) -> None:
        self.benchmark = benchmark

    def plan(self) -> Plan:
        """Resolve sources and the stencil into a ready-to-run argv.

        Downloads and extracts the input archive (both cached), refuses
        to plan an online benchmark without GH_TOKEN, then substitutes
        $ZIZMOR and $INPUTS into the stencil. Exits the process on error.
        """
        match self.benchmark["source_type"]:
            case "archive-url":
                url = self.benchmark["source"]
                sha256 = self.benchmark["source_sha256"]
                archive = _curl(url, sha256)
                inputs = [str(_unzip(archive, self.benchmark["name"]))]
            case _:
                # LOG.error exits the process, so `inputs` is always
                # bound when execution reaches the substitution below.
                LOG.error(f"Unknown source type: {self.benchmark['source_type']}")
        if self.benchmark.get("online", False):
            if not _GH_TOKEN:
                LOG.error("Benchmark requires online access but GH_TOKEN is not set")
        stencil = self.benchmark["stencil"]
        command = stencil.replace("$ZIZMOR", str(_ZIZMOR)).replace(
            "$INPUTS", " ".join(inputs)
        )
        return shlex.split(command)

    def run(self, plan: Plan, *, dry_run: bool) -> None:
        """Run `plan` under hyperfine and post-process the JSON result.

        Results are written to results/<name>.json; an existing file is
        overwritten (with a warning). With `dry_run`, only logs the
        hyperfine invocation that would have been used.
        """
        command = shlex.join(plan)
        result_file = _RESULTS / f"{self.benchmark['name']}.json"
        if result_file.exists() and not dry_run:
            LOG.warn("clobbering existing result file")
        hyperfine_command = [
            "hyperfine",
            "--warmup",
            "3",
            # NOTE: not needed because we use --no-exit-codes in the stencil
            # "--ignore-failure",
            "--export-json",
            str(result_file),
            command,
        ]
        if dry_run:
            LOG.warn(f"would have run: {shlex.join(hyperfine_command)}")
            return
        try:
            subprocess.run(
                hyperfine_command,
                check=True,
            )
        except subprocess.CalledProcessError:
            # hyperfine already printed diagnostics; just exit nonzero.
            LOG.error("run failed, see above for details")
        # Stupid hack: fixup each result file's results[0].command
        # to be a more useful benchmark identifier, since bencher
        # apparently keys on these.
        result_json = json.loads(result_file.read_bytes())
        result_json["results"][0]["command"] = f"zizmor::{self.benchmark['name']}"
        result_file.write_text(json.dumps(result_json))
        LOG.info(f"run written to {result_file}")
def main() -> None:
    """Entry point: check deps, build zizmor, plan all benchmarks, run them.

    All benchmarks are planned before any is run, so a bad benchmark
    definition fails fast instead of after earlier (slow) runs.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--dry-run", action="store_true", help="Show plans without running them"
    )
    parser.add_argument(
        "--offline", action="store_true", help="Run only offline benchmarks"
    )
    args = parser.parse_args()
    # Fail early if any required external tool is missing from PATH.
    missing = []
    for dep in _DEPS:
        if not shutil.which(dep):
            missing.append(dep)
    if missing:
        LOG.error(
            f"Missing dependencies: {', '.join(missing)}. "
            "Please install them before running benchmarks."
        )
    LOG.info("ensuring we have a benchable zizmor build")
    subprocess.run(
        ["cargo", "build", "--release", "-p", "zizmor"],
        check=True,
        cwd=_PROJECT_ROOT,
    )
    if not _ZIZMOR.is_file():
        LOG.error("zizmor build presumably failed, see above for details")
    LOG.info(f"using cache dir: {_CACHE_DIR}")
    benchmarks: list[Benchmark] = json.loads(_BENCHMARKS.read_text(encoding="utf-8"))
    LOG.info(f"found {len(benchmarks)} benchmarks in {_BENCHMARKS.name}")
    if args.offline:
        # Online benchmarks hit the GitHub API and need GH_TOKEN; drop them.
        benchmarks = [b for b in benchmarks if not b.get("online", False)]
        LOG.info(f"filtered to {len(benchmarks)} offline benchmarks")
    benches = [Bench(benchmark) for benchmark in benchmarks]
    plans = []
    with LOG.scope("plan"):
        for bench in benches:
            with LOG.scope(bench.benchmark["name"]):
                LOG.info("beginning plan")
                plans.append(bench.plan())
    with LOG.scope("run"):
        for bench, plan in zip(benches, plans):
            with LOG.scope(bench.benchmark["name"]):
                bench.run(plan, dry_run=args.dry_run)


if __name__ == "__main__":
    main()

View file

@ -1,24 +0,0 @@
[
{
"name": "grafana-9f212d11d0ac",
"source_type": "archive-url",
"source": "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip",
"source_sha256": "c6d42b52c8d912db2698d8b06f227de46f0c2d04cc757841792ed6567f0c56c7",
"stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
},
{
"name": "cpython-48f88310044c",
"source_type": "archive-url",
"source": "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip",
"source_sha256": "a52a67f1dd9cfa67c7d1305d5b9639629abe247b2c32f01b77f790ddf8b49503",
"stencil": "$ZIZMOR --offline --format=plain --no-exit-codes --no-config $INPUTS"
},
{
"name": "gha-hazmat-da3c3cd-online",
"source_type": "archive-url",
"source": "https://github.com/woodruffw/gha-hazmat/archive/da3c3cd.zip",
"source_sha256": "f0aa224c5203218ad26e9f104d8dc3eaf8b322c97056add04d79f4a0d53c8f1f",
"stencil": "$ZIZMOR --format=plain --no-exit-codes --no-config $INPUTS",
"online": true
}
]

12
bench/common.py Normal file
View file

@ -0,0 +1,12 @@
import subprocess
from pathlib import Path
_HERE = Path(__file__).parent
_ZIZMOR = _HERE.parent / "target" / "release" / "zizmor"


def zizmor(args: list[str], *, check: bool = False) -> None:
    """Invoke the release-build zizmor binary with the given CLI arguments.

    When `check` is true, a nonzero exit status raises
    `subprocess.CalledProcessError`; otherwise the exit status is ignored.
    """
    assert _ZIZMOR.is_file(), (
        f"zizmor binary not found at {_ZIZMOR}, run prepare() first"
    )
    command = [str(_ZIZMOR)]
    command.extend(args)
    subprocess.run(command, check=check)

0
bench/conftest.py Normal file
View file

13
bench/test_bench_basic.py Normal file
View file

@ -0,0 +1,13 @@
import pytest
from .common import zizmor
@pytest.mark.benchmark
def test_zizmor_startup():
    """Benchmark bare process startup via `zizmor --version`."""
    zizmor(["--version"])


@pytest.mark.benchmark
def test_zizmor_help():
    """Benchmark CLI help rendering via `zizmor --help`."""
    zizmor(["--help"])

View file

@ -0,0 +1,68 @@
import io
import zipfile
from pathlib import Path
import pytest
import urllib3
from bench.common import zizmor
def _fetch_pinned_source(tmp_path_factory, name: str, archive: str) -> Path:
    # Download the pinned source archive and extract it into a
    # session-lifetime temporary directory. Shared by both fixtures
    # below, which previously duplicated this logic verbatim.
    raw_zip = urllib3.PoolManager().request("GET", archive).data
    path = tmp_path_factory.mktemp(name)
    zipfile.ZipFile(io.BytesIO(raw_zip)).extractall(path)
    return path


@pytest.fixture(scope="session")
def grafana(tmp_path_factory) -> Path:
    """Pinned grafana source tree, downloaded once per test session."""
    return _fetch_pinned_source(
        tmp_path_factory,
        "grafana",
        "https://github.com/grafana/grafana/archive/9f212d11d0ac9c38ada62a7db830844bb9b02905.zip",
    )


@pytest.fixture(scope="session")
def cpython(tmp_path_factory) -> Path:
    """Pinned CPython source tree, downloaded once per test session."""
    return _fetch_pinned_source(
        tmp_path_factory,
        "cpython",
        "https://github.com/python/cpython/archive/48f88310044c6ef877f3b0761cf7afece2f8fb3a.zip",
    )
@pytest.mark.benchmark
def test_zizmor_offline_grafana_9f212d11d0(grafana: Path):
    """
    Runs `zizmor --offline --format=plain --no-exit-codes --no-config <path-to-grafana-source>`
    """
    # check=True: a nonzero zizmor exit fails the benchmark outright.
    # Findings alone can't cause that, since --no-exit-codes is passed.
    zizmor(
        [
            "--offline",
            "--format=plain",
            "--no-exit-codes",
            "--no-config",
            str(grafana),
        ],
        check=True,
    )


@pytest.mark.benchmark
def test_zizmor_offline_cpython_48f88310044c(cpython: Path):
    """
    Runs `zizmor --offline --format=plain --no-exit-codes --no-config <path-to-cpython-source>`
    """
    # Same invocation as the grafana benchmark, against the CPython tree.
    zizmor(
        [
            "--offline",
            "--format=plain",
            "--no-exit-codes",
            "--no-config",
            str(cpython),
        ],
        check=True,
    )

View file

@ -0,0 +1,47 @@
import os
import pytest
from bench.common import zizmor
@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set")
def test_zizmor_online_gha_hazmat_da3c3cd(benchmark):
    """
    Runs `zizmor --format=plain --no-exit-codes --no-config woodruffw/gha-hazmat@da3c3cd`
    """
    # NOTE(review): these online tests use the `benchmark` fixture in
    # pedantic mode rather than `@pytest.mark.benchmark` like the offline
    # tests — presumably to fix warmup/iteration counts for network-bound
    # runs. Confirm against the benchmark runner's configuration.
    benchmark.pedantic(
        zizmor,
        args=(
            [
                "--format=plain",
                "--no-exit-codes",
                "--no-config",
                "woodruffw/gha-hazmat@da3c3cd",
            ],
        ),
        warmup_rounds=2,
        iterations=10,
    )


@pytest.mark.skipif("GH_TOKEN" not in os.environ, reason="GH_TOKEN not set")
def test_zizmor_online_cpython_48f88310044c(benchmark):
    """
    Runs `zizmor --format=plain --no-exit-codes --no-config python/cpython@48f88310044c`
    """
    benchmark.pedantic(
        zizmor,
        args=(
            [
                "--format=plain",
                "--no-exit-codes",
                "--no-config",
                "python/cpython@48f88310044c",
            ],
        ),
        warmup_rounds=2,
        iterations=10,
    )

View file

@ -1,6 +1,6 @@
[package]
name = "github-actions-models"
version = "0.39.0"
version = "0.42.0"
description = "Unofficial, high-quality data models for GitHub Actions workflows, actions, and related components"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/github-actions-models"
keywords = ["github", "ci"]
@ -17,8 +17,10 @@ workspace = true
[dependencies]
indexmap.workspace = true
self_cell.workspace = true
serde.workspace = true
serde_yaml.workspace = true
tracing.workspace = true
[dev-dependencies]
insta.workspace = true

View file

@ -1,11 +1,9 @@
//! Shared models and utilities.
use std::{
fmt::{self, Display},
str::FromStr,
};
use std::fmt::{self, Display};
use indexmap::IndexMap;
use self_cell::self_cell;
use serde::{Deserialize, Deserializer, Serialize, de};
pub mod expr;
@ -201,57 +199,59 @@ pub enum Uses {
Docker(DockerUses),
}
impl FromStr for Uses {
type Err = UsesError;
impl Uses {
/// Parse a `uses:` clause into its appropriate variant.
pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
let uses = uses.into();
fn from_str(uses: &str) -> Result<Self, Self::Err> {
if uses.starts_with("./") {
LocalUses::from_str(uses).map(Self::Local)
Ok(Self::Local(LocalUses::new(uses)))
} else if let Some(image) = uses.strip_prefix("docker://") {
DockerUses::from_str(image).map(Self::Docker)
DockerUses::parse(image).map(Self::Docker)
} else {
RepositoryUses::from_str(uses).map(Self::Repository)
RepositoryUses::parse(uses).map(Self::Repository)
}
}
/// Returns the original raw `uses:` clause.
pub fn raw(&self) -> &str {
match self {
Uses::Local(local) => &local.path,
Uses::Repository(repo) => repo.raw(),
Uses::Docker(docker) => docker.raw(),
}
}
}
/// A `uses: ./some/path` clause.
#[derive(Debug, PartialEq)]
#[non_exhaustive]
pub struct LocalUses {
pub path: String,
}
impl FromStr for LocalUses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
Ok(LocalUses { path: uses.into() })
impl LocalUses {
fn new(path: String) -> Self {
LocalUses { path }
}
}
/// A `uses: some/repo` clause.
#[derive(Debug, PartialEq)]
pub struct RepositoryUses {
struct RepositoryUsesInner<'a> {
/// The repo user or org.
pub owner: String,
owner: &'a str,
/// The repo name.
pub repo: String,
repo: &'a str,
/// The owner/repo slug.
slug: &'a str,
/// The subpath to the action or reusable workflow, if present.
pub subpath: Option<String>,
subpath: Option<&'a str>,
/// The `@<ref>` that the `uses:` is pinned to.
pub git_ref: String,
git_ref: &'a str,
}
impl FromStr for RepositoryUses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
// NOTE: FromStr is slightly sub-optimal, since it takes a borrowed
// &str and results in bunch of allocs for a fully owned type.
//
// In theory we could do `From<String>` instead, but
// `&mut str::split_mut` and similar don't exist yet.
impl<'a> RepositoryUsesInner<'a> {
fn from_str(uses: &'a str) -> Result<Self, UsesError> {
// NOTE: Both git refs and paths can contain `@`, but in practice
// GHA refuses to run a `uses:` clause with more than one `@` in it.
let (path, git_ref) = match uses.rsplit_once('@') {
@ -259,44 +259,110 @@ impl FromStr for RepositoryUses {
None => return Err(UsesError(format!("missing `@<ref>` in {uses}"))),
};
let components = path.splitn(3, '/').collect::<Vec<_>>();
if components.len() < 2 {
return Err(UsesError(format!("owner/repo slug is too short: {uses}")));
}
let mut components = path.splitn(3, '/');
Ok(RepositoryUses {
owner: components[0].into(),
repo: components[1].into(),
subpath: components.get(2).map(ToString::to_string),
git_ref: git_ref.into(),
})
if let Some(owner) = components.next()
&& let Some(repo) = components.next()
{
let subpath = components.next();
let slug = if subpath.is_none() {
path
} else {
&path[..owner.len() + 1 + repo.len()]
};
Ok(RepositoryUsesInner {
owner,
repo,
slug,
subpath,
git_ref,
})
} else {
Err(UsesError(format!("owner/repo slug is too short: {uses}")))
}
}
}
/// A `uses: docker://some-image` clause.
#[derive(Debug, PartialEq)]
pub struct DockerUses {
/// The registry this image is on, if present.
pub registry: Option<String>,
/// The name of the Docker image.
pub image: String,
/// An optional tag for the image.
pub tag: Option<String>,
/// An optional integrity hash for the image.
pub hash: Option<String>,
self_cell!(
/// A `uses: some/repo` clause.
pub struct RepositoryUses {
owner: String,
#[covariant]
dependent: RepositoryUsesInner,
}
impl {Debug, PartialEq}
);
impl Display for RepositoryUses {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.raw())
}
}
impl DockerUses {
impl RepositoryUses {
/// Parse a `uses: some/repo` clause.
pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
RepositoryUses::try_new(uses.into(), |s| {
let inner = RepositoryUsesInner::from_str(s)?;
Ok(inner)
})
}
/// Get the raw `uses:` string.
pub fn raw(&self) -> &str {
self.borrow_owner()
}
/// Get the owner (user or org) of this repository `uses:` clause.
pub fn owner(&self) -> &str {
self.borrow_dependent().owner
}
/// Get the repository name of this repository `uses:` clause.
pub fn repo(&self) -> &str {
self.borrow_dependent().repo
}
/// Get the owner/repo slug of this repository `uses:` clause.
pub fn slug(&self) -> &str {
self.borrow_dependent().slug
}
/// Get the optional subpath of this repository `uses:` clause.
pub fn subpath(&self) -> Option<&str> {
self.borrow_dependent().subpath
}
/// Get the git ref (branch, tag, or SHA) of this repository `uses:` clause.
pub fn git_ref(&self) -> &str {
self.borrow_dependent().git_ref
}
}
#[derive(Debug, PartialEq)]
#[non_exhaustive]
pub struct DockerUsesInner<'a> {
/// The registry this image is on, if present.
registry: Option<&'a str>,
/// The name of the Docker image.
image: &'a str,
/// An optional tag for the image.
tag: Option<&'a str>,
/// An optional integrity hash for the image.
hash: Option<&'a str>,
}
impl<'a> DockerUsesInner<'a> {
fn is_registry(registry: &str) -> bool {
// https://stackoverflow.com/a/42116190
registry == "localhost" || registry.contains('.') || registry.contains(':')
}
}
impl FromStr for DockerUses {
type Err = UsesError;
fn from_str(uses: &str) -> Result<Self, Self::Err> {
fn from_str(uses: &'a str) -> Result<Self, UsesError> {
let (registry, image) = match uses.split_once('/') {
Some((registry, image)) if Self::is_registry(registry) => (Some(registry), image),
_ => (None, uses),
@ -314,11 +380,11 @@ impl FromStr for DockerUses {
Some(&hash[1..])
};
Ok(DockerUses {
registry: registry.map(Into::into),
image: image.into(),
Ok(DockerUsesInner {
registry,
image,
tag: None,
hash: hash.map(Into::into),
hash,
})
} else {
let (image, tag) = match image.split_once(':') {
@ -327,16 +393,63 @@ impl FromStr for DockerUses {
_ => (image, None),
};
Ok(DockerUses {
registry: registry.map(Into::into),
image: image.into(),
tag: tag.map(Into::into),
Ok(DockerUsesInner {
registry,
image,
tag,
hash: None,
})
}
}
}
self_cell!(
/// A `uses: docker://some-image` clause.
pub struct DockerUses {
owner: String,
#[covariant]
dependent: DockerUsesInner,
}
impl {Debug, PartialEq}
);
impl DockerUses {
/// Parse a `uses: docker://some-image` clause.
pub fn parse(uses: impl Into<String>) -> Result<Self, UsesError> {
DockerUses::try_new(uses.into(), |s| {
let inner = DockerUsesInner::from_str(s)?;
Ok(inner)
})
}
/// Get the raw uses clause. This does not include the `docker://` prefix.
pub fn raw(&self) -> &str {
self.borrow_owner()
}
/// Get the optional registry of this Docker image.
pub fn registry(&self) -> Option<&str> {
self.borrow_dependent().registry
}
/// Get the image name of this Docker image.
pub fn image(&self) -> &str {
self.borrow_dependent().image
}
/// Get the optional tag of this Docker image.
pub fn tag(&self) -> Option<&str> {
self.borrow_dependent().tag
}
/// Get the optional hash of this Docker image.
pub fn hash(&self) -> Option<&str> {
self.borrow_dependent().hash
}
}
/// Wraps a `de::Error::custom` call to log the same error as
/// a `tracing::error!` event.
///
@ -351,13 +464,22 @@ where
de::Error::custom(msg)
}
/// Deserialize a `DockerUses`.
pub(crate) fn docker_uses<'de, D>(de: D) -> Result<DockerUses, D::Error>
where
D: Deserializer<'de>,
{
let uses = <String>::deserialize(de)?;
DockerUses::parse(uses).map_err(custom_error::<D>)
}
/// Deserialize an ordinary step `uses:`.
pub(crate) fn step_uses<'de, D>(de: D) -> Result<Uses, D::Error>
where
D: Deserializer<'de>,
{
let uses = <&str>::deserialize(de)?;
Uses::from_str(uses).map_err(custom_error::<D>)
let uses = <String>::deserialize(de)?;
Uses::parse(uses).map_err(custom_error::<D>)
}
/// Deserialize a reusable workflow step `uses:`
@ -396,9 +518,7 @@ mod tests {
use crate::common::{BasePermission, Env, EnvValue, Permission};
use super::{
DockerUses, LocalUses, Permissions, RepositoryUses, Uses, UsesError, reusable_step_uses,
};
use super::{Permissions, Uses, reusable_step_uses};
#[test]
fn test_permissions() {
@ -453,230 +573,418 @@ mod tests {
#[test]
fn test_uses_parses() {
let vectors = [
(
// Valid: fully pinned.
"actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "checkout".to_owned(),
// Fully pinned.
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "checkout",
slug: "actions/checkout",
subpath: None,
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(),
})),
),
(
// Valid: fully pinned, subpath
"actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "aws".to_owned(),
subpath: Some("ec2".to_owned()),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(),
})),
),
(
// Valid: fully pinned, complex subpath
"example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Ok(Uses::Repository(RepositoryUses {
owner: "example".to_owned(),
repo: "foo".to_owned(),
subpath: Some("bar/baz/quux".to_owned()),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned(),
})),
),
(
// Valid: pinned with branch/tag
"actions/checkout@v4",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "checkout".to_owned(),
subpath: None,
git_ref: "v4".to_owned(),
})),
),
(
"actions/checkout@abcd",
Ok(Uses::Repository(RepositoryUses {
owner: "actions".to_owned(),
repo: "checkout".to_owned(),
subpath: None,
git_ref: "abcd".to_owned(),
})),
),
(
// Invalid: unpinned
"actions/checkout",
Err(UsesError(
"missing `@<ref>` in actions/checkout".to_owned(),
)),
),
(
// Valid: Docker ref, implicit registry
"docker://alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: None,
image: "alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
})),
),
(
// Valid: Docker ref, localhost
"docker://localhost/alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: Some("localhost".to_owned()),
image: "alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
})),
),
(
// Valid: Docker ref, localhost w/ port
"docker://localhost:1337/alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: Some("localhost:1337".to_owned()),
image: "alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
})),
),
(
// Valid: Docker ref, custom registry
"docker://ghcr.io/foo/alpine:3.8",
Ok(Uses::Docker(DockerUses {
registry: Some("ghcr.io".to_owned()),
image: "foo/alpine".to_owned(),
tag: Some("3.8".to_owned()),
hash: None,
})),
),
(
// Valid: Docker ref, missing tag
"docker://ghcr.io/foo/alpine",
Ok(Uses::Docker(DockerUses {
registry: Some("ghcr.io".to_owned()),
image: "foo/alpine".to_owned(),
tag: None,
hash: None,
})),
),
(
// Invalid, but allowed: Docker ref, empty tag
"docker://ghcr.io/foo/alpine:",
Ok(Uses::Docker(DockerUses {
registry: Some("ghcr.io".to_owned()),
image: "foo/alpine".to_owned(),
tag: None,
hash: None,
})),
),
(
// Valid: Docker ref, bare
"docker://alpine",
Ok(Uses::Docker(DockerUses {
registry: None,
image: "alpine".to_owned(),
tag: None,
hash: None,
})),
),
(
// Valid: Docker ref, hash
"docker://alpine@hash",
Ok(Uses::Docker(DockerUses {
registry: None,
image: "alpine".to_owned(),
tag: None,
hash: Some("hash".to_owned()),
})),
),
(
// Valid: Local action "ref", actually part of the path
"./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89",
Ok(Uses::Local(LocalUses {
path: "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89".to_owned(),
})),
),
(
// Valid: Local action ref, unpinned
"./.github/actions/hello-world-action",
Ok(Uses::Local(LocalUses {
path: "./.github/actions/hello-world-action".to_owned(),
})),
),
// Invalid: missing user/repo
(
"checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
Err(UsesError(
"owner/repo slug is too short: checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3".to_owned()
)),
),
];
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3",
},
},
)
"#,
);
for (input, expected) in vectors {
assert_eq!(input.parse(), expected);
}
// Fully pinned, subpath.
insta::assert_debug_snapshot!(
Uses::parse("actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/aws/ec2@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "aws",
slug: "actions/aws",
subpath: Some(
"ec2",
),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3",
},
},
)
"#
);
// Fully pinned, complex subpath.
insta::assert_debug_snapshot!(
Uses::parse("example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "example/foo/bar/baz/quux@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
dependent: RepositoryUsesInner {
owner: "example",
repo: "foo",
slug: "example/foo",
subpath: Some(
"bar/baz/quux",
),
git_ref: "8f4b7f84864484a7bf31766abe9204da3cbe65b3",
},
},
)
"#
);
// Pinned with branch/tag.
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout@v4").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/checkout@v4",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "checkout",
slug: "actions/checkout",
subpath: None,
git_ref: "v4",
},
},
)
"#
);
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout@abcd").unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "actions/checkout@abcd",
dependent: RepositoryUsesInner {
owner: "actions",
repo: "checkout",
slug: "actions/checkout",
subpath: None,
git_ref: "abcd",
},
},
)
"#
);
// Invalid: unpinned.
insta::assert_debug_snapshot!(
Uses::parse("actions/checkout").unwrap_err(),
@r#"
UsesError(
"missing `@<ref>` in actions/checkout",
)
"#
);
// Valid: Docker ref, implicit registry.
insta::assert_debug_snapshot!(
Uses::parse("docker://alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "alpine:3.8",
dependent: DockerUsesInner {
registry: None,
image: "alpine",
tag: Some(
"3.8",
),
hash: None,
},
},
)
"#
);
// Valid: Docker ref, localhost.
insta::assert_debug_snapshot!(
Uses::parse("docker://localhost/alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "localhost/alpine:3.8",
dependent: DockerUsesInner {
registry: Some(
"localhost",
),
image: "alpine",
tag: Some(
"3.8",
),
hash: None,
},
},
)
"#
);
// Valid: Docker ref, localhost with port.
insta::assert_debug_snapshot!(
Uses::parse("docker://localhost:1337/alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "localhost:1337/alpine:3.8",
dependent: DockerUsesInner {
registry: Some(
"localhost:1337",
),
image: "alpine",
tag: Some(
"3.8",
),
hash: None,
},
},
)
"#
);
// Valid: Docker ref, custom registry.
insta::assert_debug_snapshot!(
Uses::parse("docker://ghcr.io/foo/alpine:3.8").unwrap(),
@r#"
Docker(
DockerUses {
owner: "ghcr.io/foo/alpine:3.8",
dependent: DockerUsesInner {
registry: Some(
"ghcr.io",
),
image: "foo/alpine",
tag: Some(
"3.8",
),
hash: None,
},
},
)
"#
);
// Valid: Docker ref, missing tag.
insta::assert_debug_snapshot!(
Uses::parse("docker://ghcr.io/foo/alpine").unwrap(),
@r#"
Docker(
DockerUses {
owner: "ghcr.io/foo/alpine",
dependent: DockerUsesInner {
registry: Some(
"ghcr.io",
),
image: "foo/alpine",
tag: None,
hash: None,
},
},
)
"#
);
// Invalid, but allowed: Docker ref, empty tag
insta::assert_debug_snapshot!(
Uses::parse("docker://ghcr.io/foo/alpine:").unwrap(),
@r#"
Docker(
DockerUses {
owner: "ghcr.io/foo/alpine:",
dependent: DockerUsesInner {
registry: Some(
"ghcr.io",
),
image: "foo/alpine",
tag: None,
hash: None,
},
},
)
"#
);
// Valid: Docker ref, bare.
insta::assert_debug_snapshot!(
Uses::parse("docker://alpine").unwrap(),
@r#"
Docker(
DockerUses {
owner: "alpine",
dependent: DockerUsesInner {
registry: None,
image: "alpine",
tag: None,
hash: None,
},
},
)
"#
);
// Valid: Docker ref, with hash.
insta::assert_debug_snapshot!(
Uses::parse("docker://alpine@hash").unwrap(),
@r#"
Docker(
DockerUses {
owner: "alpine@hash",
dependent: DockerUsesInner {
registry: None,
image: "alpine",
tag: None,
hash: Some(
"hash",
),
},
},
)
"#
);
// Valid: Local action "ref", actually part of the path
insta::assert_debug_snapshot!(
Uses::parse("./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89").unwrap(),
@r#"
Local(
LocalUses {
path: "./.github/actions/hello-world-action@172239021f7ba04fe7327647b213799853a9eb89",
},
)
"#
);
// Valid: Local action ref, unpinned.
insta::assert_debug_snapshot!(
Uses::parse("./.github/actions/hello-world-action").unwrap(),
@r#"
Local(
LocalUses {
path: "./.github/actions/hello-world-action",
},
)
"#
);
// Invalid: missing user/repo
insta::assert_debug_snapshot!(
Uses::parse("checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3").unwrap_err(),
@r#"
UsesError(
"owner/repo slug is too short: checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3",
)
"#
);
}
#[test]
fn test_uses_deser_reusable() {
let vectors = [
// Valid, as expected.
(
"octo-org/this-repo/.github/workflows/workflow-1.yml@\
172239021f7ba04fe7327647b213799853a9eb89",
Some(Uses::Repository(RepositoryUses {
owner: "octo-org".to_owned(),
repo: "this-repo".to_owned(),
subpath: Some(".github/workflows/workflow-1.yml".to_owned()),
git_ref: "172239021f7ba04fe7327647b213799853a9eb89".to_owned(),
})),
),
(
"octo-org/this-repo/.github/workflows/workflow-1.yml@notahash",
Some(Uses::Repository(RepositoryUses {
owner: "octo-org".to_owned(),
repo: "this-repo".to_owned(),
subpath: Some(".github/workflows/workflow-1.yml".to_owned()),
git_ref: "notahash".to_owned(),
})),
),
(
"octo-org/this-repo/.github/workflows/workflow-1.yml@abcd",
Some(Uses::Repository(RepositoryUses {
owner: "octo-org".to_owned(),
repo: "this-repo".to_owned(),
subpath: Some(".github/workflows/workflow-1.yml".to_owned()),
git_ref: "abcd".to_owned(),
})),
),
// Invalid: remote reusable workflow without ref
("octo-org/this-repo/.github/workflows/workflow-1.yml", None),
// Invalid: local reusable workflow with ref
(
"./.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89",
None,
),
// Invalid: no ref at all
("octo-org/this-repo/.github/workflows/workflow-1.yml", None),
(".github/workflows/workflow-1.yml", None),
// Invalid: missing user/repo
(
"workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89",
None,
),
];
// Dummy type for testing deser of `Uses`.
#[derive(Deserialize)]
#[serde(transparent)]
struct Dummy(#[serde(deserialize_with = "reusable_step_uses")] Uses);
for (input, expected) in vectors {
assert_eq!(
serde_yaml::from_str::<Dummy>(input).map(|d| d.0).ok(),
expected
);
}
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89"
)
.map(|d| d.0)
.unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89",
dependent: RepositoryUsesInner {
owner: "octo-org",
repo: "this-repo",
slug: "octo-org/this-repo",
subpath: Some(
".github/workflows/workflow-1.yml",
),
git_ref: "172239021f7ba04fe7327647b213799853a9eb89",
},
},
)
"#
);
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml@notahash"
).map(|d| d.0).unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@notahash",
dependent: RepositoryUsesInner {
owner: "octo-org",
repo: "this-repo",
slug: "octo-org/this-repo",
subpath: Some(
".github/workflows/workflow-1.yml",
),
git_ref: "notahash",
},
},
)
"#
);
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml@abcd"
).map(|d| d.0).unwrap(),
@r#"
Repository(
RepositoryUses {
owner: "octo-org/this-repo/.github/workflows/workflow-1.yml@abcd",
dependent: RepositoryUsesInner {
owner: "octo-org",
repo: "this-repo",
slug: "octo-org/this-repo",
subpath: Some(
".github/workflows/workflow-1.yml",
),
git_ref: "abcd",
},
},
)
"#
);
// Invalid: remote reusable workflow without ref
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"octo-org/this-repo/.github/workflows/workflow-1.yml"
).map(|d| d.0).unwrap_err(),
@r#"Error("malformed `uses` ref: missing `@<ref>` in octo-org/this-repo/.github/workflows/workflow-1.yml")"#
);
// Invalid: local reusable workflow with ref
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"./.github/workflows/workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89"
).map(|d| d.0).unwrap_err(),
@r#"Error("local reusable workflow reference can't specify `@<ref>`")"#
);
// Invalid: no ref at all
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
".github/workflows/workflow-1.yml"
).map(|d| d.0).unwrap_err(),
@r#"Error("malformed `uses` ref: missing `@<ref>` in .github/workflows/workflow-1.yml")"#
);
// Invalid: missing user/repo
insta::assert_debug_snapshot!(
serde_yaml::from_str::<Dummy>(
"workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89"
).map(|d| d.0).unwrap_err(),
@r#"Error("malformed `uses` ref: owner/repo slug is too short: workflow-1.yml@172239021f7ba04fe7327647b213799853a9eb89")"#
);
}
}

View file

@ -349,6 +349,8 @@ pub enum AllowDeny {
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum PackageEcosystem {
/// `bazel`
Bazel,
/// `bun`
Bun,
/// `bundler`
@ -369,6 +371,8 @@ pub enum PackageEcosystem {
DotnetSdk,
/// `helm`
Helm,
/// `julia`
Julia,
/// `elm`
Elm,
/// `gitsubmodule`
@ -387,6 +391,8 @@ pub enum PackageEcosystem {
Npm,
/// `nuget`
Nuget,
/// `opentofu`
Opentofu,
/// `pip`
Pip,
/// `pub`

View file

@ -5,7 +5,7 @@ use serde::Deserialize;
use serde_yaml::Value;
use crate::common::expr::{BoE, LoE};
use crate::common::{Env, If, Permissions, Uses, custom_error};
use crate::common::{DockerUses, Env, If, Permissions, Uses, custom_error};
use super::{Concurrency, Defaults};
@ -162,7 +162,8 @@ pub struct Matrix {
pub enum Container {
Name(String),
Container {
image: String,
#[serde(deserialize_with = "crate::common::docker_uses")]
image: DockerUses,
credentials: Option<DockerCredentials>,
#[serde(default)]
env: LoE<Env>,

View file

@ -0,0 +1,23 @@
# https://github.com/zizmorcore/zizmor/issues/1451
version: 2
enable-beta-ecosystems: true
updates:
- package-ecosystem: "opentofu"
directories:
- "/stack"
- "/modules/default-branch-protection"
schedule:
interval: "cron"
cronjob: "30 7 * * *"
timezone: "Europe/London"
target-branch: "main"
groups:
terraform:
applies-to: "version-updates"
patterns:
- "*"
update-types:
- "patch"
- "minor"

View file

@ -1,4 +1,4 @@
use std::{env, path::Path, str::FromStr};
use std::{env, path::Path};
use github_actions_models::{
common::{
@ -56,13 +56,13 @@ fn test_pip_audit_ci() {
let StepBody::Uses { uses, with } = &test_job.steps[0].body else {
panic!("expected uses step");
};
assert_eq!(uses, &Uses::from_str("actions/checkout@v4.1.1").unwrap());
assert_eq!(uses, &Uses::parse("actions/checkout@v4.1.1").unwrap());
assert!(with.is_empty());
let StepBody::Uses { uses, with } = &test_job.steps[1].body else {
panic!("expected uses step");
};
assert_eq!(uses, &Uses::from_str("actions/setup-python@v5").unwrap());
assert_eq!(uses, &Uses::parse("actions/setup-python@v5").unwrap());
assert_eq!(with["python-version"].to_string(), "${{ matrix.python }}");
assert_eq!(with["cache"].to_string(), "pip");
assert_eq!(with["cache-dependency-path"].to_string(), "pyproject.toml");

View file

@ -1,7 +1,7 @@
[package]
name = "tree-sitter-iter"
description = "A very simple pre-order iterator for tree-sitter CSTs"
version = "0.0.2"
version = "0.0.3"
authors.workspace = true
homepage.workspace = true
edition.workspace = true

View file

@ -1,6 +1,6 @@
[package]
name = "yamlpatch"
version = "0.7.0"
version = "0.8.0"
description = "Comment and format-preserving YAML patch operations"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpatch"
keywords = ["yaml", "patch"]

View file

@ -41,16 +41,17 @@ human review.
## Operations
`yamlpatch` supports several types of patch operations:
`yamlpatch` supports the following patch operations:
- **Replace**: Replace a value at a specific path
- **Add**: Add new key-value pairs to mappings
- **Remove**: Remove keys or elements
- **MergeInto**: Merge values into existing mappings
- **Append**: Append items to block sequences
- **ReplaceComment**: Replace comments associated with features
- **RewriteFragment**: Rewrite portions of string values (useful for templating)
Each operation is designed to work with the existing document structure
and formatting, making minimal changes while achieving the desired result.
Each operation preserves the document's formatting and structure on a best-effort basis.
## License

View file

@ -903,21 +903,21 @@ normal:
let end = find_content_end(&feature, &doc);
insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r"
bar: baz
abc: def # comment
");
bar: baz
abc: def # comment
");
let feature = route_to_feature_exact(&route!("interior-spaces"), &doc)
.unwrap()
.unwrap();
let end = find_content_end(&feature, &doc);
insta::assert_snapshot!(doc.source()[feature.location.byte_span.0..end], @r"
- foo
- foo
- bar
# hello
- baz # hello
");
- bar
# hello
- baz # hello
");
let feature = route_to_feature_exact(&route!("normal"), &doc)
.unwrap()
@ -2775,11 +2775,12 @@ items:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
items:
- first
- second
- third
");
items:
- first
- second
- third
");
}
#[test]
@ -2822,19 +2823,20 @@ databases:
let result =
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r#"
databases:
- name: primary
host: db1.example.com
port: 5432
max_connections: 100
ssl: true
readonly: false
- name: analytics
host: db2.example.com
port: 5433
readonly: true
"#);
insta::assert_snapshot!(result.source(), @r"
databases:
- name: primary
host: db1.example.com
port: 5432
max_connections: 100
ssl: true
readonly: false
- name: analytics
host: db2.example.com
port: 5433
readonly: true
");
}
#[test]
@ -2870,16 +2872,17 @@ jobs:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r#"
jobs:
test:
steps:
- name: First step
run: echo "first"
- name: Second step
run: echo "second"
- name: Third step
run: echo "third"
"#);
jobs:
test:
steps:
- name: First step
run: echo "first"
- name: Second step
run: echo "second"
- name: Third step
run: echo "third"
"#);
}
#[test]
@ -2924,20 +2927,21 @@ servers:
assert!(result.source().contains("# Staging server"));
assert!(result.source().contains("# internal only"));
insta::assert_snapshot!(result.source(), @r#"
servers:
# Production server
- name: prod
host: prod.example.com
port: 443
# Staging server
- name: staging
host: staging.example.com # internal only
port: 8443
- name: dev
host: localhost
port: 8080
"#);
insta::assert_snapshot!(result.source(), @r"
servers:
# Production server
- name: prod
host: prod.example.com
port: 443
# Staging server
- name: staging
host: staging.example.com # internal only
port: 8443
- name: dev
host: localhost
port: 8080
");
}
#[test]
@ -2959,11 +2963,12 @@ ports:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
ports:
- 8080
- 8081
- 8082
");
ports:
- 8080
- 8081
- 8082
");
}
#[test]
@ -2985,11 +2990,12 @@ configs:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
configs:
- name: config1
value: 123
- {}
");
configs:
- name: config1
value: 123
- {}
");
}
#[test]
@ -3031,15 +3037,16 @@ services:
let result =
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r#"
services:
- name: api
port: 8080
- name: worker
port: 9090
config:
replicas: 3
"#);
insta::assert_snapshot!(result.source(), @r"
services:
- name: api
port: 8080
- name: worker
port: 9090
config:
replicas: 3
");
}
#[test]
@ -3096,11 +3103,12 @@ tasks:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
tasks:
- task1
- task2
- task3
");
tasks:
- task1
- task2
- task3
");
}
#[test]
@ -3138,20 +3146,21 @@ jobs:
let result =
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r#"
name: CI
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run tests
run: npm test
- name: Upload coverage
uses: codecov/codecov-action@v3
"#);
insta::assert_snapshot!(result.source(), @r"
name: CI
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Run tests
run: npm test
- name: Upload coverage
uses: codecov/codecov-action@v3
");
}
#[test]
@ -3176,9 +3185,10 @@ foo:
apply_yaml_patches(&yamlpath::Document::new(original).unwrap(), &operations).unwrap();
insta::assert_snapshot!(result.source(), @r"
foo:
- abc
- - def
- ghi
");
foo:
- abc
- - def
- ghi
");
}

View file

@ -1,6 +1,6 @@
[package]
name = "yamlpath"
version = "0.29.0"
version = "0.31.0"
description = "Format-preserving YAML feature extraction"
repository = "https://github.com/zizmorcore/zizmor/tree/main/crates/yamlpath"
readme = "README.md"

View file

@ -1,7 +1,7 @@
[package]
name = "zizmor"
description = "Static analysis for GitHub Actions"
version = "1.18.0-rc3"
version = "1.19.0"
repository = "https://github.com/zizmorcore/zizmor"
documentation = "https://docs.zizmor.sh"
keywords = ["cli", "github-actions", "static-analysis", "security"]

View file

@ -4,7 +4,7 @@ use std::fs::{self, File};
use std::path::Path;
use std::{env, io};
use fst::MapBuilder;
use fst::{MapBuilder, SetBuilder};
fn do_context_capabilities() {
let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
@ -55,7 +55,29 @@ fn do_codeql_injection_sinks() {
fs::copy(source, target).unwrap();
}
/// Build-script step: compiles `data/archived-repos.txt` (a sorted list of
/// `owner/repo` slugs) into an FST set at `$OUT_DIR/archived-repos.fst`,
/// which the `archived-uses` audit embeds via `include_bytes!`.
///
/// Panics on any I/O or FST-construction failure, which is acceptable in a
/// build script: a failure here should fail the build loudly.
fn do_archived_action_repos() {
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
    let source = Path::new(&manifest_dir).join("data/archived-repos.txt");
    let target = Path::new(&env::var("OUT_DIR").unwrap()).join("archived-repos.fst");

    // Cargo parses build-script directives line-by-line from stdout, so the
    // directive must be newline-terminated: `println!`, not `print!`.
    println!(
        "cargo::rerun-if-changed={source}",
        source = source.display()
    );

    let out = io::BufWriter::new(File::create(target).unwrap());
    let mut build = SetBuilder::new(out).unwrap();

    let contents = fs::read_to_string(source).unwrap();
    // NOTE: `SetBuilder::insert` requires keys in lexicographic order; the
    // data file is kept sorted, so insertion order is already correct.
    for line in contents.lines() {
        build.insert(line).unwrap();
    }

    build.finish().unwrap();
}
/// Build-script entry point: regenerates every embedded data artifact
/// (context capabilities, CodeQL injection sinks, archived action repos).
fn main() {
    do_context_capabilities();
    do_codeql_injection_sinks();
    do_archived_action_repos();
}

View file

@ -0,0 +1,87 @@
8398a7/action-slack
actions-rs/audit-check
actions-rs/cargo
actions-rs/clippy-check
actions-rs/components-nightly
actions-rs/grcov
actions-rs/install
actions-rs/tarpaulin
actions-rs/toolchain
actions/create-release
actions/setup-elixir
actions/setup-haskell
actions/setup-ruby
actions/upload-release-asset
andrewmcodes-archive/rubocop-linter-action
artichoke/setup-rust
aslafy-z/conventional-pr-title-action
azure/appconfiguration-sync
azure/appservice-actions
azure/azure-resource-login-action
azure/container-actions
azure/container-scan
azure/data-factory-deploy-action
azure/data-factory-export-action
azure/data-factory-validate-action
azure/get-keyvault-secrets
azure/k8s-actions
azure/manage-azure-policy
azure/publish-security-assessments
azure/run-sqlpackage-action
azure/spring-cloud-deploy
azure/webapps-container-deploy
cedrickring/golang-action
cirrus-actions/rebase
crazy-max/ghaction-docker-buildx
decathlon/pull-request-labeler-action
delaguardo/setup-graalvm
dulvui/godot-android-export
expo/expo-preview-action
fabasoad/setup-zizmor-action
facebook/pysa-action
fregante/release-with-changelog
google/mirror-branch-action
google/skywater-pdk-actions
gradle/gradle-build-action
grafana/k6-action
helaili/github-graphql-action
helaili/jekyll-action
ilshidur/action-slack
jakejarvis/backblaze-b2-action
jakejarvis/cloudflare-purge-action
jakejarvis/firebase-deploy-action
jakejarvis/hugo-build-action
jakejarvis/lighthouse-action
jakejarvis/s3-sync-action
justinribeiro/lighthouse-action
kanadgupta/glitch-sync
kxxt/chatgpt-action
machine-learning-apps/wandb-action
mansagroup/gcs-cache-action
marvinpinto/action-automatic-releases
marvinpinto/actions
maxheld83/ghpages
micnncim/action-lgtm-reaction
mikepenz/gradle-dependency-submission
orf/cargo-bloat-action
paambaati/codeclimate-action
primer/figma-action
repo-sync/pull-request
repo-sync/repo-sync
sagebind/docker-swarm-deploy-action
scottbrenner/generate-changelog-action
secrethub/actions
semgrep/semgrep-action
shaunlwm/action-release-debugapk
sonarsource/sonarcloud-github-action
stefanprodan/kube-tools
swiftdocorg/github-wiki-publish-action
tachiyomiorg/issue-moderator-action
technote-space/auto-cancel-redundant-workflow
technote-space/get-diff-action
tencentcloudbase/cloudbase-action
trmcnvn/chrome-addon
whelk-io/maven-settings-xml-action
yeslayla/build-godot-action
youyo/aws-cdk-github-actions
z0al/dependent-issues

View file

@ -272,6 +272,17 @@ github.event.changes.new_issue.draft,fixed
github.event.changes.new_issue.events_url,structured
github.event.changes.new_issue.html_url,structured
github.event.changes.new_issue.id,fixed
github.event.changes.new_issue.issue_dependencies_summary.blocked_by,fixed
github.event.changes.new_issue.issue_dependencies_summary.blocking,fixed
github.event.changes.new_issue.issue_dependencies_summary.total_blocked_by,fixed
github.event.changes.new_issue.issue_dependencies_summary.total_blocking,fixed
github.event.changes.new_issue.issue_field_values.*.data_type,fixed
github.event.changes.new_issue.issue_field_values.*.issue_field_id,fixed
github.event.changes.new_issue.issue_field_values.*.node_id,arbitrary
github.event.changes.new_issue.issue_field_values.*.single_select_option.color,arbitrary
github.event.changes.new_issue.issue_field_values.*.single_select_option.id,fixed
github.event.changes.new_issue.issue_field_values.*.single_select_option.name,arbitrary
github.event.changes.new_issue.issue_field_values.*.value,arbitrary
github.event.changes.new_issue.labels.*.color,arbitrary
github.event.changes.new_issue.labels.*.default,fixed
github.event.changes.new_issue.labels.*.description,arbitrary
@ -762,6 +773,17 @@ github.event.changes.old_issue.draft,fixed
github.event.changes.old_issue.events_url,structured
github.event.changes.old_issue.html_url,structured
github.event.changes.old_issue.id,fixed
github.event.changes.old_issue.issue_dependencies_summary.blocked_by,fixed
github.event.changes.old_issue.issue_dependencies_summary.blocking,fixed
github.event.changes.old_issue.issue_dependencies_summary.total_blocked_by,fixed
github.event.changes.old_issue.issue_dependencies_summary.total_blocking,fixed
github.event.changes.old_issue.issue_field_values.*.data_type,fixed
github.event.changes.old_issue.issue_field_values.*.issue_field_id,fixed
github.event.changes.old_issue.issue_field_values.*.node_id,arbitrary
github.event.changes.old_issue.issue_field_values.*.single_select_option.color,arbitrary
github.event.changes.old_issue.issue_field_values.*.single_select_option.id,fixed
github.event.changes.old_issue.issue_field_values.*.single_select_option.name,arbitrary
github.event.changes.old_issue.issue_field_values.*.value,arbitrary
github.event.changes.old_issue.labels.*.color,arbitrary
github.event.changes.old_issue.labels.*.default,fixed
github.event.changes.old_issue.labels.*.description,arbitrary
@ -1060,7 +1082,6 @@ github.event.changes.required_status_checks.from.*,arbitrary
github.event.changes.required_status_checks_enforcement_level.from,fixed
github.event.changes.tag_name.from,arbitrary
github.event.changes.title.from,arbitrary
github.event.check_run.app,fixed
github.event.check_run.app.client_id,arbitrary
github.event.check_run.app.created_at,fixed
github.event.check_run.app.description,arbitrary
@ -1191,6 +1212,8 @@ github.event.check_run.check_suite.repository.compare_url,arbitrary
github.event.check_run.check_suite.repository.contents_url,arbitrary
github.event.check_run.check_suite.repository.contributors_url,structured
github.event.check_run.check_suite.repository.created_at,fixed
github.event.check_run.check_suite.repository.custom_properties,arbitrary
github.event.check_run.check_suite.repository.custom_properties.*,arbitrary
github.event.check_run.check_suite.repository.default_branch,arbitrary
github.event.check_run.check_suite.repository.delete_branch_on_merge,fixed
github.event.check_run.check_suite.repository.deployments_url,structured
@ -2207,6 +2230,17 @@ github.event.issue.draft,fixed
github.event.issue.events_url,arbitrary
github.event.issue.html_url,arbitrary
github.event.issue.id,fixed
github.event.issue.issue_dependencies_summary.blocked_by,fixed
github.event.issue.issue_dependencies_summary.blocking,fixed
github.event.issue.issue_dependencies_summary.total_blocked_by,fixed
github.event.issue.issue_dependencies_summary.total_blocking,fixed
github.event.issue.issue_field_values.*.data_type,fixed
github.event.issue.issue_field_values.*.issue_field_id,fixed
github.event.issue.issue_field_values.*.node_id,arbitrary
github.event.issue.issue_field_values.*.single_select_option.color,arbitrary
github.event.issue.issue_field_values.*.single_select_option.id,fixed
github.event.issue.issue_field_values.*.single_select_option.name,arbitrary
github.event.issue.issue_field_values.*.value,arbitrary
github.event.issue.labels.*,arbitrary
github.event.issue.labels.*.color,arbitrary
github.event.issue.labels.*.default,fixed
@ -3047,6 +3081,7 @@ github.event.pull_request.requested_reviewers.*.url,structured
github.event.pull_request.requested_reviewers.*.user_view_type,arbitrary
github.event.pull_request.requested_teams.*.deleted,fixed
github.event.pull_request.requested_teams.*.description,arbitrary
github.event.pull_request.requested_teams.*.enterprise_id,fixed
github.event.pull_request.requested_teams.*.html_url,structured
github.event.pull_request.requested_teams.*.id,fixed
github.event.pull_request.requested_teams.*.ldap_dn,arbitrary
@ -3054,6 +3089,7 @@ github.event.pull_request.requested_teams.*.members_url,arbitrary
github.event.pull_request.requested_teams.*.name,arbitrary
github.event.pull_request.requested_teams.*.node_id,arbitrary
github.event.pull_request.requested_teams.*.notification_setting,arbitrary
github.event.pull_request.requested_teams.*.organization_id,fixed
github.event.pull_request.requested_teams.*.parent.description,arbitrary
github.event.pull_request.requested_teams.*.parent.html_url,structured
github.event.pull_request.requested_teams.*.parent.id,fixed
@ -3069,6 +3105,7 @@ github.event.pull_request.requested_teams.*.permission,arbitrary
github.event.pull_request.requested_teams.*.privacy,arbitrary
github.event.pull_request.requested_teams.*.repositories_url,structured
github.event.pull_request.requested_teams.*.slug,arbitrary
github.event.pull_request.requested_teams.*.type,fixed
github.event.pull_request.requested_teams.*.url,structured
github.event.pull_request.review_comment_url,arbitrary
github.event.pull_request.review_comments,fixed
@ -3349,6 +3386,7 @@ github.event.release.discussion_url,structured
github.event.release.draft,fixed
github.event.release.html_url,structured
github.event.release.id,fixed
github.event.release.immutable,fixed
github.event.release.name,arbitrary
github.event.release.node_id,arbitrary
github.event.release.prerelease,fixed
@ -3366,6 +3404,7 @@ github.event.release.reactions.url,structured
github.event.release.tag_name,arbitrary
github.event.release.tarball_url,structured
github.event.release.target_commitish,arbitrary
github.event.release.updated_at,fixed
github.event.release.upload_url,structured
github.event.release.url,structured
github.event.release.zipball_url,structured
@ -3691,6 +3730,7 @@ github.event.review.node_id,arbitrary
github.event.review.pull_request_url,structured
github.event.review.state,arbitrary
github.event.review.submitted_at,fixed
github.event.review.updated_at,fixed
github.event.review.user.avatar_url,structured
github.event.review.user.deleted,fixed
github.event.review.user.email,arbitrary

1 github.action_path fixed
272 github.event.changes.new_issue.events_url structured
273 github.event.changes.new_issue.html_url structured
274 github.event.changes.new_issue.id fixed
275 github.event.changes.new_issue.issue_dependencies_summary.blocked_by fixed
276 github.event.changes.new_issue.issue_dependencies_summary.blocking fixed
277 github.event.changes.new_issue.issue_dependencies_summary.total_blocked_by fixed
278 github.event.changes.new_issue.issue_dependencies_summary.total_blocking fixed
279 github.event.changes.new_issue.issue_field_values.*.data_type fixed
280 github.event.changes.new_issue.issue_field_values.*.issue_field_id fixed
281 github.event.changes.new_issue.issue_field_values.*.node_id arbitrary
282 github.event.changes.new_issue.issue_field_values.*.single_select_option.color arbitrary
283 github.event.changes.new_issue.issue_field_values.*.single_select_option.id fixed
284 github.event.changes.new_issue.issue_field_values.*.single_select_option.name arbitrary
285 github.event.changes.new_issue.issue_field_values.*.value arbitrary
286 github.event.changes.new_issue.labels.*.color arbitrary
287 github.event.changes.new_issue.labels.*.default fixed
288 github.event.changes.new_issue.labels.*.description arbitrary
773 github.event.changes.old_issue.events_url structured
774 github.event.changes.old_issue.html_url structured
775 github.event.changes.old_issue.id fixed
776 github.event.changes.old_issue.issue_dependencies_summary.blocked_by fixed
777 github.event.changes.old_issue.issue_dependencies_summary.blocking fixed
778 github.event.changes.old_issue.issue_dependencies_summary.total_blocked_by fixed
779 github.event.changes.old_issue.issue_dependencies_summary.total_blocking fixed
780 github.event.changes.old_issue.issue_field_values.*.data_type fixed
781 github.event.changes.old_issue.issue_field_values.*.issue_field_id fixed
782 github.event.changes.old_issue.issue_field_values.*.node_id arbitrary
783 github.event.changes.old_issue.issue_field_values.*.single_select_option.color arbitrary
784 github.event.changes.old_issue.issue_field_values.*.single_select_option.id fixed
785 github.event.changes.old_issue.issue_field_values.*.single_select_option.name arbitrary
786 github.event.changes.old_issue.issue_field_values.*.value arbitrary
787 github.event.changes.old_issue.labels.*.color arbitrary
788 github.event.changes.old_issue.labels.*.default fixed
789 github.event.changes.old_issue.labels.*.description arbitrary
1082 github.event.changes.required_status_checks_enforcement_level.from fixed
1083 github.event.changes.tag_name.from arbitrary
1084 github.event.changes.title.from arbitrary
github.event.check_run.app fixed
1085 github.event.check_run.app.client_id arbitrary
1086 github.event.check_run.app.created_at fixed
1087 github.event.check_run.app.description arbitrary
1212 github.event.check_run.check_suite.repository.contents_url arbitrary
1213 github.event.check_run.check_suite.repository.contributors_url structured
1214 github.event.check_run.check_suite.repository.created_at fixed
1215 github.event.check_run.check_suite.repository.custom_properties arbitrary
1216 github.event.check_run.check_suite.repository.custom_properties.* arbitrary
1217 github.event.check_run.check_suite.repository.default_branch arbitrary
1218 github.event.check_run.check_suite.repository.delete_branch_on_merge fixed
1219 github.event.check_run.check_suite.repository.deployments_url structured
2230 github.event.issue.events_url arbitrary
2231 github.event.issue.html_url arbitrary
2232 github.event.issue.id fixed
2233 github.event.issue.issue_dependencies_summary.blocked_by fixed
2234 github.event.issue.issue_dependencies_summary.blocking fixed
2235 github.event.issue.issue_dependencies_summary.total_blocked_by fixed
2236 github.event.issue.issue_dependencies_summary.total_blocking fixed
2237 github.event.issue.issue_field_values.*.data_type fixed
2238 github.event.issue.issue_field_values.*.issue_field_id fixed
2239 github.event.issue.issue_field_values.*.node_id arbitrary
2240 github.event.issue.issue_field_values.*.single_select_option.color arbitrary
2241 github.event.issue.issue_field_values.*.single_select_option.id fixed
2242 github.event.issue.issue_field_values.*.single_select_option.name arbitrary
2243 github.event.issue.issue_field_values.*.value arbitrary
2244 github.event.issue.labels.* arbitrary
2245 github.event.issue.labels.*.color arbitrary
2246 github.event.issue.labels.*.default fixed
3081 github.event.pull_request.requested_reviewers.*.user_view_type arbitrary
3082 github.event.pull_request.requested_teams.*.deleted fixed
3083 github.event.pull_request.requested_teams.*.description arbitrary
3084 github.event.pull_request.requested_teams.*.enterprise_id fixed
3085 github.event.pull_request.requested_teams.*.html_url structured
3086 github.event.pull_request.requested_teams.*.id fixed
3087 github.event.pull_request.requested_teams.*.ldap_dn arbitrary
3089 github.event.pull_request.requested_teams.*.name arbitrary
3090 github.event.pull_request.requested_teams.*.node_id arbitrary
3091 github.event.pull_request.requested_teams.*.notification_setting arbitrary
3092 github.event.pull_request.requested_teams.*.organization_id fixed
3093 github.event.pull_request.requested_teams.*.parent.description arbitrary
3094 github.event.pull_request.requested_teams.*.parent.html_url structured
3095 github.event.pull_request.requested_teams.*.parent.id fixed
3105 github.event.pull_request.requested_teams.*.privacy arbitrary
3106 github.event.pull_request.requested_teams.*.repositories_url structured
3107 github.event.pull_request.requested_teams.*.slug arbitrary
3108 github.event.pull_request.requested_teams.*.type fixed
3109 github.event.pull_request.requested_teams.*.url structured
3110 github.event.pull_request.review_comment_url arbitrary
3111 github.event.pull_request.review_comments fixed
3386 github.event.release.draft fixed
3387 github.event.release.html_url structured
3388 github.event.release.id fixed
3389 github.event.release.immutable fixed
3390 github.event.release.name arbitrary
3391 github.event.release.node_id arbitrary
3392 github.event.release.prerelease fixed
3404 github.event.release.tag_name arbitrary
3405 github.event.release.tarball_url structured
3406 github.event.release.target_commitish arbitrary
3407 github.event.release.updated_at fixed
3408 github.event.release.upload_url structured
3409 github.event.release.url structured
3410 github.event.release.zipball_url structured
3730 github.event.review.pull_request_url structured
3731 github.event.review.state arbitrary
3732 github.event.review.submitted_at fixed
3733 github.event.review.updated_at fixed
3734 github.event.review.user.avatar_url structured
3735 github.event.review.user.deleted fixed
3736 github.event.review.user.email arbitrary

View file

@ -39,6 +39,7 @@ impl Audit for AnonymousDefinition {
.confidence(Confidence::High)
.persona(Persona::Pedantic)
.add_location(workflow.location().primary())
.tip("use 'name: ...' to give this workflow a name")
.build(workflow)?,
);
}
@ -47,14 +48,13 @@ impl Audit for AnonymousDefinition {
match job {
Job::NormalJob(normal) => {
if normal.name.is_none() {
let location = normal.location().primary();
findings.push(
Self::finding()
.severity(ANONYMOUS_DEFINITION_JOB_SEVERITY)
.confidence(Confidence::High)
.persona(Persona::Pedantic)
.add_location(location)
.add_location(normal.location_with_grip().primary())
.tip("use 'name: ...' to give this job a name")
.build(workflow)?,
);
}

View file

@ -0,0 +1,140 @@
use std::sync::LazyLock;
use fst::Set;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;
use crate::{
audit::{Audit, AuditError, AuditLoadError, audit_meta},
config::Config,
finding::{Confidence, Finding, FindingBuilder, Persona, Severity, location::Locatable},
models::{
StepCommon as _,
action::CompositeStep,
workflow::{ReusableWorkflowCallJob, Step},
},
state::AuditState,
};
/// Set of known-archived `owner/repo` slugs, loaded lazily from the FST
/// that the build script generates into `OUT_DIR` (`archived-repos.fst`).
/// Slugs in the FST are lowercase; lookups must normalize case to match.
static ARCHIVED_REPOS_FST: LazyLock<Set<&[u8]>> = LazyLock::new(|| {
    fst::Set::new(include_bytes!(concat!(env!("OUT_DIR"), "/archived-repos.fst")).as_slice())
        .expect("couldn't initialize archived repos FST")
});
/// Audits `uses:` clauses that reference actions or reusable workflows
/// hosted in archived (read-only, unmaintained) GitHub repositories.
pub(crate) struct ArchivedUses;

audit_meta!(
    ArchivedUses,
    "archived-uses",
    "action or reusable workflow from archived repository"
);
impl ArchivedUses {
    /// Checks whether `uses` refers to a known-archived repository.
    ///
    /// Returns a pre-populated finding builder (high confidence, medium
    /// severity, regular persona) when the normalized `owner/repo` slug is
    /// present in the embedded archived-repository set; `None` otherwise.
    pub(crate) fn uses_is_archived<'doc>(uses: &RepositoryUses) -> Option<FindingBuilder<'doc>> {
        // TODO: Annoying that we need to allocate for case normalization here; can we use an
        // automaton to search the FST case-insensitively?
        let owner = uses.owner().to_lowercase();
        let repo = uses.repo().to_lowercase();
        let slug = format!("{owner}/{repo}");

        if ARCHIVED_REPOS_FST.contains(slug.as_bytes()) {
            Some(
                Self::finding()
                    .confidence(Confidence::High)
                    .severity(Severity::Medium)
                    .persona(Persona::Regular),
            )
        } else {
            None
        }
    }
}
#[async_trait::async_trait]
impl Audit for ArchivedUses {
    fn new(_state: &AuditState) -> Result<Self, AuditLoadError>
    where
        Self: Sized,
    {
        // Stateless audit: the archived-repo set is embedded at build time,
        // so construction can never fail.
        Ok(Self)
    }

    /// Flags workflow steps whose `uses:` references an archived repository.
    async fn audit_step<'doc>(
        &self,
        step: &Step<'doc>,
        _config: &Config,
    ) -> Result<Vec<Finding<'doc>>, AuditError> {
        let mut findings = vec![];

        // Only repository-style `uses:` (owner/repo@ref) can be archived;
        // Docker and local uses are ignored.
        if let Some(Uses::Repository(uses)) = step.uses()
            && let Some(finding) = Self::uses_is_archived(uses)
        {
            findings.push(
                finding
                    .add_location(step.location_with_grip())
                    .add_location(
                        step.location()
                            .with_keys(["uses".into()])
                            // Highlight just the `owner/repo` slug within the value.
                            .subfeature(Subfeature::new(0, uses.slug()))
                            .annotated("repository is archived")
                            .primary(),
                    )
                    .build(step)?,
            )
        }

        Ok(findings)
    }

    /// Same check as `audit_step`, but for steps inside composite actions.
    async fn audit_composite_step<'doc>(
        &self,
        step: &CompositeStep<'doc>,
        _config: &Config,
    ) -> Result<Vec<Finding<'doc>>, AuditError> {
        let mut findings = vec![];

        if let Some(Uses::Repository(uses)) = step.uses()
            && let Some(finding) = Self::uses_is_archived(uses)
        {
            findings.push(
                finding
                    .add_location(step.location_with_grip())
                    .add_location(
                        step.location()
                            .with_keys(["uses".into()])
                            .subfeature(Subfeature::new(0, uses.slug()))
                            .annotated("repository is archived")
                            .primary(),
                    )
                    .build(step)?,
            )
        }

        Ok(findings)
    }

    /// Same check again for reusable-workflow call jobs (`jobs.<id>.uses`).
    async fn audit_reusable_job<'doc>(
        &self,
        job: &ReusableWorkflowCallJob<'doc>,
        _config: &Config,
    ) -> Result<Vec<Finding<'doc>>, AuditError> {
        let mut findings = vec![];

        if let Uses::Repository(uses) = &job.uses
            && let Some(finding) = Self::uses_is_archived(uses)
        {
            findings.push(
                finding
                    .add_location(job.location_with_grip())
                    .add_location(
                        job.location()
                            .with_keys(["uses".into()])
                            .subfeature(Subfeature::new(0, uses.slug()))
                            .annotated("repository is archived")
                            .primary(),
                    )
                    .build(job)?,
            )
        }

        Ok(findings)
    }
}

View file

@ -42,12 +42,12 @@ impl Artipacked {
uses: &github_actions_models::common::RepositoryUses,
) -> Result<Option<bool>, ClientError> {
let version = if !uses.ref_is_commit() {
uses.git_ref.clone()
uses.git_ref().to_string()
} else {
match self.client {
Some(ref client) => {
let tag = client
.longest_tag_for_commit(&uses.owner, &uses.repo, &uses.git_ref)
.longest_tag_for_commit(uses.owner(), uses.repo(), uses.git_ref())
.await?;
match tag {
@ -271,8 +271,6 @@ impl Audit for Artipacked {
#[cfg(test)]
mod tests {
use std::str::FromStr;
use github_actions_models::common::RepositoryUses;
use super::*;
@ -325,11 +323,11 @@ mod tests {
#[tokio::test]
async fn test_is_checkout_v6_or_higher_offline() {
// Test v6 and higher versions
let v6 = RepositoryUses::from_str("actions/checkout@v6").unwrap();
let v6_0 = RepositoryUses::from_str("actions/checkout@v6.0").unwrap();
let v6_1_0 = RepositoryUses::from_str("actions/checkout@v6.1.0").unwrap();
let v7 = RepositoryUses::from_str("actions/checkout@v7").unwrap();
let v10 = RepositoryUses::from_str("actions/checkout@v10").unwrap();
let v6 = RepositoryUses::parse("actions/checkout@v6").unwrap();
let v6_0 = RepositoryUses::parse("actions/checkout@v6.0").unwrap();
let v6_1_0 = RepositoryUses::parse("actions/checkout@v6.1.0").unwrap();
let v7 = RepositoryUses::parse("actions/checkout@v7").unwrap();
let v10 = RepositoryUses::parse("actions/checkout@v10").unwrap();
let artipacked = Artipacked { client: None };
@ -355,9 +353,9 @@ mod tests {
);
// Test versions below v6
let v4 = RepositoryUses::from_str("actions/checkout@v4").unwrap();
let v5 = RepositoryUses::from_str("actions/checkout@v5").unwrap();
let v5_9 = RepositoryUses::from_str("actions/checkout@v5.9").unwrap();
let v4 = RepositoryUses::parse("actions/checkout@v4").unwrap();
let v5 = RepositoryUses::parse("actions/checkout@v5").unwrap();
let v5_9 = RepositoryUses::parse("actions/checkout@v5.9").unwrap();
assert_eq!(
artipacked.is_checkout_v6_or_higher(&v4).await.unwrap(),
@ -374,7 +372,7 @@ mod tests {
// Test commit SHA (should return None when offline)
let commit_sha =
RepositoryUses::from_str("actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683")
RepositoryUses::parse("actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683")
.unwrap();
assert_eq!(
artipacked
@ -385,7 +383,7 @@ mod tests {
);
// Test invalid/unparseable refs (should return None)
let invalid = RepositoryUses::from_str("actions/checkout@main").unwrap();
let invalid = RepositoryUses::parse("actions/checkout@main").unwrap();
assert_eq!(
artipacked.is_checkout_v6_or_higher(&invalid).await.unwrap(),
None
@ -410,7 +408,7 @@ mod tests {
// Points to v6.0.0.
let commit_sha_v6 =
RepositoryUses::from_str("actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3")
RepositoryUses::parse("actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3")
.unwrap();
assert_eq!(
@ -423,7 +421,7 @@ mod tests {
// Points to v5.0.1.
let commit_sha_v5 =
RepositoryUses::from_str("actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd")
RepositoryUses::parse("actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd")
.unwrap();
assert_eq!(
@ -526,6 +524,7 @@ jobs:
|workflow: &Workflow, findings| {
let fixed = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed.source(), @r"
name: Test Workflow
on: push
jobs:
@ -573,6 +572,7 @@ jobs:
|workflow: &Workflow, findings| {
let fixed = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed.source(), @r"
name: Test Workflow
on: push
jobs:

View file

@ -15,7 +15,7 @@ use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::{
audit::AuditError,
finding::{Confidence, Fix, FixDisposition, Severity, location::Locatable as _},
models::workflow::{JobExt, Workflow},
models::workflow::{JobCommon, Workflow},
utils::{self, ExtractedExpr},
};
use subfeature::Subfeature;
@ -81,7 +81,7 @@ impl Audit for BotConditions {
if let Some(If::Expr(expr)) = &job.r#if {
conds.push((
expr,
job.location_with_name(),
job.location_with_grip(),
job.location().with_keys(["if".into()]),
));
}
@ -91,7 +91,7 @@ impl Audit for BotConditions {
if let Some(If::Expr(expr)) = &step.r#if {
conds.push((
expr,
step.location_with_name(),
step.location_with_grip(),
step.location().with_keys(["if".into()]),
));
}
@ -538,6 +538,7 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Workflow
on:
pull_request_target:
@ -588,6 +589,7 @@ jobs:
}
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Workflow
on:
pull_request_target:
@ -641,6 +643,7 @@ jobs:
// Verify it suggests comment.user.login for issue_comment events
insta::assert_snapshot!(document.source(), @r#"
name: Test Issue Comment
on: issue_comment
@ -690,6 +693,7 @@ jobs:
// Verify it suggests review.user.login for pull_request_review events
insta::assert_snapshot!(document.source(), @r#"
name: Test PR Review
on: pull_request_review
@ -739,6 +743,7 @@ jobs:
// Verify it suggests issue.user.login for issues events
insta::assert_snapshot!(document.source(), @r#"
name: Test Issues
on: issues
@ -788,6 +793,7 @@ jobs:
// Verify it suggests release.author.login for release events
insta::assert_snapshot!(document.source(), @r#"
name: Test Release
on: release
@ -836,6 +842,7 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Create
on: create
@ -885,6 +892,7 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test Workflow
on:
pull_request_target:

View file

@ -9,7 +9,7 @@ use crate::finding::location::{Locatable as _, Routable};
use crate::finding::{Confidence, Finding, Fix, FixDisposition, Severity};
use crate::models::StepCommon;
use crate::models::coordinate::{ActionCoordinate, ControlExpr, ControlFieldType, Toggle, Usage};
use crate::models::workflow::{JobExt as _, NormalJob, Step, Steps};
use crate::models::workflow::{JobCommon as _, NormalJob, Step, Steps};
use crate::state::AuditState;
use indexmap::IndexMap;
@ -444,7 +444,7 @@ impl CachePoisoning {
finding_builder = finding_builder.fix(fix);
}
finding_builder.build(step.workflow()).ok()
finding_builder.build(step).ok()
}
}
@ -555,6 +555,7 @@ jobs:
|findings: Vec<Finding>| {
let fixed_content = apply_fix_for_snapshot(workflow_content, findings);
insta::assert_snapshot!(fixed_content, @r"
name: Test Workflow
on: release
@ -599,6 +600,7 @@ jobs:
|findings: Vec<Finding>| {
let fixed_content = apply_fix_for_snapshot(workflow_content, findings);
insta::assert_snapshot!(fixed_content, @r"
name: Test Workflow
on: release

View file

@ -135,7 +135,7 @@ impl Audit for DependabotCooldown {
Self::finding()
.add_location(
update
.location_with_name()
.location_with_grip()
.primary()
.annotated("missing cooldown configuration"),
)
@ -202,6 +202,7 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -243,6 +244,7 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -284,6 +286,7 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -335,6 +338,7 @@ updates:
}
insta::assert_snapshot!(document.source(), @r"
version: 2
updates:
@ -379,6 +383,7 @@ updates:
// Verify the document remains unchanged
insta::assert_snapshot!(dependabot.as_document().source(), @r"
version: 2
updates:

View file

@ -61,7 +61,7 @@ impl Audit for DependabotExecution {
.primary()
.annotated("enabled here"),
)
.add_location(update.location_with_name())
.add_location(update.location_with_grip())
.fix(Self::create_set_deny_fix(update))
.build(dependabot)?,
);
@ -123,6 +123,7 @@ updates:
let fix = &finding.fixes[0];
let fixed_document = fix.apply(dependabot.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
version: 2
updates:
@ -158,6 +159,7 @@ updates:
// Verify the document remains unchanged
insta::assert_snapshot!(dependabot.as_document().source(), @r"
version: 2
updates:
@ -192,6 +194,7 @@ updates:
// Verify the document remains unchanged
insta::assert_snapshot!(dependabot.as_document().source(), @r"
version: 2
updates:
@ -240,6 +243,7 @@ updates:
}
insta::assert_snapshot!(document.source(), @r"
version: 2
updates:

View file

@ -14,6 +14,7 @@ use crate::{
static KNOWN_PERMISSIONS: LazyLock<HashMap<&str, Severity>> = LazyLock::new(|| {
[
("actions", Severity::High),
("artifact-metadata", Severity::Medium),
("attestations", Severity::High),
("checks", Severity::Medium),
("contents", Severity::High),
@ -21,6 +22,8 @@ static KNOWN_PERMISSIONS: LazyLock<HashMap<&str, Severity>> = LazyLock::new(|| {
("discussions", Severity::Medium),
("id-token", Severity::High),
("issues", Severity::High),
// What does the write permission even do here?
("models", Severity::Low),
("packages", Severity::High),
("pages", Severity::High),
("pull-requests", Severity::High),

View file

@ -1,4 +1,5 @@
use github_actions_models::common::Uses;
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::audit::AuditError;
@ -59,6 +60,7 @@ impl ForbiddenUses {
step.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.annotated("use of this action is forbidden"),
)
.build(step)?,

View file

@ -13,7 +13,7 @@ use crate::config::Config;
use crate::finding::location::Locatable as _;
use crate::finding::{Confidence, Finding, Severity};
use crate::models::StepCommon;
use crate::models::{workflow::JobExt as _, workflow::Step};
use crate::models::{workflow::JobCommon as _, workflow::Step};
use crate::state::AuditState;
use crate::utils;
use crate::utils::once::static_regex;
@ -88,20 +88,22 @@ const PWSH_REDIRECT_QUERY: &str = r#"
const PWSH_PIPELINE_QUERY: &str = r#"
(pipeline
(command
command_name: (command_name) @cmd
command_elements: (command_elements
(_)*
(array_literal_expression
(unary_expression [
(string_literal
(expandable_string_literal (variable) @destination))
(variable) @destination
])
)
(_)*))
(#match? @cmd "(?i)out-file|add-content|set-content|tee-object")
(#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH")
(pipeline_chain
(command
command_name: (command_name) @cmd
command_elements: (command_elements
(_)*
(array_literal_expression
(unary_expression [
(string_literal
(expandable_string_literal (variable) @destination))
(variable) @destination
])
)
(_)*))
(#match? @cmd "(?i)out-file|add-content|set-content|tee-object")
(#match? @destination "(?i)ENV:GITHUB_ENV|ENV:GITHUB_PATH")
)
) @span
"#;
@ -392,7 +394,7 @@ impl Audit for GitHubEnv {
}
if let StepBody::Run { run, .. } = &step.deref().body {
let shell = step.shell().unwrap_or_else(|| {
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
tracing::warn!(
"github-env: couldn't determine shell type for {workflow}:{job} step {stepno}; assuming bash",
workflow = step.workflow().key.presentation_path(),
@ -419,7 +421,7 @@ impl Audit for GitHubEnv {
.with_keys(["run".into()])
.annotated(format!("write to {dest} may allow code execution")),
)
.build(step.workflow())?,
.build(step)?,
)
}
}
@ -438,7 +440,7 @@ impl Audit for GitHubEnv {
return Ok(findings);
};
let shell = step.shell().unwrap_or_else(|| {
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
tracing::warn!(
"github-env: couldn't determine shell type for {action} step {stepno}; assuming bash",
action = step.action().key.presentation_path(),
@ -463,7 +465,7 @@ impl Audit for GitHubEnv {
.with_keys(["run".into()])
.annotated(format!("write to {dest} may allow code execution")),
)
.build(step.action())?,
.build(step)?,
)
}

View file

@ -7,6 +7,7 @@
use anyhow::anyhow;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, Job, audit_meta};
use crate::{
@ -51,7 +52,7 @@ impl ImpostorCommit {
Ok(
match self
.client
.compare_commits(&uses.owner, &uses.repo, base_ref, head_ref)
.compare_commits(uses.owner(), uses.repo(), base_ref, head_ref)
.await
.map_err(Self::err)?
{
@ -76,13 +77,13 @@ impl ImpostorCommit {
return Ok(false);
};
// Fast path: almost all commit refs will be at the tip of
// Fastest path: almost all commit refs will be at the tip of
// the branch or tag's history, so check those first.
// Check tags before branches, since in practice version tags
// are more commonly pinned.
let tags = self
.client
.list_tags(&uses.owner, &uses.repo)
.list_tags(uses.owner(), uses.repo())
.await
.map_err(Self::err)?;
@ -94,7 +95,7 @@ impl ImpostorCommit {
let branches = self
.client
.list_branches(&uses.owner, &uses.repo)
.list_branches(uses.owner(), uses.repo())
.await
.map_err(Self::err)?;
@ -104,6 +105,21 @@ impl ImpostorCommit {
}
}
// Fast path: attempt to use GitHub's undocumented `branch_commits`
// API to see if the commit is present in any branch/tag.
// There are no stabilitiy guarantees for this API, so we fall back
// to the slow(er) paths if it fails.
match self
.client
.branch_commits(uses.owner(), uses.repo(), head_ref)
.await
{
Ok(branch_commits) => return Ok(branch_commits.is_empty()),
Err(e) => tracing::warn!("fast path impostor check failed for {uses}: {e}"),
}
// Slow path: use GitHub's comparison API to check each branch and tag's
// history for presence of the commit.
for branch in &branches {
if self
.named_ref_contains_commit(uses, &format!("refs/heads/{}", &branch.name), head_ref)
@ -131,7 +147,7 @@ impl ImpostorCommit {
async fn get_highest_tag(&self, uses: &RepositoryUses) -> Result<Option<String>, AuditError> {
let tags = self
.client
.list_tags(&uses.owner, &uses.repo)
.list_tags(uses.owner(), uses.repo())
.await
.map_err(Self::err)?;
@ -186,16 +202,16 @@ impl ImpostorCommit {
Ok(None) => {
tracing::warn!(
"No tags found for {}/{}, cannot create fix",
uses.owner,
uses.repo
uses.owner(),
uses.repo()
);
return None;
}
Err(e) => {
tracing::error!(
"Failed to get latest tag for {}/{}: {}",
uses.owner,
uses.repo,
uses.owner(),
uses.repo(),
e
);
return None;
@ -203,8 +219,8 @@ impl ImpostorCommit {
};
// Build the new uses string with the latest tag
let mut uses_slug = format!("{}/{}", uses.owner, uses.repo);
if let Some(subpath) = &uses.subpath {
let mut uses_slug = format!("{}/{}", uses.owner(), uses.repo());
if let Some(subpath) = &uses.subpath() {
uses_slug.push_str(&format!("/{subpath}"));
}
let fixed_uses = format!("{uses_slug}@{latest_tag}");
@ -256,8 +272,13 @@ impl Audit for ImpostorCommit {
let mut finding_builder = Self::finding()
.severity(Severity::High)
.confidence(Confidence::High)
.add_location(step.location_with_grip())
.add_location(
step.location().primary().annotated(IMPOSTOR_ANNOTATION),
step.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.primary()
.annotated(IMPOSTOR_ANNOTATION),
);
if let Some(fix) = self.create_impostor_fix(uses, &step).await {
@ -279,8 +300,14 @@ impl Audit for ImpostorCommit {
let mut finding_builder = Self::finding()
.severity(Severity::High)
.confidence(Confidence::High)
.add_location(reusable.location_with_grip())
.add_location(
reusable.location().primary().annotated(IMPOSTOR_ANNOTATION),
reusable
.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.primary()
.annotated(IMPOSTOR_ANNOTATION),
);
if let Some(fix) = self.create_reusable_fix(uses, &reusable).await {
@ -310,13 +337,20 @@ impl Audit for ImpostorCommit {
let mut finding_builder = Self::finding()
.severity(Severity::High)
.confidence(Confidence::High)
.add_location(step.location().primary().annotated(IMPOSTOR_ANNOTATION));
.add_location(step.location_with_grip())
.add_location(
step.location()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.primary()
.annotated(IMPOSTOR_ANNOTATION),
);
if let Some(fix) = self.create_impostor_fix(uses, step).await {
finding_builder = finding_builder.fix(fix);
}
findings.push(finding_builder.build(step.action()).map_err(Self::err)?);
findings.push(finding_builder.build(step).map_err(Self::err)?);
}
Ok(findings)
@ -381,6 +415,7 @@ jobs:
// Apply the fix and snapshot test the result
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Impostor Commit Fix
on: push
jobs:

View file

@ -295,6 +295,7 @@ jobs:
assert!(fixed_document.source().contains("ANOTHER_VAR: also-keep"));
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
jobs:
@ -349,6 +350,7 @@ jobs:
assert!(fixed_document.source().contains("GLOBAL_VAR: keep-me"));
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
env:
@ -403,6 +405,7 @@ jobs:
assert!(fixed_document.source().contains("STEP_VAR: keep-me"));
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
jobs:
@ -446,6 +449,7 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
on: push
jobs:

View file

@ -34,7 +34,7 @@ impl KnownVulnerableActions {
&self,
uses: &RepositoryUses,
) -> Result<Vec<(Severity, String, Option<String>)>, AuditError> {
let version = match &uses.git_ref {
let version = match &uses.git_ref() {
// If `uses` is pinned to a symbolic ref, we need to perform
// feats of heroism to figure out what's going on.
// In the "happy" case the symbolic ref is an exact version tag,
@ -54,7 +54,7 @@ impl KnownVulnerableActions {
version if !uses.ref_is_commit() => {
let Some(commit_ref) = self
.client
.commit_for_ref(&uses.owner, &uses.repo, version)
.commit_for_ref(uses.owner(), uses.repo(), version)
.await
.map_err(Self::err)?
else {
@ -65,7 +65,7 @@ impl KnownVulnerableActions {
match self
.client
.longest_tag_for_commit(&uses.owner, &uses.repo, &commit_ref)
.longest_tag_for_commit(uses.owner(), uses.repo(), &commit_ref)
.await
.map_err(Self::err)?
{
@ -84,7 +84,7 @@ impl KnownVulnerableActions {
commit_ref => {
match self
.client
.longest_tag_for_commit(&uses.owner, &uses.repo, commit_ref)
.longest_tag_for_commit(uses.owner(), uses.repo(), commit_ref)
.await
.map_err(Self::err)?
{
@ -100,7 +100,7 @@ impl KnownVulnerableActions {
let vulns = self
.client
.gha_advisories(&uses.owner, &uses.repo, &version)
.gha_advisories(uses.owner(), uses.repo(), &version)
.await
.map_err(Self::err)?;
@ -135,8 +135,8 @@ impl KnownVulnerableActions {
target_version: String,
step: &impl StepCommon<'doc>,
) -> Result<Fix<'doc>, AuditError> {
let mut uses_slug = format!("{}/{}", uses.owner, uses.repo);
if let Some(subpath) = &uses.subpath {
let mut uses_slug = format!("{}/{}", uses.owner(), uses.repo());
if let Some(subpath) = &uses.subpath() {
uses_slug.push_str(&format!("/{subpath}"));
}
@ -162,13 +162,13 @@ impl KnownVulnerableActions {
let (target_ref, target_commit) = match self
.client
.commit_for_ref(&uses.owner, &uses.repo, &prefixed_version)
.commit_for_ref(uses.owner(), uses.repo(), &prefixed_version)
.await
{
Ok(commit) => commit.map(|commit| (&prefixed_version, commit)),
Err(_) => self
.client
.commit_for_ref(&uses.owner, &uses.repo, &bare_version)
.commit_for_ref(uses.owner(), uses.repo(), &bare_version)
.await
.map_err(Self::err)?
.map(|commit| (&bare_version, commit)),
@ -176,8 +176,8 @@ impl KnownVulnerableActions {
.ok_or_else(|| {
Self::err(anyhow!(
"Cannot resolve version {bare_version} to commit hash for {}/{}",
uses.owner,
uses.repo
uses.owner(),
uses.repo()
))
})?;
@ -208,7 +208,7 @@ impl KnownVulnerableActions {
// prefixed with `v` or not. Instead of trying to figure it out
// via the GitHub API, we match the style of the current `uses`
// clause.
let target_version_tag = if uses.git_ref.starts_with('v') {
let target_version_tag = if uses.git_ref().starts_with('v') {
prefixed_version
} else {
bare_version
@ -362,12 +362,7 @@ jobs:
let step = &steps[0];
// Test the fix directly
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let uses = RepositoryUses::parse("actions/checkout@v2").unwrap();
let audit = create_test_audit();
let fix = audit
@ -377,6 +372,7 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test Vulnerable Actions
on: push
jobs:
@ -417,12 +413,7 @@ jobs:
let step = &steps[0];
// Test the fix directly
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "setup-node".to_string(),
git_ref: "v1".to_string(),
subpath: None,
};
let uses = RepositoryUses::parse("actions/setup-node@v1").unwrap();
let audit = create_test_audit();
let fix = audit
@ -431,7 +422,8 @@ jobs:
.unwrap();
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
insta::assert_snapshot!(fixed_document.source(), @r"
name: Test Node Setup
on: push
jobs:
@ -444,7 +436,7 @@ jobs:
node-version: '18'
- name: Install dependencies
run: npm install
"#);
");
}
#[tokio::test]
@ -474,12 +466,7 @@ jobs:
let step = &steps[0];
// Test the fix directly
let uses = RepositoryUses {
owner: "codecov".to_string(),
repo: "codecov-action".to_string(),
git_ref: "v1".to_string(),
subpath: None,
};
let uses = RepositoryUses::parse("codecov/codecov-action@v1").unwrap();
let audit = create_test_audit();
let fix = audit
@ -489,6 +476,7 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test Third Party Action
on: push
jobs:
@ -541,12 +529,7 @@ jobs:
let audit = create_test_audit();
// Fix checkout action
let uses_checkout = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let uses_checkout = RepositoryUses::parse("actions/checkout@v2").unwrap();
let fix_checkout = audit
.create_upgrade_fix(&uses_checkout, "v4".into(), &steps[0])
.await
@ -554,12 +537,7 @@ jobs:
current_document = fix_checkout.apply(&current_document).unwrap();
// Fix setup-node action
let uses_node = RepositoryUses {
owner: "actions".to_string(),
repo: "setup-node".to_string(),
git_ref: "v1".to_string(),
subpath: None,
};
let uses_node = RepositoryUses::parse("actions/setup-node@v1").unwrap();
let fix_node = audit
.create_upgrade_fix(&uses_node, "v4".into(), &steps[1])
.await
@ -567,19 +545,15 @@ jobs:
current_document = fix_node.apply(&current_document).unwrap();
// Fix cache action
let uses_cache = RepositoryUses {
owner: "actions".to_string(),
repo: "cache".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let uses_cache = RepositoryUses::parse("actions/cache@v2").unwrap();
let fix_cache = audit
.create_upgrade_fix(&uses_cache, "v4".into(), &steps[2])
.await
.unwrap();
current_document = fix_cache.apply(&current_document).unwrap();
insta::assert_snapshot!(current_document.source(), @r#"
insta::assert_snapshot!(current_document.source(), @r"
name: Test Multiple Vulnerable Actions
on: push
jobs:
@ -599,7 +573,7 @@ jobs:
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
- name: Install dependencies
run: npm install
"#);
");
}
#[tokio::test]
@ -627,12 +601,7 @@ jobs:
let step = &steps[0];
// Test the fix with subpath
let uses = RepositoryUses {
owner: "owner".to_string(),
repo: "repo".to_string(),
git_ref: "v1".to_string(),
subpath: Some("subpath".to_string()),
};
let uses = RepositoryUses::parse("owner/repo/subpath@v1").unwrap();
let audit = create_test_audit();
let fix = audit
@ -642,6 +611,7 @@ jobs:
let fixed_document = fix.apply(workflow.as_document()).unwrap();
insta::assert_snapshot!(fixed_document.source(), @r"
name: Test Action with Subpath
on: push
jobs:
@ -678,12 +648,7 @@ jobs:
};
let step = &steps[0];
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let uses = RepositoryUses::parse("actions/checkout@v2").unwrap();
// Test that when first_patched_version is provided, it's used
let audit = create_test_audit();
@ -695,7 +660,8 @@ jobs:
.apply(workflow.as_document())
.unwrap();
insta::assert_snapshot!(fixed_document.source(), @r#"
insta::assert_snapshot!(fixed_document.source(), @r"
name: Test First Patched Version Priority
on: push
jobs:
@ -704,7 +670,7 @@ jobs:
steps:
- name: Vulnerable action
uses: actions/checkout@v3.1.0
"#);
");
}
#[tokio::test]
@ -729,12 +695,7 @@ jobs:
};
let step = &steps[0];
let uses = RepositoryUses {
owner: "actions".to_string(),
repo: "checkout".to_string(),
git_ref: "v2".to_string(),
subpath: None,
};
let uses = RepositoryUses::parse("actions/checkout@v2").unwrap();
let audit = create_test_audit();
let fix = audit
@ -745,6 +706,7 @@ jobs:
let new_doc = fix.apply(workflow.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Non-Commit Ref
on: push
jobs:
@ -808,6 +770,7 @@ jobs:
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Commit Hash Pinning Real API
on: push
permissions: {}
@ -864,6 +827,7 @@ jobs:
let new_doc = findings[0].fixes[0].apply(input.as_document()).unwrap();
assert_snapshot!(new_doc.source(), @r"
name: Test Commit Hash Pinning Real API
on: push
permissions: {}

View file

@ -18,6 +18,7 @@ use crate::{
};
pub(crate) mod anonymous_definition;
pub(crate) mod archived_uses;
pub(crate) mod artipacked;
pub(crate) mod bot_conditions;
pub(crate) mod cache_poisoning;
@ -188,10 +189,9 @@ pub(crate) enum AuditLoadError {
}
#[derive(Error, Debug)]
#[error("error in {ident}")]
#[error("error in '{ident}' audit")]
pub(crate) struct AuditError {
ident: &'static str,
#[source]
source: anyhow::Error,
}

View file

@ -33,7 +33,7 @@ impl Obfuscation {
// GitHub happily interprets but otherwise gums up pattern matching
// in audits like unpinned-uses, forbidden-uses, and cache-poisoning.
// We check for some of these forms of nonsense here and report them.
if let Some(subpath) = uses.subpath.as_deref() {
if let Some(subpath) = uses.subpath() {
for component in subpath.split('/') {
match component {
// . and .. are valid in uses subpaths, but are impossible to
@ -60,7 +60,7 @@ impl Obfuscation {
/// Normalizes a uses path by removing unnecessary components like empty slashes, `.`, and `..`.
fn normalize_uses_path(&self, uses: &RepositoryUses) -> Option<String> {
let subpath = uses.subpath.as_deref()?;
let subpath = uses.subpath()?;
let mut components = Vec::new();
for component in subpath.split('/') {
@ -83,14 +83,19 @@ impl Obfuscation {
// If all components were removed, the subpath should be empty
if components.is_empty() {
Some(format!("{}/{}@{}", uses.owner, uses.repo, uses.git_ref))
Some(format!(
"{}/{}@{}",
uses.owner(),
uses.repo(),
uses.git_ref()
))
} else {
Some(format!(
"{}/{}/{}@{}",
uses.owner,
uses.repo,
uses.owner(),
uses.repo(),
components.join("/"),
uses.git_ref
uses.git_ref()
))
}
}
@ -220,7 +225,10 @@ impl Obfuscation {
}
}
crate::models::StepBodyCommon::Run { .. } => {
if let Some("cmd" | "cmd.exe") = step.shell().map(utils::normalize_shell) {
if let Some(("cmd" | "cmd.exe", shell_loc)) = step
.shell()
.map(|(shell, loc)| (utils::normalize_shell(shell), loc))
{
// `shell: cmd` is basically impossible to analyze: it has no formal
// grammar and has several line continuation mechanisms that stymie
// naive matching. It also hasn't been the default shell on Windows
@ -230,11 +238,10 @@ impl Obfuscation {
.confidence(Confidence::High)
.severity(Severity::Low)
.add_location(
step.location()
.primary()
.with_keys(["shell".into()])
step.location_with_grip()
.annotated("Windows CMD shell limits analysis"),
)
.add_location(shell_loc.primary())
.tip("use 'shell: pwsh' or 'shell: bash' for improved analysis")
.build(step)
.map_err(Self::err)?,
@ -400,7 +407,8 @@ jobs:
"#;
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
insta::assert_snapshot!(result, @r#"
insta::assert_snapshot!(result, @r"
name: Test Workflow
on: push
@ -409,7 +417,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
"#);
");
}
#[tokio::test]
@ -426,7 +434,8 @@ jobs:
"#;
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
insta::assert_snapshot!(result, @r#"
insta::assert_snapshot!(result, @r"
name: Test Workflow
on: push
@ -435,7 +444,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: github/codeql-action/init@v2
"#);
");
}
#[tokio::test]
@ -452,7 +461,8 @@ jobs:
"#;
let result = apply_fix_for_snapshot(workflow_content, "obfuscation").await;
insta::assert_snapshot!(result, @r#"
insta::assert_snapshot!(result, @r"
name: Test Workflow
on: push
@ -461,6 +471,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/cache/save@v4
"#);
");
}
}

View file

@ -43,12 +43,12 @@ impl RefConfusion {
// TODO: use a tokio JoinSet here?
let branches_match = self
.client
.has_branch(&uses.owner, &uses.repo, sym_ref)
.has_branch(uses.owner(), uses.repo(), sym_ref)
.await
.map_err(Self::err)?;
let tags_match = self
.client
.has_tag(&uses.owner, &uses.repo, sym_ref)
.has_tag(uses.owner(), uses.repo(), sym_ref)
.await
.map_err(Self::err)?;
@ -158,7 +158,7 @@ impl Audit for RefConfusion {
.with_keys(["uses".into()])
.annotated(REF_CONFUSION_ANNOTATION),
)
.build(step.action())
.build(step)
.map_err(Self::err)?,
);
}

View file

@ -106,7 +106,7 @@ impl RefVersionMismatch {
let Some(commit_for_ref) = self
.client
.commit_for_ref(&uses.owner, &uses.repo, version_from_comment)
.commit_for_ref(uses.owner(), uses.repo(), version_from_comment)
.await
.map_err(Self::err)?
else {
@ -138,7 +138,7 @@ impl RefVersionMismatch {
if let Some(suggestion) = self
.client
.longest_tag_for_commit(&uses.owner, &uses.repo, commit_sha)
.longest_tag_for_commit(uses.owner(), uses.repo(), commit_sha)
.await
.map_err(Self::err)?
{

View file

@ -1,10 +1,10 @@
use github_actions_models::workflow::job::Secrets;
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::{
audit::AuditError,
finding::{Confidence, location::Locatable as _},
models::workflow::JobExt as _,
};
pub(crate) struct SecretsInherit;
@ -38,6 +38,7 @@ impl Audit for SecretsInherit {
job.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, job.uses.raw()))
.annotated("this reusable workflow"),
)
.add_location(
@ -47,7 +48,7 @@ impl Audit for SecretsInherit {
)
.confidence(Confidence::High)
.severity(crate::finding::Severity::Medium)
.build(job.parent())?,
.build(job)?,
);
}

View file

@ -2,6 +2,7 @@
use anyhow::anyhow;
use github_actions_models::common::{RepositoryUses, Uses};
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, audit_meta};
use crate::{
@ -29,7 +30,7 @@ impl StaleActionRefs {
let tag = match &uses.commit_ref() {
Some(commit_ref) => self
.client
.longest_tag_for_commit(&uses.owner, &uses.repo, commit_ref)
.longest_tag_for_commit(uses.owner(), uses.repo(), commit_ref)
.await
.map_err(Self::err)?,
None => return Ok(false),
@ -53,7 +54,12 @@ impl StaleActionRefs {
.confidence(Confidence::High)
.severity(Severity::Low)
.persona(Persona::Pedantic)
.add_location(step.location().primary().with_keys(["uses".into()]))
.add_location(
step.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw())),
)
.build(step)?,
);
}

View file

@ -185,7 +185,7 @@ impl TemplateInjection {
return None;
}
let shell = utils::normalize_shell(step.shell()?);
let shell = utils::normalize_shell(step.shell()?.0);
match shell {
"bash" | "sh" | "zsh" => Some(format!("${{{env_var}}}")),
@ -707,6 +707,7 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection
on: push
jobs:
@ -758,6 +759,7 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection
on: push
jobs:
@ -811,6 +813,7 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection
on: push
jobs:
@ -920,6 +923,7 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Multiple Template Injections
on: push
jobs:
@ -986,6 +990,7 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Duplicate Template Injections
on: push
jobs:
@ -1046,6 +1051,7 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Duplicate Template Injections
on: push
jobs:
@ -1109,6 +1115,7 @@ jobs:
}
insta::assert_snapshot!(current_document.source(), @r#"
name: Test Duplicate Template Injections
on: push
jobs:
@ -1225,6 +1232,7 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Bash
on: push
jobs:
@ -1271,6 +1279,7 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Bash
on: push
jobs:
@ -1316,7 +1325,8 @@ jobs:
finding,
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
insta::assert_snapshot!(fixed_content.source(), @r"
name: Test Template Injection - CMD
on: push
jobs:
@ -1326,7 +1336,7 @@ jobs:
- name: Vulnerable step with cmd shell
shell: cmd
run: echo User is %GITHUB_ACTOR%
"#);
");
}
}
);
@ -1363,6 +1373,7 @@ jobs:
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - PowerShell
on: push
jobs:
@ -1409,6 +1420,7 @@ jobs:
);
// Ubuntu default shell is bash, so should use ${VAR} syntax
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Default Shell Ubuntu
on: push
jobs:
@ -1454,6 +1466,7 @@ jobs:
);
// Windows default shell is pwsh, so should use $env:VAR syntax
insta::assert_snapshot!(fixed_content.source(), @r#"
name: Test Template Injection - Default Shell Windows
on: push
jobs:
@ -1498,7 +1511,8 @@ jobs:
finding,
"replace expression with environment variable",
);
insta::assert_snapshot!(fixed_content.source(), @r#"
insta::assert_snapshot!(fixed_content.source(), @r"
name: Test Template Injection - CMD with Custom Env
on: push
jobs:
@ -1510,7 +1524,7 @@ jobs:
run: echo PR title is %GITHUB_EVENT_PULL_REQUEST_TITLE%
env:
GITHUB_EVENT_PULL_REQUEST_TITLE: ${{ github.event.pull_request.title }}
"#);
");
}
}
);

View file

@ -4,7 +4,6 @@ use crate::{
Confidence, Finding, Persona, Severity,
location::{Locatable as _, SymbolicLocation},
},
models::workflow::JobExt as _,
state::AuditState,
};
@ -30,7 +29,7 @@ impl UnpinnedImages {
.confidence(Confidence::High)
.add_location(annotated_location)
.persona(persona)
.build(job.parent())
.build(job)
}
}
@ -52,13 +51,11 @@ impl Audit for UnpinnedImages {
_config: &crate::config::Config,
) -> anyhow::Result<Vec<Finding<'doc>>, AuditError> {
let mut findings = vec![];
let mut image_refs_with_locations: Vec<(DockerUses, SymbolicLocation<'doc>)> = vec![];
let mut image_refs_with_locations: Vec<(&'doc DockerUses, SymbolicLocation<'doc>)> = vec![];
if let Some(Container::Container { image, .. }) = &job.container {
image_refs_with_locations.push((
image
.parse()
.expect("failed to parse job container image as DockerUses"),
image,
job.location()
.primary()
.with_keys(["container".into(), "image".into()]),
@ -68,9 +65,7 @@ impl Audit for UnpinnedImages {
for (service, config) in job.services.iter() {
if let Container::Container { image, .. } = &config {
image_refs_with_locations.push((
image
.parse()
.expect("failed to parse service container image as DockerUses"),
image,
job.location().primary().with_keys([
"services".into(),
service.as_str().into(),
@ -81,9 +76,9 @@ impl Audit for UnpinnedImages {
}
for (image, location) in image_refs_with_locations {
match image.hash {
match image.hash() {
Some(_) => continue,
None => match image.tag.as_deref() {
None => match image.tag() {
Some("latest") => {
findings.push(self.build_finding(
location,

View file

@ -1,4 +1,5 @@
use github_actions_models::common::Uses;
use subfeature::Subfeature;
use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::audit::AuditError;
@ -31,7 +32,7 @@ impl UnpinnedUses {
Uses::Docker(_) => {
if uses.unpinned() {
Some((
"action is not pinned to a tag, branch, or hash ref".into(),
"image is not pinned to a tag, branch, or hash ref".into(),
Severity::Medium,
Persona::default(),
))
@ -108,6 +109,7 @@ impl UnpinnedUses {
step.location()
.primary()
.with_keys(["uses".into()])
.subfeature(Subfeature::new(0, uses.raw()))
.annotated(annotation),
)
.build(step)?,

View file

@ -6,7 +6,7 @@ use crate::{
Confidence, Fix, FixDisposition, Severity,
location::{Locatable as _, SymbolicLocation},
},
models::{AsDocument, workflow::JobExt},
models::AsDocument,
utils,
};
use yamlpatch::{Op, Patch};
@ -158,7 +158,7 @@ impl Audit for UnsoundCondition {
job: &crate::models::workflow::NormalJob<'doc>,
_config: &crate::config::Config,
) -> Result<Vec<crate::finding::Finding<'doc>>, AuditError> {
self.process_conditions(job.parent(), job.conditions())
self.process_conditions(job, job.conditions())
}
async fn audit_reusable_job<'doc>(
@ -167,7 +167,7 @@ impl Audit for UnsoundCondition {
_config: &crate::config::Config,
) -> Result<Vec<crate::finding::Finding<'doc>>, AuditError> {
let conds = job.r#if.iter().map(|cond| (cond, job.location()));
self.process_conditions(job.parent(), conds)
self.process_conditions(job, conds)
}
async fn audit_action<'doc>(
@ -244,6 +244,7 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -283,6 +284,7 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -323,6 +325,7 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -367,6 +370,7 @@ jobs:
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
name: Test
on: push
jobs:
@ -406,7 +410,8 @@ jobs:
assert_eq!(findings.len(), 1);
let fixed_document = apply_fix_for_snapshot(workflow.as_document(), findings);
insta::assert_snapshot!(fixed_document.source(), @r#"
insta::assert_snapshot!(fixed_document.source(), @r"
name: Test
on: push
jobs:
@ -414,7 +419,7 @@ jobs:
if: |-
${{ github.event_name == 'pull_request' }}
uses: ./.github/workflows/reusable.yml
"#);
");
}
);
}
@ -463,6 +468,7 @@ jobs:
}
insta::assert_snapshot!(document.source(), @r#"
name: Test
on: push
jobs:

View file

@ -9,7 +9,6 @@ use super::{Audit, AuditLoadError, AuditState, audit_meta};
use crate::{
audit::AuditError,
finding::{Confidence, Severity},
models::workflow::JobExt as _,
utils::{self, ExtractedExpr},
};
@ -68,7 +67,7 @@ impl Audit for UnsoundContains {
.primary()
.annotated(format!("contains(..) condition can be bypassed if attacker can control '{context}'")),
)
.build(job.parent())
.build(job)
})
})
.collect()

View file

@ -1,4 +1,3 @@
use std::collections::HashSet;
use std::{sync::LazyLock, vec};
use anyhow::Context as _;
@ -13,7 +12,7 @@ use crate::{
models::{
StepBodyCommon, StepCommon,
coordinate::{ActionCoordinate, ControlExpr, ControlFieldType, Toggle},
workflow::JobExt as _,
workflow::JobCommon as _,
},
state::AuditState,
utils,
@ -180,66 +179,101 @@ impl UseTrustedPublishing {
match cmd {
"cargo" => {
let args = args.collect::<HashSet<_>>();
// Looking for `cargo ... publish` without `--dry-run` or `-n`.
args.contains("publish") && !args.contains("--dry-run") && !args.contains("-n")
args.any(|arg| arg == "publish")
&& args.all(|arg| arg != "--dry-run" && arg != "-n")
}
"uv" => {
let args = args.collect::<HashSet<_>>();
match args.find(|arg| *arg == "publish" || *arg == "run") {
Some("publish") => {
// `uv ... publish` without `--dry-run`.
args.all(|arg| arg != "--dry-run")
}
Some("run") => {
// `uv ... run ... twine ... upload`.
args.any(|arg| arg == "twine") && args.any(|arg| arg == "upload")
}
_ => false,
}
}
"uvx" => {
// Looking for `uvx twine ... upload`.
// Like with pipx, we loosely match the `twine` part
// to allow for version specifiers. In uvx's case, these
// are formatted like `twine@X.Y.Z`.
// Looking for `uv ... publish` without `--dry-run`.
args.contains("publish") && !args.contains("--dry-run")
args.any(|arg| arg.starts_with("twine")) && args.any(|arg| arg == "upload")
}
"hatch" | "pdm" => {
// Looking for `hatch ... publish` or `pdm ... publish`.
args.any(|arg| arg == "publish")
}
"poetry" => {
// Looking for `poetry ... publish` without `--dry-run`.
//
// Poetry has no support for Trusted Publishing at all as
// of 2025-12-1: https://github.com/python-poetry/poetry/issues/7940
args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run")
}
"twine" => {
// Looking for `twine ... upload`.
args.any(|arg| arg == "upload")
}
"pipx" => {
// TODO: also match `pipx ... run ... uv ... publish`, etc.
// Looking for `pipx ... run ... twine ... upload`.
//
// A wrinkle here is that `pipx run` takes version specifiers
// too, e.g. `pipx run twine==X.Y.Z upload ...`. So we only
// loosely match the `twine` part.
args.any(|arg| arg == "run")
&& args.any(|arg| arg.starts_with("twine"))
&& args.any(|arg| arg == "upload")
}
_ if cmd.starts_with("python") => {
// Looking for `python* ... -m ... twine ... upload`.
args.any(|arg| arg == "-m")
&& args.any(|arg| arg == "twine")
&& args.any(|arg| arg == "upload")
}
"gem" => {
// Looking for `gem ... push`.
args.any(|arg| arg == "push")
}
"bundle" => {
// Looking for `bundle ... exec ... gem ... push`.
args.any(|arg| arg == "exec")
&& args.any(|arg| arg == "gem")
&& args.any(|arg| arg == "push")
}
"npm" => {
let args = args.collect::<HashSet<_>>();
// Looking for `npm ... publish` without `--dry-run`.
// TODO: Figure out `npm run ... publish` patterns.
// Looking for `npm ... publish` without `--dry-run`.
args.contains("publish") && !args.contains("--dry-run")
args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run")
}
"yarn" => {
let args = args.collect::<HashSet<_>>();
// TODO: Figure out `yarn run ... publish` patterns.
// TODO: Figure out `yarn ... publish` patterns for lerna/npm workspaces.
// Looking for `yarn ... npm publish` without `--dry-run` or `-n`.
args.contains("npm")
&& args.contains("publish")
&& !args.contains("--dry-run")
&& !args.contains("-n")
args.any(|arg| arg == "npm") && args.all(|arg| arg != "--dry-run" && arg != "-n")
}
"pnpm" => {
let args = args.collect::<HashSet<_>>();
// TODO: Figure out `pnpm run ... publish` patterns.
// Looking for `pnpm ... publish` without `--dry-run`.
args.contains("publish") && !args.contains("--dry-run")
args.any(|arg| arg == "publish") && args.all(|arg| arg != "--dry-run")
}
"nuget" | "nuget.exe" => {
// Looking for `nuget ... push`.
args.any(|arg| arg == "push")
}
"dotnet" => {
// Looking for `dotnet ... nuget push`.
args.next()
.map(|cmd| cmd == "nuget" && Self::is_publish_command(cmd, args))
.unwrap_or(false)
// Looking for `dotnet ... nuget ... push`.
args.any(|arg| arg == "nuget") && args.any(|arg| arg == "push")
}
_ => false,
}
@ -396,7 +430,7 @@ impl Audit for UseTrustedPublishing {
if let StepBodyCommon::Run { run, .. } = step.body()
&& !step.parent.has_id_token()
{
let shell = step.shell().unwrap_or_else(|| {
let shell = step.shell().map(|s| s.0).unwrap_or_else(|| {
tracing::debug!(
"use-trusted-publishing: couldn't determine shell type for {workflow}:{job} step {stepno}",
workflow = step.workflow().key.filename(),
@ -442,3 +476,69 @@ impl Audit for UseTrustedPublishing {
self.process_step(step)
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_is_publish_command() {
for (args, is_publish_command) in &[
(&["cargo", "publish"][..], true),
(&["cargo", "publish", "-p", "foo"][..], true),
(&["cargo", "publish", "--dry-run"][..], false),
(&["cargo", "publish", "-n"][..], false),
(&["cargo", "build"][..], false),
(&["uv", "publish"][..], true),
(&["uv", "publish", "dist/*"][..], true),
(&["uv", "publish", "--dry-run"][..], false),
(&["uv", "run", "--dev", "twine", "upload"][..], true),
(&["uv", "run", "twine", "upload"][..], true),
(&["uv"][..], false),
(&["uv", "sync"][..], false),
(&["uvx", "twine", "upload"][..], true),
(&["uvx", "twine@3.4.1", "upload"][..], true),
(&["uvx", "twine@6.1.0", "upload"][..], true),
(&["uvx", "twine"][..], false),
(&["poetry", "publish"][..], true),
(&["poetry", "publish", "--dry-run"][..], false),
(&["hatch", "publish"][..], true),
(&["pdm", "publish"][..], true),
(&["twine", "upload", "dist/*"][..], true),
(&["pipx", "run", "twine", "upload", "dist/*"][..], true),
(
&["pipx", "run", "twine==3.4.1", "upload", "dist/*"][..],
true,
),
(
&["pipx", "run", "twine==6.1.0", "upload", "dist/*"][..],
true,
),
(&["python", "-m", "twine", "upload", "dist/*"][..], true),
(&["python3.9", "-m", "twine", "upload", "dist/*"][..], true),
(&["twine", "check", "dist/*"], false),
(&["gem", "push", "mygem-0.1.0.gem"][..], true),
(
&["bundle", "exec", "gem", "push", "mygem-0.1.0.gem"][..],
true,
),
(&["npm", "publish"][..], true),
(&["npm", "run", "publish"][..], true),
(&["npm", "publish", "--dry-run"][..], false),
(&["yarn", "npm", "publish"][..], true),
(&["yarn", "npm", "publish", "--dry-run"][..], false),
(&["pnpm", "publish"][..], true),
(&["pnpm", "publish", "--dry-run"][..], false),
(&["nuget", "push", "MyPackage.nupkg"][..], true),
(&["nuget.exe", "push", "MyPackage.nupkg"][..], true),
(&["dotnet", "nuget", "push", "MyPackage.nupkg"][..], true),
(&["dotnet", "build"][..], false),
] {
let cmd = args[0];
let args_iter = args[1..].iter().map(|s| *s);
assert_eq!(
super::UseTrustedPublishing::is_publish_command(cmd, args_iter),
*is_publish_command,
"cmd: {cmd:?}, args: {args:?}"
);
}
}
}

View file

@ -21,7 +21,12 @@ use crate::{
registry::input::RepoSlug,
};
const CONFIG_CANDIDATES: &[&str] = &[".github/zizmor.yml", "zizmor.yml"];
const CONFIG_CANDIDATES: &[&str] = &[
".github/zizmor.yml",
".github/zizmor.yaml",
"zizmor.yml",
"zizmor.yaml",
];
#[derive(Error, Debug)]
#[error("configuration error in {path}")]
@ -254,7 +259,7 @@ impl UnpinnedUsesPolicies {
&self,
uses: &RepositoryUses,
) -> (Option<&RepositoryUsesPattern>, UsesPolicy) {
match self.policy_tree.get(&uses.owner) {
match self.policy_tree.get(uses.owner()) {
Some(policies) => {
// Policies are ordered by specificity, so we can
// iterate and return eagerly.

View file

@ -647,24 +647,28 @@
},
"package-ecosystem-values": {
"enum": [
"bazel",
"bun",
"bundler",
"cargo",
"composer",
"conda",
"devcontainers",
"docker",
"docker-compose",
"dotnet-sdk",
"elm",
"gitsubmodule",
"github-actions",
"gitsubmodule",
"gomod",
"gradle",
"helm",
"julia",
"maven",
"mix",
"npm",
"nuget",
"opentofu",
"pip",
"pub",
"rust-toolchain",
@ -1045,6 +1049,21 @@
"versioning-strategy": {
"$ref": "#/definitions/versioning-strategy",
"description": "How to update manifest version requirements"
},
"patterns": {
"description": "Array of dependency patterns to include in a multi-ecosystem group. Required when using multi-ecosystem-group. Use '*' to include all dependencies.",
"type": "array",
"items": {
"type": "string",
"minLength": 1
},
"minItems": 1,
"uniqueItems": true
},
"multi-ecosystem-group": {
"description": "String identifier linking this ecosystem to a multi-ecosystem group",
"type": "string",
"minLength": 1
}
},
"allOf": [
@ -1056,6 +1075,15 @@
{ "required": ["directories"] },
{ "required": ["directory"] }
]
},
{
"$comment": "If multi-ecosystem-group is specified, patterns is required",
"if": {
"required": ["multi-ecosystem-group"]
},
"then": {
"required": ["patterns"]
}
}
]
},
@ -1132,6 +1160,129 @@
}
},
"minProperties": 1
},
"multi-ecosystem-group": {
"type": "object",
"description": "Define a group that spans multiple package ecosystems, allowing consolidated pull requests across different ecosystems",
"additionalProperties": false,
"properties": {
"schedule": {
"description": "Schedule preferences for the group",
"type": "object",
"properties": {
"interval": {
"$ref": "#/definitions/schedule-interval"
},
"day": {
"$ref": "#/definitions/schedule-day",
"description": "Specify an alternative day to check for updates"
},
"time": {
"type": "string",
"description": "Specify an alternative time of day to check for updates (format: hh:mm)",
"pattern": "^([01][0-9]|2[0-3]):[0-5][0-9]$"
},
"timezone": {
"$ref": "#/definitions/timezone",
"description": "The time zone identifier must be from the Time Zone database maintained by IANA"
},
"cronjob": {
"type": "string",
"description": "Specify a valid cron expression for updates"
}
},
"allOf": [
{
"$comment": "If interval type is 'cron', enforce 'cronjob' property.",
"if": {
"properties": {
"interval": {
"const": "cron"
}
}
},
"then": {
"required": ["interval", "cronjob"]
},
"else": {
"required": ["interval"]
}
}
]
},
"labels": {
"description": "Labels to set on pull requests (additive - merges with ecosystem-level labels)",
"type": "array",
"items": {
"type": "string",
"minLength": 1
},
"minItems": 0,
"uniqueItems": true
},
"assignees": {
"description": "Assignees to set on pull requests (additive - merges with ecosystem-level assignees)",
"type": "array",
"items": {
"type": "string",
"minLength": 1
},
"minItems": 1,
"uniqueItems": true
},
"milestone": {
"description": "Associate all pull requests raised for this group with a milestone. You need to specify the numeric identifier of the milestone and not its label.",
"type": "integer",
"minimum": 1
},
"target-branch": {
"description": "Specify a different branch for manifest files and for pull requests.",
"type": "string",
"minLength": 1
},
"commit-message": {
"description": "Commit message preferences for the group",
"type": "object",
"properties": {
"prefix": {
"description": "A prefix for all commit messages",
"type": "string",
"maxLength": 50
},
"prefix-development": {
"description": "A separate prefix for all commit messages that update dependencies in the Development dependency group",
"type": "string",
"maxLength": 50
},
"include": {
"description": "Specifies that any prefix is followed by a list of the dependencies updated in the commit",
"type": "string",
"enum": ["scope"]
}
},
"anyOf": [
{ "required": ["prefix"] },
{ "required": ["prefix-development"] },
{ "required": ["include"] }
],
"additionalProperties": false
},
"pull-request-branch-name": {
"description": "Pull request branch name preferences for the group",
"type": "object",
"properties": {
"separator": {
"description": "Change separator for PR branch name",
"type": "string",
"default": "/",
"enum": ["-", "_", "/"]
}
},
"required": ["separator"],
"additionalProperties": false
}
},
"required": ["schedule"]
}
},
"properties": {
@ -1155,6 +1306,14 @@
},
"registries": {
"$ref": "#/definitions/registry"
},
"multi-ecosystem-groups": {
"type": "object",
"description": "Define groups that span multiple package ecosystems, allowing consolidated pull requests across different ecosystems",
"additionalProperties": {
"$ref": "#/definitions/multi-ecosystem-group"
},
"minProperties": 1
}
},
"required": ["version", "updates"],

View file

@ -5,7 +5,10 @@ use clap::ValueEnum;
use serde::{Deserialize, Serialize};
use self::location::{Location, SymbolicLocation};
use crate::{InputKey, audit::AuditError, models::AsDocument, registry::input::Group};
use crate::{
InputKey, audit::AuditError, finding::location::LocationKind, models::AsDocument,
registry::input::Group,
};
use yamlpatch::{self, Patch};
pub(crate) mod location;
@ -246,7 +249,12 @@ impl<'doc> FindingBuilder<'doc> {
locations.extend(self.raw_locations);
if !locations.iter().any(|l| l.symbolic.is_primary()) {
if locations.len() == 1
&& let Some(location) = locations.get_mut(0)
{
// If there's only one location, then it's primary by definition.
location.symbolic.kind = LocationKind::Primary;
} else if !locations.iter().any(|l| l.symbolic.is_primary()) {
return Err(AuditError::new(
self.ident,
anyhow!("API misuse: at least one location must be marked with primary()"),

View file

@ -208,12 +208,12 @@ pub(crate) trait Locatable<'doc> {
fn location(&self) -> SymbolicLocation<'doc>;
/// Returns an "enriched" symbolic location of this model,
/// when the model is of a type that has a name. Otherwise,
/// returns the same symbolic location as `location()`.
/// when the model has one or more "grip" fields that are
/// visually useful to key off of (like a `name` or `id` field).
///
/// For example, a GitHub Actions workflow step has an optional name,
/// which is included in this symbolic location if present.
fn location_with_name(&self) -> SymbolicLocation<'doc> {
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
self.location()
}
}

View file

@ -575,6 +575,29 @@ impl Client {
.max_by_key(|t| t.name.len()))
}
#[instrument(skip(self))]
pub(crate) async fn branch_commits(
&self,
owner: &str,
repo: &str,
commit: &str,
) -> Result<BranchCommits, ClientError> {
// NOTE(ww): This API is undocumented.
// See: https://github.com/orgs/community/discussions/78161
let url = format!("https://github.com/{owner}/{repo}/branch_commits/{commit}");
// We ask GitHub for JSON, because it sends HTML by default for this endpoint.
self.base_client
.get(&url)
.header(ACCEPT, "application/json")
.send()
.await?
.error_for_status()?
.json()
.await
.map_err(Into::into)
}
#[instrument(skip(self))]
pub(crate) async fn compare_commits(
&self,
@ -859,6 +882,23 @@ pub(crate) struct Commit {
pub(crate) sha: String,
}
/// The response structure from GitHub's undocumented `branch_commits` API.
///
/// This model is intentionally incomplete.
#[derive(Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
#[non_exhaustive]
pub(crate) struct BranchCommits {
branches: Vec<serde_json::Value>,
tags: Vec<String>,
}
impl BranchCommits {
pub(crate) fn is_empty(&self) -> bool {
self.branches.is_empty() && self.tags.is_empty()
}
}
#[derive(Clone, Deserialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum ComparisonStatus {

View file

@ -4,7 +4,7 @@ use std::str::FromStr;
use camino::Utf8Path;
use thiserror::Error;
use tower_lsp_server::lsp_types::{self, TextDocumentSyncKind};
use tower_lsp_server::ls_types::{self, TextDocumentSyncKind};
use tower_lsp_server::{Client, LanguageServer, LspService, Server};
use crate::audit::AuditInput;
@ -25,7 +25,7 @@ pub(crate) struct Error {
}
struct LspDocumentCommon {
uri: lsp_types::Uri,
uri: ls_types::Uri,
text: String,
version: Option<i32>,
}
@ -39,35 +39,35 @@ struct Backend {
impl LanguageServer for Backend {
async fn initialize(
&self,
_: lsp_types::InitializeParams,
) -> tower_lsp_server::jsonrpc::Result<lsp_types::InitializeResult> {
Ok(lsp_types::InitializeResult {
server_info: Some(lsp_types::ServerInfo {
_: ls_types::InitializeParams,
) -> tower_lsp_server::jsonrpc::Result<ls_types::InitializeResult> {
Ok(ls_types::InitializeResult {
server_info: Some(ls_types::ServerInfo {
name: "zizmor (LSP)".into(),
version: Some(env!("CARGO_PKG_VERSION").into()),
}),
capabilities: lsp_types::ServerCapabilities {
text_document_sync: Some(lsp_types::TextDocumentSyncCapability::Kind(
lsp_types::TextDocumentSyncKind::FULL,
capabilities: ls_types::ServerCapabilities {
text_document_sync: Some(ls_types::TextDocumentSyncCapability::Kind(
ls_types::TextDocumentSyncKind::FULL,
)),
..Default::default()
},
})
}
async fn initialized(&self, _: lsp_types::InitializedParams) {
async fn initialized(&self, _: ls_types::InitializedParams) {
let selectors = vec![
lsp_types::DocumentFilter {
ls_types::DocumentFilter {
language: Some("yaml".into()),
scheme: None,
pattern: Some("**/.github/workflows/*.{yml,yaml}".into()),
},
lsp_types::DocumentFilter {
ls_types::DocumentFilter {
language: Some("yaml".into()),
scheme: None,
pattern: Some("**/action.{yml,yaml}".into()),
},
lsp_types::DocumentFilter {
ls_types::DocumentFilter {
language: Some("yaml".into()),
scheme: None,
pattern: Some("**/.github/dependabot.{yml,yaml}".into()),
@ -80,46 +80,46 @@ impl LanguageServer for Backend {
// neglects to.
self.client
.register_capability(vec![
lsp_types::Registration {
ls_types::Registration {
id: "zizmor-didopen".into(),
method: "textDocument/didOpen".into(),
register_options: Some(
serde_json::to_value(lsp_types::TextDocumentRegistrationOptions {
serde_json::to_value(ls_types::TextDocumentRegistrationOptions {
document_selector: Some(selectors.clone()),
})
.expect("failed to serialize LSP document registration options"),
),
},
lsp_types::Registration {
ls_types::Registration {
id: "zizmor-didchange".into(),
method: "textDocument/didChange".into(),
register_options: Some(
serde_json::to_value(lsp_types::TextDocumentChangeRegistrationOptions {
serde_json::to_value(ls_types::TextDocumentChangeRegistrationOptions {
document_selector: Some(selectors.clone()),
sync_kind: TextDocumentSyncKind::FULL,
})
.expect("failed to serialize LSP document registration options"),
),
},
lsp_types::Registration {
ls_types::Registration {
id: "zizmor-didsave".into(),
method: "textDocument/didSave".into(),
register_options: Some(
serde_json::to_value(lsp_types::TextDocumentSaveRegistrationOptions {
serde_json::to_value(ls_types::TextDocumentSaveRegistrationOptions {
include_text: Some(true),
text_document_registration_options:
lsp_types::TextDocumentRegistrationOptions {
ls_types::TextDocumentRegistrationOptions {
document_selector: Some(selectors.clone()),
},
})
.expect("failed to serialize LSP document registration options"),
),
},
lsp_types::Registration {
ls_types::Registration {
id: "zizmor-didclose".into(),
method: "textDocument/didClose".into(),
register_options: Some(
serde_json::to_value(lsp_types::TextDocumentRegistrationOptions {
serde_json::to_value(ls_types::TextDocumentRegistrationOptions {
document_selector: Some(selectors),
})
.expect("failed to serialize LSP document registration options"),
@ -130,7 +130,7 @@ impl LanguageServer for Backend {
.expect("failed to register text document capabilities with the LSP client");
self.client
.log_message(lsp_types::MessageType::INFO, "server initialized!")
.log_message(ls_types::MessageType::INFO, "server initialized!")
.await;
}
@ -139,7 +139,7 @@ impl LanguageServer for Backend {
Ok(())
}
async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) {
async fn did_open(&self, params: ls_types::DidOpenTextDocumentParams) {
tracing::debug!("did_open: {:?}", params);
self.audit(LspDocumentCommon {
uri: params.text_document.uri,
@ -149,7 +149,7 @@ impl LanguageServer for Backend {
.await;
}
async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) {
async fn did_change(&self, params: ls_types::DidChangeTextDocumentParams) {
tracing::debug!("did_change: {:?}", params);
let mut params = params;
let Some(change) = params.content_changes.pop() else {
@ -164,7 +164,7 @@ impl LanguageServer for Backend {
.await;
}
async fn did_save(&self, params: lsp_types::DidSaveTextDocumentParams) {
async fn did_save(&self, params: ls_types::DidSaveTextDocumentParams) {
tracing::debug!("did_save: {:?}", params);
if let Some(text) = params.text {
self.audit(LspDocumentCommon {
@ -217,15 +217,15 @@ impl Backend {
.iter()
.map(|finding| {
let primary = finding.primary_location();
lsp_types::Diagnostic {
range: lsp_types::Range {
ls_types::Diagnostic {
range: ls_types::Range {
start: primary.concrete.location.start_point.into(),
end: primary.concrete.location.end_point.into(),
},
severity: Some(finding.determinations.severity.into()),
code: Some(lsp_types::NumberOrString::String(finding.ident.into())),
code_description: Some(lsp_types::CodeDescription {
href: lsp_types::Uri::from_str(finding.url)
code: Some(ls_types::NumberOrString::String(finding.ident.into())),
code_description: Some(ls_types::CodeDescription {
href: ls_types::Uri::from_str(finding.url)
.expect("finding contains an invalid URL somehow"),
}),
source: Some("zizmor".into()),
@ -248,25 +248,25 @@ impl Backend {
async fn audit(&self, params: LspDocumentCommon) {
if let Err(e) = self.audit_inner(params).await {
self.client
.log_message(lsp_types::MessageType::ERROR, format!("audit failed: {e}"))
.log_message(ls_types::MessageType::ERROR, format!("audit failed: {e}"))
.await;
}
}
}
impl From<Severity> for lsp_types::DiagnosticSeverity {
impl From<Severity> for ls_types::DiagnosticSeverity {
fn from(value: Severity) -> Self {
// TODO: Does this mapping make sense?
match value {
Severity::Informational => lsp_types::DiagnosticSeverity::INFORMATION,
Severity::Low => lsp_types::DiagnosticSeverity::WARNING,
Severity::Medium => lsp_types::DiagnosticSeverity::WARNING,
Severity::High => lsp_types::DiagnosticSeverity::ERROR,
Severity::Informational => ls_types::DiagnosticSeverity::INFORMATION,
Severity::Low => ls_types::DiagnosticSeverity::WARNING,
Severity::Medium => ls_types::DiagnosticSeverity::WARNING,
Severity::High => ls_types::DiagnosticSeverity::ERROR,
}
}
}
impl From<Point> for lsp_types::Position {
impl From<Point> for ls_types::Position {
fn from(value: Point) -> Self {
Self {
line: value.row as u32,

View file

@ -2,6 +2,7 @@
use std::{
collections::HashSet,
env,
io::{Write, stdout},
process::ExitCode,
};
@ -29,6 +30,7 @@ use tracing_indicatif::{IndicatifLayer, span_ext::IndicatifSpanExt};
use tracing_subscriber::{EnvFilter, layer::SubscriberExt as _, util::SubscriberInitExt as _};
use crate::{
audit::AuditError,
config::{Config, ConfigError, ConfigErrorInner},
github::Client,
models::AsDocument,
@ -116,6 +118,22 @@ struct App {
#[arg(long, value_enum, default_value_t)]
format: OutputFormat,
/// Whether to render OSC 8 links in the output.
///
/// This affects links under audit IDs, as well as any links
/// produced by audit rules.
///
/// Only affects `--format=plain` (the default).
#[arg(long, value_enum, default_value_t, env = "ZIZMOR_RENDER_LINKS")]
render_links: CliRenderLinks,
/// Whether to render audit URLs in the output, separately from any URLs
/// embedded in OSC 8 links.
///
/// Only affects `--format=plain` (the default).
#[arg(long, value_enum, default_value_t, env = "ZIZMOR_SHOW_AUDIT_URLS")]
show_audit_urls: CliShowAuditUrls,
/// Control the use of color in output.
#[arg(long, value_enum, value_name = "MODE")]
color: Option<ColorMode>,
@ -316,6 +334,79 @@ pub(crate) enum OutputFormat {
Github,
}
#[derive(Debug, Default, Copy, Clone, ValueEnum)]
pub(crate) enum CliRenderLinks {
/// Render OSC 8 links in output if support is detected.
#[default]
Auto,
/// Always render OSC 8 links in output.
Always,
/// Never render OSC 8 links in output.
Never,
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum RenderLinks {
Always,
Never,
}
impl From<CliRenderLinks> for RenderLinks {
fn from(value: CliRenderLinks) -> Self {
match value {
CliRenderLinks::Auto => {
// We render links if stdout is a terminal. This is assumed
// to preclude CI environments and log files.
//
// TODO: Switch this to the support-hyperlinks crate?
// See: https://github.com/zkat/supports-hyperlinks/pull/8
if stdout().is_terminal() {
RenderLinks::Always
} else {
RenderLinks::Never
}
}
CliRenderLinks::Always => RenderLinks::Always,
CliRenderLinks::Never => RenderLinks::Never,
}
}
}
#[derive(Debug, Default, Copy, Clone, ValueEnum)]
pub(crate) enum CliShowAuditUrls {
/// Render audit URLs in output automatically based on output format and runtime context.
///
/// For example, URLs will be shown if a CI runtime is detected.
#[default]
Auto,
/// Always render audit URLs in output.
Always,
/// Never render audit URLs in output.
Never,
}
#[derive(Debug, Copy, Clone)]
pub(crate) enum ShowAuditUrls {
Always,
Never,
}
impl From<CliShowAuditUrls> for ShowAuditUrls {
fn from(value: CliShowAuditUrls) -> Self {
match value {
CliShowAuditUrls::Auto => {
if utils::is_ci() || !stdout().is_terminal() {
ShowAuditUrls::Always
} else {
ShowAuditUrls::Never
}
}
CliShowAuditUrls::Always => ShowAuditUrls::Always,
CliShowAuditUrls::Never => ShowAuditUrls::Never,
}
}
}
#[derive(Debug, Copy, Clone, ValueEnum)]
pub(crate) enum ColorMode {
/// Use color output if the output supports it.
@ -551,10 +642,10 @@ enum Error {
#[error("failed to load audit rules")]
AuditLoad(#[source] anyhow::Error),
/// An error while running an audit.
#[error("{ident} failed on {input}")]
#[error("'{ident}' audit failed on {input}")]
Audit {
ident: &'static str,
source: anyhow::Error,
source: AuditError,
input: String,
},
/// An error while rendering output.
@ -597,6 +688,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
ColorMode::Never
} else if std::env::var("FORCE_COLOR").is_ok()
|| std::env::var("CLICOLOR_FORCE").is_ok()
|| utils::is_ci()
{
ColorMode::Always
} else {
@ -751,7 +843,7 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
while let Some(findings) = completion_stream.next().await {
let findings = findings.map_err(|err| Error::Audit {
ident: err.ident(),
source: err.into(),
source: err,
input: input.key().to_string(),
})?;
@ -768,7 +860,13 @@ async fn run(app: &mut App) -> Result<ExitCode, Error> {
}
match app.format {
OutputFormat::Plain => output::plain::render_findings(&registry, &results, app.naches),
OutputFormat::Plain => output::plain::render_findings(
&registry,
&results,
&app.show_audit_urls.into(),
&app.render_links.into(),
app.naches,
),
OutputFormat::Json | OutputFormat::JsonV1 => {
output::json::v1::output(stdout(), results.findings()).map_err(Error::Output)?
}
@ -811,7 +909,7 @@ async fn main() -> ExitCode {
// which is then typically inaccessible from an already failed
// CI job. In those cases, it's better to dump directly to stderr,
// since that'll typically be captured by console logging.
if std::env::var_os("CI").is_some() {
if utils::is_ci() {
std::panic::set_hook(Box::new(|info| {
let trace = std::backtrace::Backtrace::force_capture();
eprintln!("FATAL: zizmor crashed. This is a bug that should be reported.");
@ -871,6 +969,16 @@ async fn main() -> ExitCode {
Some(report)
}
Error::Collection(err) => match err.inner() {
CollectionError::NoInputs => {
let group = Group::with_title(Level::ERROR.primary_title(err.to_string()))
.element(Level::HELP.message("collection yielded no auditable inputs"))
.element(Level::HELP.message("inputs must contain at least one valid workflow, action, or Dependabot config"));
let renderer = Renderer::styled();
let report = renderer.render(&[group]);
Some(report)
}
CollectionError::DuplicateInput(..) => {
let group = Group::with_title(Level::ERROR.primary_title(err.to_string()))
.element(Level::HELP.message(format!(
@ -907,7 +1015,8 @@ async fn main() -> ExitCode {
Some(report)
}
CollectionError::Yamlpath(..) => {
// These errors only happen if something is wrong with zizmor itself.
CollectionError::Yamlpath(..) | CollectionError::Model(..) => {
let group = Group::with_title(Level::ERROR.primary_title(err.to_string())).elements([
Level::HELP.message("this typically indicates a bug in zizmor; please report it"),
Level::HELP.message(

View file

@ -7,7 +7,7 @@ use github_actions_models::common::Env;
use github_actions_models::common::expr::LoE;
use github_actions_models::workflow::job::Strategy;
use crate::finding::location::Locatable;
use crate::finding::location::{Locatable, SymbolicLocation};
use crate::models::inputs::HasInputs;
pub(crate) mod action;
@ -45,7 +45,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs {
fn env_is_static(&self, ctx: &context::Context) -> bool;
/// Returns a [`common::Uses`] for this step, if it has one.
fn uses(&self) -> Option<&common::Uses>;
fn uses(&self) -> Option<&'doc common::Uses>;
/// Returns this step's job's strategy, if present.
///
@ -64,7 +64,7 @@ pub(crate) trait StepCommon<'doc>: Locatable<'doc> + HasInputs {
///
/// Returns `None` if the shell cannot be statically determined, including
/// if the shell is specified via an expression.
fn shell(&self) -> Option<&str>;
fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)>;
}
impl<'a, 'doc, T: StepCommon<'doc>> AsDocument<'a, 'doc> for T {

View file

@ -175,10 +175,13 @@ impl<'doc> Locatable<'doc> for CompositeStep<'doc> {
])
}
fn location_with_name(&self) -> SymbolicLocation<'doc> {
match self.inner.name {
Some(_) => self.location().with_keys(["name".into()]),
None => self.location(),
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
if self.inner.name.is_some() {
self.location().with_keys(["name".into()])
} else if self.inner.id.is_some() {
self.location().with_keys(["id".into()])
} else {
self.location()
}
}
}
@ -198,7 +201,7 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> {
utils::env_is_static(ctx, &[&self.env])
}
fn uses(&self) -> Option<&common::Uses> {
fn uses(&self) -> Option<&'doc common::Uses> {
let action::StepBody::Uses { uses, .. } = &self.inner.body else {
return None;
};
@ -229,14 +232,19 @@ impl<'doc> StepCommon<'doc> for CompositeStep<'doc> {
self.action().as_document()
}
fn shell(&self) -> Option<&str> {
fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
// For composite action steps, shell is always explicitly specified in the YAML.
if let action::StepBody::Run {
shell: LoE::Literal(shell),
..
} = &self.inner.body
{
Some(shell)
Some((
shell,
self.location()
.with_keys(["shell".into()])
.annotated("shell defined here"),
))
} else {
None
}

View file

@ -131,7 +131,7 @@ impl<'doc> Locatable<'doc> for Update<'doc> {
.annotated("this update rule")
}
fn location_with_name(&self) -> SymbolicLocation<'doc> {
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
self.location()
.with_keys(["package-ecosystem".into()])
.annotated("this ecosystem")

View file

@ -1,18 +1,18 @@
//! Extension traits for the `Uses` APIs.
use std::str::FromStr;
use std::{str::FromStr, sync::LazyLock};
use github_actions_models::common::{RepositoryUses, Uses};
use regex::Regex;
use serde::Deserialize;
use crate::utils::once::static_regex;
// Matches all variants of [`RepositoryUsesPattern`] except `*`.
//
// TODO: Replace this with a real parser; this is ridiculous.
static_regex!(
REPOSITORY_USES_PATTERN,
r#"(?xmi) # verbose, multi-line mode, case-insensitive
/// Matches all variants of [`RepositoryUsesPattern`] except `*`.
///
/// TODO: Replace this with a real parser; this is ridiculous.
static REPOSITORY_USES_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
#[allow(clippy::unwrap_used)]
Regex::new(
r#"(?xmi) # verbose, multi-line mode, case-insensitive
^ # start of line
([\w-]+) # (1) owner
/ # /
@ -30,8 +30,10 @@ static_regex!(
([[[:graph:]]&&[^\*]]+) # (4) git ref (any non-space, non-* characters)
)? # end of non-capturing group for optional git ref
$ # end of line
"#
);
"#,
)
.unwrap()
});
/// Represents a pattern for matching repository `uses` references.
/// These patterns are ordered by specificity; more specific patterns
@ -71,10 +73,10 @@ impl RepositoryUsesPattern {
subpath,
git_ref,
} => {
uses.owner.eq_ignore_ascii_case(owner)
&& uses.repo.eq_ignore_ascii_case(repo)
&& uses.subpath == *subpath
&& uses.git_ref.as_str() == git_ref
uses.owner().eq_ignore_ascii_case(owner)
&& uses.repo().eq_ignore_ascii_case(repo)
&& uses.subpath() == subpath.as_deref()
&& uses.git_ref() == git_ref
}
RepositoryUsesPattern::ExactPath {
owner,
@ -87,19 +89,19 @@ impl RepositoryUsesPattern {
// Utf8Path gets us part of the way there, but is
// platform dependent (i.e. will do the wrong thing
// if the platform separator is not /).
uses.owner.eq_ignore_ascii_case(owner)
&& uses.repo.eq_ignore_ascii_case(repo)
&& uses.subpath.as_deref().is_some_and(|s| s == subpath)
uses.owner().eq_ignore_ascii_case(owner)
&& uses.repo().eq_ignore_ascii_case(repo)
&& uses.subpath().is_some_and(|s| s == subpath)
}
RepositoryUsesPattern::ExactRepo { owner, repo } => {
uses.owner.eq_ignore_ascii_case(owner)
&& uses.repo.eq_ignore_ascii_case(repo)
&& uses.subpath.is_none()
uses.owner().eq_ignore_ascii_case(owner)
&& uses.repo().eq_ignore_ascii_case(repo)
&& uses.subpath().is_none()
}
RepositoryUsesPattern::InRepo { owner, repo } => {
uses.owner.eq_ignore_ascii_case(owner) && uses.repo.eq_ignore_ascii_case(repo)
uses.owner().eq_ignore_ascii_case(owner) && uses.repo().eq_ignore_ascii_case(repo)
}
RepositoryUsesPattern::InOwner(owner) => uses.owner.eq_ignore_ascii_case(owner),
RepositoryUsesPattern::InOwner(owner) => uses.owner().eq_ignore_ascii_case(owner),
RepositoryUsesPattern::Any => true,
}
}
@ -218,18 +220,18 @@ impl RepositoryUsesExt for RepositoryUses {
}
fn ref_is_commit(&self) -> bool {
self.git_ref.len() == 40 && self.git_ref.chars().all(|c| c.is_ascii_hexdigit())
self.git_ref().len() == 40 && self.git_ref().chars().all(|c| c.is_ascii_hexdigit())
}
fn commit_ref(&self) -> Option<&str> {
match &self.git_ref {
match &self.git_ref() {
git_ref if self.ref_is_commit() => Some(git_ref),
_ => None,
}
}
fn symbolic_ref(&self) -> Option<&str> {
match &self.git_ref {
match &self.git_ref() {
git_ref if !self.ref_is_commit() => Some(git_ref),
_ => None,
}
@ -246,7 +248,7 @@ impl UsesExt for Uses {
/// Whether the `uses:` is unpinned.
fn unpinned(&self) -> bool {
match self {
Uses::Docker(docker) => docker.hash.is_none() && docker.tag.is_none(),
Uses::Docker(docker) => docker.hash().is_none() && docker.tag().is_none(),
Uses::Repository(_) => false,
// Local `uses:` are always unpinned; any `@ref` component
// is actually part of the path.
@ -263,7 +265,7 @@ impl UsesExt for Uses {
// (since it's fully contained within the calling repo),
Uses::Local(_) => false,
Uses::Repository(repo) => !repo.ref_is_commit(),
Uses::Docker(docker) => docker.hash.is_none(),
Uses::Docker(docker) => docker.hash().is_none(),
}
}
}
@ -477,7 +479,7 @@ mod tests {
("actions/checkout/foo@v3", "actions/checkout/foo@v3", true),
("actions/checkout/foo@v1", "actions/checkout/foo@v3", false),
] {
let Ok(Uses::Repository(uses)) = Uses::from_str(uses) else {
let Ok(Uses::Repository(uses)) = Uses::parse(uses) else {
return Err(anyhow!("invalid uses: {uses}"));
};

View file

@ -286,7 +286,13 @@ impl<'doc> NormalJob<'doc> {
}
}
impl<'doc> JobExt<'doc> for NormalJob<'doc> {
impl<'a, 'doc> AsDocument<'a, 'doc> for NormalJob<'doc> {
fn as_document(&'a self) -> &'doc yamlpath::Document {
self.parent.as_document()
}
}
impl<'doc> JobCommon<'doc> for NormalJob<'doc> {
fn id(&self) -> &'doc str {
self.id
}
@ -329,7 +335,13 @@ impl<'doc> ReusableWorkflowCallJob<'doc> {
}
}
impl<'doc> JobExt<'doc> for ReusableWorkflowCallJob<'doc> {
impl<'a, 'doc> AsDocument<'a, 'doc> for ReusableWorkflowCallJob<'doc> {
fn as_document(&'a self) -> &'doc yamlpath::Document {
self.parent.as_document()
}
}
impl<'doc> JobCommon<'doc> for ReusableWorkflowCallJob<'doc> {
fn id(&self) -> &'doc str {
self.id
}
@ -352,7 +364,7 @@ impl<'doc> std::ops::Deref for ReusableWorkflowCallJob<'doc> {
}
/// Common behavior across both normal and reusable jobs.
pub(crate) trait JobExt<'doc> {
pub(crate) trait JobCommon<'doc>: Locatable<'doc> {
/// The job's unique ID (i.e., its key in the workflow's `jobs:` block).
fn id(&self) -> &'doc str;
@ -363,7 +375,7 @@ pub(crate) trait JobExt<'doc> {
fn parent(&self) -> &'doc Workflow;
}
impl<'doc, T: JobExt<'doc>> Locatable<'doc> for T {
impl<'doc, T: JobCommon<'doc>> Locatable<'doc> for T {
/// Returns this job's [`SymbolicLocation`].
fn location(&self) -> SymbolicLocation<'doc> {
self.parent()
@ -372,10 +384,15 @@ impl<'doc, T: JobExt<'doc>> Locatable<'doc> for T {
.with_keys(["jobs".into(), self.id().into()])
}
fn location_with_name(&self) -> SymbolicLocation<'doc> {
match self.name() {
Some(_) => self.location().with_keys(["name".into()]),
None => self.location(),
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
if self.name().is_some() {
self.location().with_keys(["name".into()])
} else {
self.parent()
.location()
.annotated("this job")
.with_keys(["jobs".into(), self.id().into()])
.key_only()
}
}
}
@ -617,10 +634,13 @@ impl<'doc> Locatable<'doc> for Step<'doc> {
.annotated("this step")
}
fn location_with_name(&self) -> SymbolicLocation<'doc> {
match self.inner.name {
Some(_) => self.location().with_keys(["name".into()]),
None => self.location(),
fn location_with_grip(&self) -> SymbolicLocation<'doc> {
if self.inner.name.is_some() {
self.location().with_keys(["name".into()])
} else if self.inner.id.is_some() {
self.location().with_keys(["id".into()])
} else {
self.location()
}
}
}
@ -640,7 +660,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> {
utils::env_is_static(ctx, &[&self.env, &self.job().env, &self.workflow().env])
}
fn uses(&self) -> Option<&common::Uses> {
fn uses(&self) -> Option<&'doc common::Uses> {
let StepBody::Uses { uses, .. } = &self.inner.body else {
return None;
};
@ -671,7 +691,7 @@ impl<'doc> StepCommon<'doc> for Step<'doc> {
self.workflow().as_document()
}
fn shell(&self) -> Option<&str> {
fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
// For workflow steps, we can use the existing shell() method
self.shell()
}
@ -700,7 +720,7 @@ impl<'doc> Step<'doc> {
/// if the shell can't be statically inferred.
///
/// Invariant: panics if the step is not a `run:` step.
pub(crate) fn shell(&self) -> Option<&str> {
pub(crate) fn shell(&self) -> Option<(&str, SymbolicLocation<'doc>)> {
let StepBody::Run {
run: _,
working_directory: _,
@ -716,7 +736,12 @@ impl<'doc> Step<'doc> {
// If any of these is an expression, we can't infer the shell
// statically, so we terminate early with `None`.
let shell = match shell {
Some(LoE::Literal(shell)) => Some(shell.as_str()),
Some(LoE::Literal(shell)) => Some((
shell.as_str(),
self.location()
.with_keys(["shell".into()])
.annotated("shell defined here"),
)),
Some(LoE::Expr(_)) => return None,
None => match self
.job()
@ -724,7 +749,13 @@ impl<'doc> Step<'doc> {
.as_ref()
.and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref()))
{
Some(LoE::Literal(shell)) => Some(shell.as_str()),
Some(LoE::Literal(shell)) => Some((
shell.as_str(),
self.job()
.location()
.with_keys(["defaults".into(), "run".into(), "shell".into()])
.annotated("job default shell defined here"),
)),
Some(LoE::Expr(_)) => return None,
None => match self
.workflow()
@ -732,14 +763,30 @@ impl<'doc> Step<'doc> {
.as_ref()
.and_then(|d| d.run.as_ref().and_then(|r| r.shell.as_ref()))
{
Some(LoE::Literal(shell)) => Some(shell.as_str()),
Some(LoE::Literal(shell)) => Some((
shell.as_str(),
self.workflow()
.location()
.with_keys(["defaults".into(), "run".into(), "shell".into()])
.annotated("workflow default shell defined here"),
)),
Some(LoE::Expr(_)) => return None,
None => None,
},
},
};
shell.or_else(|| self.parent.runner_default_shell())
shell.or_else(|| {
self.parent.runner_default_shell().map(|shell| {
(
shell,
self.job()
.location()
.with_keys(["runs-on".into()])
.annotated("shell implied by runner"),
)
})
})
}
}

View file

@ -7,6 +7,7 @@ use anstream::{eprintln, print, println};
use owo_colors::OwoColorize;
use crate::{
RenderLinks, ShowAuditUrls,
finding::{
Finding, Severity,
location::{Location, LocationKind},
@ -43,6 +44,7 @@ impl From<&Severity> for Level<'_> {
pub(crate) fn finding_snippets<'doc>(
registry: &'doc InputRegistry,
finding: &'doc Finding<'doc>,
render_links_mode: &RenderLinks,
) -> Vec<Snippet<'doc, Annotation<'doc>>> {
// Our finding might span multiple workflows, so we need to group locations
// by their enclosing workflow to generate each snippet correctly.
@ -67,15 +69,20 @@ pub(crate) fn finding_snippets<'doc>(
for (input_key, locations) in locations_by_workflow {
let input = registry.get_input(input_key);
let path = match render_links_mode {
RenderLinks::Always => input.link().unwrap_or(input_key.presentation_path()),
RenderLinks::Never => input_key.presentation_path(),
};
snippets.push(
Snippet::source(input.as_document().source())
.fold(true)
.line_start(1)
.path(input.link().unwrap_or(input_key.presentation_path()))
.path(path)
.annotations(locations.iter().map(|loc| {
let annotation = match loc.symbolic.link {
Some(ref link) => link,
None => &loc.symbolic.annotation,
let annotation = match (loc.symbolic.link.as_deref(), render_links_mode) {
(Some(link), RenderLinks::Always) => link,
_ => &loc.symbolic.annotation,
};
AnnotationKind::from(loc.symbolic.kind)
@ -94,10 +101,12 @@ pub(crate) fn finding_snippets<'doc>(
pub(crate) fn render_findings(
registry: &InputRegistry,
findings: &FindingRegistry,
show_urls_mode: &ShowAuditUrls,
render_links_mode: &RenderLinks,
naches_mode: bool,
) {
for finding in findings.findings() {
render_finding(registry, finding);
render_finding(registry, finding, show_urls_mode, render_links_mode);
println!();
}
@ -190,11 +199,19 @@ pub(crate) fn render_findings(
}
}
fn render_finding(registry: &InputRegistry, finding: &Finding) {
let title = Level::from(&finding.determinations.severity)
fn render_finding(
registry: &InputRegistry,
finding: &Finding,
show_urls_mode: &ShowAuditUrls,
render_links_mode: &RenderLinks,
) {
let mut title = Level::from(&finding.determinations.severity)
.primary_title(finding.desc)
.id(finding.ident)
.id_url(finding.url);
.id(finding.ident);
if matches!(render_links_mode, RenderLinks::Always) {
title = title.id_url(finding.url);
}
let confidence = format!(
"audit confidence → {:?}",
@ -202,7 +219,7 @@ fn render_finding(registry: &InputRegistry, finding: &Finding) {
);
let mut group = Group::with_title(title)
.elements(finding_snippets(registry, finding))
.elements(finding_snippets(registry, finding, render_links_mode))
.element(Level::NOTE.message(confidence));
if let Some(tip) = &finding.tip {
@ -213,6 +230,13 @@ fn render_finding(registry: &InputRegistry, finding: &Finding) {
group = group.element(Level::NOTE.message("this finding has an auto-fix"));
}
if matches!(show_urls_mode, ShowAuditUrls::Always) {
group = group.element(Level::HELP.message(format!(
"audit documentation → {url}",
url = finding.url.green()
)))
}
// TODO: Evaluate alternative decor styles.
let renderer = Renderer::styled();
println!("{}", renderer.render(&[group]));

View file

@ -74,6 +74,7 @@ impl AuditRegistry {
register_audit!(audit::dependabot_execution::DependabotExecution);
register_audit!(audit::dependabot_cooldown::DependabotCooldown);
register_audit!(audit::concurrency_limits::ConcurrencyLimits);
register_audit!(audit::archived_uses::ArchivedUses);
Ok(registry)
}

View file

@ -34,7 +34,7 @@ pub(crate) enum CollectionError {
/// The input couldn't be converted into the expected model.
/// This typically indicates a bug in `github-actions-models`.
#[error("couldn't turn input into a an appropriate model")]
Model(#[source] anyhow::Error),
Model(#[from] serde_yaml::Error),
/// The input couldn't be loaded into an internal yamlpath document.
/// This typically indicates a bug in `yamlpath`.

View file

@ -1,15 +1,11 @@
//! Helper routines.
use anyhow::{Context as _, Error, anyhow};
use anyhow::{Error, anyhow};
use camino::Utf8Path;
use github_actions_expressions::context::{Context, ContextPattern};
use github_actions_models::common::{Env, expr::LoE};
use jsonschema::{
BasicOutput::{Invalid, Valid},
Validator,
output::{ErrorDescription, OutputUnit},
validator_for,
};
use jsonschema::ErrorEntry;
use jsonschema::{Validator, validator_for};
use std::ops::{Deref, Range};
use std::{fmt::Write, sync::LazyLock};
@ -307,11 +303,11 @@ pub(crate) static DEFAULT_ENVIRONMENT_VARIABLES: &[(
),
];
fn parse_validation_errors(errors: Vec<OutputUnit<ErrorDescription>>) -> Error {
fn parse_validation_errors(errors: Vec<ErrorEntry<'_>>) -> Error {
let mut message = String::new();
for error in errors {
let description = error.error_description().to_string();
let description = error.error.to_string();
// HACK: error descriptions are sometimes a long rats' nest
// of JSON objects. We should render this in a palatable way
// but doing so is nontrivial, so we just skip them for now.
@ -319,7 +315,7 @@ fn parse_validation_errors(errors: Vec<OutputUnit<ErrorDescription>>) -> Error {
// the error for an unmatched "oneOf", so these errors are
// typically less useful anyways.
if !description.starts_with("{") {
let location = error.instance_location().as_str();
let location = error.instance_location.as_str();
if location.is_empty() {
writeln!(message, "{description}").expect("I/O on a String failed");
} else {
@ -353,11 +349,17 @@ where
// to distinguish between syntax and semantic errors,
// but serde-yaml doesn't give us an API to do that.
// To approximate it, we re-parse the input as a
// `Value` and use that as an oracle -- a successful
// `serde_yaml::Mapping`, then convert that `serde_yaml::Mapping`
// into a `serde_json::Value` and use it as an oracle -- a successful
// re-parse indicates that the input is valid YAML and
// that our error is semantic, while a failed re-parse
// indicates a syntax error.
//
// We need to round-trip through a `serde_yaml::Mapping` to ensure that
// all of YAML's validity rules are preserved -- directly deserializing
// into a `serde_json::Value` would miss some YAML-specific checks,
// like duplicate keys within mappings. See #1395 for an example of this.
//
// We do this in a nested fashion to avoid re-parsing
// the input twice if we can help it, and because the
// more obvious trick (`serde_yaml::from_value`) doesn't
@ -366,21 +368,26 @@ where
// See: https://github.com/dtolnay/serde-yaml/issues/170
// See: https://github.com/dtolnay/serde-yaml/issues/395
match serde_yaml::from_str(contents) {
match serde_yaml::from_str::<serde_yaml::Mapping>(contents) {
// We know we have valid YAML, so one of two things happened here:
// 1. The input is semantically valid, but we have a bug in
// `github-actions-models`.
// 2. The input is semantically invalid, and the user
// needs to fix it.
// We use the JSON schema `validator` to separate these.
Ok(raw_value) => match validator.apply(&raw_value).basic() {
Valid(_) => Err(e)
.context("this suggests a bug in zizmor; please report it!")
.map_err(CollectionError::Model),
Invalid(errors) => {
Ok(raw_value) => {
let evaluation = validator.evaluate(
&serde_json::to_value(&raw_value)
.map_err(|e| CollectionError::Syntax(e.into()))?,
);
if evaluation.flag().valid {
Err(e.into())
} else {
let errors = evaluation.iter_errors().collect::<Vec<_>>();
Err(CollectionError::Schema(parse_validation_errors(errors)))
}
},
}
// Syntax error.
Err(e) => Err(CollectionError::Syntax(e.into())),
}
@ -703,6 +710,13 @@ pub(crate) mod once {
pub(crate) use warn_once;
}
/// Returns whether we are running in a CI environment.
pub(crate) fn is_ci() -> bool {
static IS_CI: LazyLock<bool> = LazyLock::new(|| std::env::var_os("CI").is_some());
*IS_CI
}
#[cfg(test)]
mod tests {
use anyhow::Result;

View file

@ -196,15 +196,11 @@ fn audit_unpinned_uses() -> anyhow::Result<()> {
assert_value_match(&findings, "$[0].determinations.confidence", "High");
assert_value_match(&findings, "$[0].determinations.severity", "Medium");
assert_value_match(
&findings,
"$[0].locations[0].concrete.feature",
"uses: docker://ubuntu",
);
assert_value_match(&findings, "$[0].locations[0].concrete.feature", "ubuntu");
assert_value_match(
&findings,
"$[1].locations[0].concrete.feature",
"uses: docker://ghcr.io/pypa/gh-action-pypi-publish",
"ghcr.io/pypa/gh-action-pypi-publish",
);
Ok(())

View file

@ -7,7 +7,7 @@ fn test_regular_persona() -> anyhow::Result<()> {
zizmor()
.input(input_under_test("anonymous-definition.yml"))
.run()?,
@r"No findings to report. Good job! (2 suppressed)"
@"No findings to report. Good job! (2 suppressed)"
);
Ok(())
@ -34,17 +34,16 @@ fn test_pedantic_persona() -> anyhow::Result<()> {
| |__________________________________________^ this workflow
|
= note: audit confidence High
= tip: use 'name: ...' to give this workflow a name
info[anonymous-definition]: workflow or action definition without a name
--> @@INPUT@@:21:3
|
21 | / will-trigger:
22 | | runs-on: ubuntu-latest
23 | | steps:
24 | | - run: "echo this job will trigger"
| |__________________________________________^ this job
21 | will-trigger:
| ^^^^^^^^^^^^ this job
|
= note: audit confidence High
= tip: use 'name: ...' to give this job a name
2 findings: 1 informational, 1 low, 0 medium, 0 high
"#

View file

@ -0,0 +1,67 @@
use crate::common::{input_under_test, zizmor};
#[test]
fn test_regular_persona() -> anyhow::Result<()> {
insta::assert_snapshot!(
zizmor().input(input_under_test("archived-uses.yml")).run()?,
@r"
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@:17:15
|
16 | - name: setup ruby
| ---------------- this step
17 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@:20:15
|
19 | - name: SETUP RUBY BUT LOUDLY
| --------------------------- this step
20 | uses: ACTIONS/SETUP-RUBY@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@:24:11
|
23 | name: archived-uses-reusable
| ---------------------------- this job
24 | uses: actions/setup-ruby/.github/workflows/notreal.yml@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
3 findings: 0 informational, 0 low, 3 medium, 0 high
"
);
Ok(())
}
#[test]
fn test_composite_action() -> anyhow::Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("archived-uses/action/"))
.run()?,
@r"
warning[archived-uses]: action or reusable workflow from archived repository
--> @@INPUT@@action.yml:9:13
|
8 | - name: setup ruby
| ---------------- this step
9 | uses: actions/setup-ruby@e932e7af67fc4a8fc77bd86b744acd4e42fe3543 # v1.1.3
| ^^^^^^^^^^^^^^^^^^ repository is archived
|
= note: audit confidence High
1 finding: 0 informational, 0 low, 1 medium, 0 high
"
);
Ok(())
}

View file

@ -80,28 +80,28 @@ fn test_jobs_missing_no_cancel() -> anyhow::Result<()> {
.args(["--persona=pedantic"])
.run()?,
@r"
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:9:5
|
9 | concurrency: group
| ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress
|
= note: audit confidence High
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:9:5
|
9 | concurrency: group
| ^^^^^^^^^^^^^^^^^^ job concurrency is missing cancel-in-progress
|
= note: audit confidence High
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:1:1
|
1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency
2 | | on: push
3 | | permissions: {}
... |
17 | | - name: 2-ok
18 | | run: echo ok
| |___________________^ missing concurrency setting
|
= note: audit confidence High
help[concurrency-limits]: insufficient job-level concurrency limits
--> @@INPUT@@:1:1
|
1 | / name: Workflow with job 1 missing cancel-in-progress and job 2 missing concurrency
2 | | on: push
3 | | permissions: {}
... |
17 | | - name: 2-ok
18 | | run: echo ok
| |___________________^ missing concurrency setting
|
= note: audit confidence High
2 findings: 0 informational, 2 low, 0 medium, 0 high
2 findings: 0 informational, 2 low, 0 medium, 0 high
"
);

View file

@ -165,7 +165,7 @@ fn test_config_short_cooldown_permitted() -> anyhow::Result<()> {
.input(input_under_test("dependabot-cooldown/default-days-too-short/dependabot.yml"))
.config(input_under_test("dependabot-cooldown/configs/cooldown-one-day.yml"))
.run()?,
@r"No findings to report. Good job!"
@"No findings to report. Good job!"
);
Ok(())

View file

@ -27,26 +27,26 @@ fn test_deny_all() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:13:9
--> @@INPUT@@:13:15
|
13 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:14:9
--> @@INPUT@@:14:15
|
14 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:15:9
--> @@INPUT@@:15:15
|
15 | - uses: actions/checkout@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -68,10 +68,10 @@ fn test_allow_some() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:13:9
--> @@INPUT@@:13:15
|
13 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -93,18 +93,18 @@ fn test_deny_some() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:14:9
--> @@INPUT@@:14:15
|
14 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:15:9
--> @@INPUT@@:15:15
|
15 | - uses: actions/checkout@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -126,18 +126,18 @@ fn test_deny_some_refs() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:13:9
--> @@INPUT@@:13:15
|
13 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:14:9
--> @@INPUT@@:14:15
|
14 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High
@ -159,10 +159,10 @@ fn test_allow_some_refs() -> Result<()> {
.run()?,
@r"
error[forbidden-uses]: forbidden action used
--> @@INPUT@@:15:9
--> @@INPUT@@:15:15
|
15 | - uses: actions/checkout@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
| ^^^^^^^^^^^^^^^^^^^ use of this action is forbidden
|
= note: audit confidence High

View file

@ -0,0 +1,31 @@
use crate::common::{input_under_test, zizmor};
#[cfg_attr(not(feature = "gh-token-tests"), ignore)]
#[test]
fn test_regular_persona() -> anyhow::Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("impostor-commit.yml"))
.offline(false)
.run()?,
@r"
error[impostor-commit]: commit with no history in referenced repository
--> @@INPUT@@:29:15
|
29 | - uses: actions/checkout@c7d749a2d57b4b375d1ebcd17cfbfb60c676f18e
| - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ uses a commit that doesn't belong to the specified org/repo
| _________|
| |
30 | | with:
31 | | persist-credentials: false
| |____________________________________- this step
|
= note: audit confidence High
= note: this finding has an auto-fix
4 findings (3 suppressed, 1 fixable): 0 informational, 0 low, 0 medium, 1 high
"
);
Ok(())
}

View file

@ -1,6 +1,7 @@
//! Per-audit integration tests, including snapshots.
mod anonymous_definition;
mod archived_uses;
mod artipacked;
mod bot_conditions;
mod cache_poisoning;
@ -12,7 +13,7 @@ mod excessive_permissions;
mod forbidden_uses;
mod github_env;
// mod hardcoded_container_credentials; // TODO
// mod impostor_commit; // TODO
mod impostor_commit;
mod insecure_commands;
// mod known_vulnerable_actions; // TODO
mod obfuscation;

View file

@ -237,3 +237,56 @@ fn test_issue_1177_repro_pedantic() -> Result<()> {
Ok(())
}
/// Reproduces issue #1414: the obfuscation audit should not crash if the
/// user has `shell: cmd` defined as a job or workflow default rather than
/// at the step level.
///
/// See: https://github.com/zizmorcore/zizmor/issues/1414
#[test]
fn test_issue_1414_repro() -> Result<()> {
insta::assert_snapshot!(
zizmor()
.input(input_under_test("obfuscation/issue-1414-repro.yml"))
.run()?,
@r"
help[obfuscation]: obfuscated usage of GitHub Actions features
--> @@INPUT@@:13:9
|
13 | shell: cmd
| ^^^^^^^^^^ job default shell defined here
14 | steps:
15 | - name: say hi
| ------------ Windows CMD shell limits analysis
|
= note: audit confidence High
= tip: use 'shell: pwsh' or 'shell: bash' for improved analysis
3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high
"
);
// Like #1414, but with `shell: cmd` defined at the workflow level.
insta::assert_snapshot!(
zizmor()
.input(input_under_test("obfuscation/workflow-cmd-default-shell.yml"))
.run()?,
@r"
help[obfuscation]: obfuscated usage of GitHub Actions features
--> @@INPUT@@:10:5
|
10 | shell: cmd
| ^^^^^^^^^^ workflow default shell defined here
...
16 | - name: say hi
| ------------ Windows CMD shell limits analysis
|
= note: audit confidence High
= tip: use 'shell: pwsh' or 'shell: bash' for improved analysis
3 findings (2 suppressed): 0 informational, 1 low, 0 medium, 0 high
"
);
Ok(())
}

View file

@ -33,7 +33,7 @@ fn test_issue_518_repro() -> Result<()> {
.input(input_under_test("ref-confusion/issue-518-repro.yml"))
.offline(false)
.run()?,
@r"No findings to report. Good job! (1 ignored, 1 suppressed)"
@"No findings to report. Good job! (1 ignored, 1 suppressed)"
);
Ok(())

View file

@ -42,7 +42,7 @@ fn test_nested_annotated_tags() -> Result<()> {
"ref-version-mismatch/nested-annotated-tags.yml"
))
.run()?,
@r"No findings to report. Good job! (1 suppressed)"
@"No findings to report. Good job! (1 suppressed)"
);
Ok(())

View file

@ -8,10 +8,10 @@ fn secrets_inherit() -> anyhow::Result<()> {
.run()?,
@r"
warning[secrets-inherit]: secrets unconditionally inherited by called workflow
--> @@INPUT@@:10:5
--> @@INPUT@@:10:11
|
10 | uses: octo-org/example-repo/.github/workflows/called-workflow.yml@main
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this reusable workflow
11 | # NOT OK: unconditionally inherits
12 | secrets: inherit
| ---------------- inherits all parent secrets

View file

@ -30,7 +30,7 @@ fn test_self_hosted_default() -> Result<()> {
zizmor()
.input(input_under_test("self-hosted.yml"))
.run()?,
@r"No findings to report. Good job! (1 suppressed)"
@"No findings to report. Good job! (1 suppressed)"
);
Ok(())

View file

@ -11,10 +11,10 @@ fn test_pedantic_persona() -> anyhow::Result<()> {
.run()?,
@r"
help[stale-action-refs]: commit hash does not point to a Git tag
--> @@INPUT@@:34:7
--> @@INPUT@@:34:13
|
34 | - uses: actions/checkout@009b9ae9e446ad8d9b8c809870b0fbcc5e03573e
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this step
|
= note: audit confidence High

View file

@ -183,7 +183,7 @@ fn test_issue_418_repro() -> Result<()> {
zizmor()
.input(input_under_test("template-injection/issue-418-repro.yml"))
.run()?,
@r"No findings to report. Good job! (3 suppressed)"
@"No findings to report. Good job! (3 suppressed)"
);
Ok(())
@ -247,10 +247,10 @@ fn test_pr_425_backstop_action() -> Result<()> {
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:29:7
--> @@INPUT@@:29:13
|
29 | uses: azure/powershell@whatever
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High

View file

@ -64,7 +64,7 @@ fn test_undocumented_permissions_default() -> Result<()> {
zizmor()
.input(input_under_test("undocumented-permissions.yml"))
.run()?,
@r"No findings to report. Good job! (5 suppressed)"
@"No findings to report. Good job! (5 suppressed)"
);
Ok(())
@ -78,7 +78,7 @@ fn test_documented_permissions_pedantic() -> Result<()> {
.input(input_under_test("undocumented-permissions/documented.yml"))
.args(["--persona=pedantic"])
.run()?,
@r"No findings to report. Good job! (1 ignored)"
@"No findings to report. Good job! (1 ignored)"
);
Ok(())
@ -94,7 +94,7 @@ fn test_contents_read_only_pedantic() -> Result<()> {
))
.args(["--persona=pedantic"])
.run()?,
@r"No findings to report. Good job!"
@"No findings to report. Good job!"
);
Ok(())
@ -110,7 +110,7 @@ fn test_empty_permissions_pedantic() -> Result<()> {
))
.args(["--persona=pedantic"])
.run()?,
@r"No findings to report. Good job!"
@"No findings to report. Good job!"
);
Ok(())

View file

@ -10,18 +10,18 @@ fn test_unpinned_uses_pedantic() -> Result<()> {
.run()?,
@r"
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:21:9
--> @@INPUT@@:21:24
|
21 | - uses: docker://ubuntu
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:27:9
--> @@INPUT@@:27:24
|
27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
@ -40,18 +40,18 @@ fn test_unpinned_uses_default() -> Result<()> {
.run()?,
@r"
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:21:9
--> @@INPUT@@:21:24
|
21 | - uses: docker://ubuntu
| ^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
| ^^^^^^ image is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
warning[unpinned-uses]: unpinned action reference
--> @@INPUT@@:27:9
--> @@INPUT@@:27:24
|
27 | - uses: docker://ghcr.io/pypa/gh-action-pypi-publish
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a tag, branch, or hash ref
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ image is not pinned to a tag, branch, or hash ref
|
= note: audit confidence High
@ -71,18 +71,18 @@ fn test_action_pedantic() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:7
--> @@INPUT@@:12:13
|
12 | uses: asdf-vm/actions/setup@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:15:7
--> @@INPUT@@:15:13
|
15 | uses: asdf-vm/actions/setup@main
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -114,7 +114,7 @@ fn test_issue_659_repro() -> Result<()> {
.input(input_under_test("unpinned-uses/issue-659-repro.yml"))
.args(["--pedantic"])
.run()?,
@r"No findings to report. Good job!"
@"No findings to report. Good job!"
);
Ok(())
@ -129,10 +129,10 @@ fn test_default_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:22:9
--> @@INPUT@@:22:15
|
22 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -155,42 +155,42 @@ fn test_hash_pin_everything_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:9
--> @@INPUT@@:12:15
|
12 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:14:9
--> @@INPUT@@:14:15
|
14 | - uses: actions/checkout@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:22:9
--> @@INPUT@@:22:15
|
22 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:9
--> @@INPUT@@:24:15
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:9
--> @@INPUT@@:26:15
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -226,26 +226,26 @@ fn test_composite_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:9
--> @@INPUT@@:12:15
|
12 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy)
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by actions/setup-python policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:9
--> @@INPUT@@:24:15
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:9
--> @@INPUT@@:26:15
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
@ -265,18 +265,18 @@ fn test_composite_config_2() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:9
--> @@INPUT@@:24:15
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/init policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:9
--> @@INPUT@@:26:15
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by github/codeql-action/upload-sarif policy)
|
= note: audit confidence High
@ -296,42 +296,42 @@ fn test_empty_config() -> Result<()> {
.run()?,
@r"
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:12:9
--> @@INPUT@@:12:15
|
12 | - uses: actions/setup-python@v4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:14:9
--> @@INPUT@@:14:15
|
14 | - uses: actions/checkout@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:22:9
--> @@INPUT@@:22:15
|
22 | - uses: pypa/gh-action-pypi-publish@release/v1
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:24:9
--> @@INPUT@@:24:15
|
24 | - uses: github/codeql-action/init@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High
error[unpinned-uses]: unpinned action reference
--> @@INPUT@@:26:9
--> @@INPUT@@:26:15
|
26 | - uses: github/codeql-action/upload-sarif@v3
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ action is not pinned to a hash (required by blanket policy)
|
= note: audit confidence High

View file

@ -357,7 +357,7 @@ fn test_issue_1191_repro() -> Result<()> {
"use-trusted-publishing/issue-1191-repro.yml"
))
.run()?,
@r"No findings to report. Good job! (3 suppressed)"
@"No findings to report. Good job! (3 suppressed)"
);
Ok(())
@ -406,3 +406,106 @@ fn test_nuget_push() -> Result<()> {
Ok(())
}
/// The `use-trusted-publishing` audit should flag `gem push` invocations
/// as candidates for trusted publishing, covering the plain form,
/// the `bundle exec gem push` form, and a multi-line form split with
/// `\` continuations inside a `run: |` block scalar.
#[test]
fn test_gem_push() -> Result<()> {
// Each finding annotates both the step (`---`) and the offending
// command span (`^^^`); the continuation case spans lines 20-22.
insta::assert_snapshot!(
zizmor()
.input(input_under_test("use-trusted-publishing/gem-push.yml"))
.run()?,
@r"
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:12:14
|
12 | run: gem push foo-0.1.0.gem
| --- ^^^^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:15:14
|
15 | run: bundle exec gem push foo-0.1.0.gem
| --- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:20:11
|
19 | run: |
| --- this step
20 | / gem \
21 | | push \
22 | | foo-0.1.0.gem
| |_________________________^ this command
|
= note: audit confidence High
5 findings (2 suppressed): 3 informational, 0 low, 0 medium, 0 high
"
);
Ok(())
}
/// The `use-trusted-publishing` audit should flag `twine upload`
/// invocations as candidates for trusted publishing, covering the
/// plain form, `python -m twine`, a `\`-continued multi-line form,
/// and an ephemeral `pipx run twine==6.1.0` invocation.
#[test]
fn test_twine_upload() -> Result<()> {
// Each finding annotates both the step (`---`) and the offending
// command span (`^^^`); the continuation case spans lines 19-22.
insta::assert_snapshot!(
zizmor()
.input(input_under_test("use-trusted-publishing/twine-upload.yml"))
.run()?,
@r"
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:12:14
|
12 | run: twine upload dist/*
| --- ^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:15:14
|
15 | run: python -m twine upload dist/*
| --- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command
| |
| this step
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:19:11
|
18 | run: |
| --- this step
19 | / python3.10 -m \
20 | | twine \
21 | | upload \
22 | | dist/*
| |__________________^ this command
|
= note: audit confidence High
info[use-trusted-publishing]: prefer trusted publishing for authentication
--> @@INPUT@@:26:11
|
25 | run: |
| --- this step
26 | pipx run twine==6.1.0 upload dist/*
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this command
|
= note: audit confidence High
6 findings (2 suppressed): 4 informational, 0 low, 0 medium, 0 high
"
);
Ok(())
}

View file

@ -42,28 +42,37 @@ pub struct Zizmor {
stdin: Option<String>,
unbuffer: bool,
offline: bool,
gh_token: bool,
inputs: Vec<String>,
config: Option<String>,
no_config: bool,
output: OutputMode,
expects_failure: bool,
show_audit_urls: bool,
}
impl Zizmor {
/// Create a new zizmor runner.
pub fn new() -> Self {
let cmd = Command::new(cargo::cargo_bin!());
let mut cmd = Command::new(cargo::cargo_bin!());
// Our child `zizmor` process starts with a clean environment, to
// ensure we explicitly test interactions with things like `CI`
// and `GH_TOKEN`.
cmd.env_clear();
Self {
cmd,
stdin: None,
unbuffer: false,
offline: true,
gh_token: true,
inputs: vec![],
config: None,
no_config: false,
output: OutputMode::Stdout,
expects_failure: false,
show_audit_urls: false,
}
}
@ -82,11 +91,6 @@ impl Zizmor {
self
}
pub fn unsetenv(mut self, key: &str) -> Self {
self.cmd.env_remove(key);
self
}
pub fn input(mut self, input: impl Into<String>) -> Self {
self.inputs.push(input.into());
self
@ -112,6 +116,11 @@ impl Zizmor {
self
}
pub fn gh_token(mut self, flag: bool) -> Self {
self.gh_token = flag;
self
}
pub fn output(mut self, output: OutputMode) -> Self {
self.output = output;
self
@ -125,6 +134,11 @@ impl Zizmor {
self
}
pub fn show_audit_urls(mut self, flag: bool) -> Self {
self.show_audit_urls = flag;
self
}
pub fn working_dir(mut self, dir: impl Into<String>) -> Self {
self.cmd.current_dir(dir.into());
self
@ -140,7 +154,12 @@ impl Zizmor {
} else {
// If we're running in online mode, we pre-assert the
// presence of GH_TOKEN to make configuration failures more obvious.
std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?;
let token =
std::env::var("GH_TOKEN").context("online tests require GH_TOKEN to be set")?;
if self.gh_token {
self.cmd.env("GH_TOKEN", token);
}
}
if self.no_config && self.config.is_some() {
@ -165,6 +184,12 @@ impl Zizmor {
self.cmd.arg("--no-progress");
}
if self.show_audit_urls {
self.cmd.arg("--show-audit-urls=always");
} else {
self.cmd.arg("--show-audit-urls=never");
}
for input in &self.inputs {
self.cmd.arg(input);
}

Some files were not shown because too many files have changed in this diff. Show more