Compare commits

...

81 Commits

Author SHA1 Message Date
Michael B. Gale 6531f80d49 Document regular expressions 2026-03-05 16:51:15 +00:00
Michael B. Gale b1b5550715 Merge pull request #3529 from github/mbg/ts/sync-back
Convert `sync_back.py` to TypeScript
2026-03-05 12:36:22 +00:00
Henry Mercer b6dfacb528 Merge pull request #3542 from github/henrymercer/parallel-unit-tests
Run some unit tests in parallel
2026-03-04 16:07:10 +00:00
Henry Mercer 6123416ead Merge remote-tracking branch 'origin/main' into henrymercer/parallel-unit-tests 2026-03-04 15:12:33 +01:00
Henry Mercer a6594f96a3 Merge pull request #3540 from github/henrymercer/stub-actions-vars
Testing: Provide default value for more environment variables in `setupActionsVars`
2026-03-04 13:27:40 +00:00
Henry Mercer 71d7981285 Address review comments 2026-03-04 13:27:59 +01:00
Henry Mercer e9e9733cb5 Merge branch 'main' into henrymercer/stub-actions-vars 2026-03-04 13:26:43 +01:00
Henry Mercer 8e17ec94b4 Merge branch 'main' into henrymercer/parallel-unit-tests 2026-03-04 13:25:01 +01:00
Henry Mercer aae94187c1 Fix test name 2026-03-04 13:09:10 +01:00
Henry Mercer 36148cccb9 Run more actions util tests serially 2026-03-04 13:08:37 +01:00
Henry Mercer a5b959e10d Merge pull request #3537 from github/henrymercer/overlay-status-record-job
Record the job that published an overlay status
2026-03-04 11:49:52 +00:00
Michael B. Gale d1ac77f26d Merge pull request #3527 from github/mbg/start-proxy/remove-unused
Remove unused registry types from `LANGUAGE_TO_REGISTRY_TYPE`
2026-03-04 11:48:08 +00:00
Henry Mercer 675af55c60 Run some unit tests in parallel 2026-03-04 12:40:22 +01:00
Henry Mercer 281b265245 Address review comments 2026-03-04 12:16:54 +01:00
Henry Mercer 335f08ccc6 Merge pull request #3539 from github/update-supported-enterprise-server-versions
Update supported GitHub Enterprise Server versions
2026-03-04 11:01:18 +00:00
github-actions[bot] 4593dc2f8f Update supported GitHub Enterprise Server versions 2026-03-04 00:23:29 +00:00
Henry Mercer d4f1b14259 Use new setupActionsVars pattern 2026-03-03 19:24:18 +01:00
Henry Mercer 8a884bdb36 Extend setupActionsVars 2026-03-03 19:09:57 +01:00
Henry Mercer 129d771399 Add check run ID 2026-03-03 19:04:04 +01:00
Henry Mercer a05f541a6e Record the job that published an overlay status
This makes it easier to find the job that produced the status.
2026-03-03 16:56:18 +01:00
Michael B. Gale 40f0fa95c4 Merge pull request #3535 from github/mbg/ci/no-skip-overlay
Disable overlay status check for CS config test workflow
2026-03-03 12:26:50 +00:00
Michael B. Gale 9bf973324f Merge pull request #3528 from github/mbg/refactor/sarif
Refactor SARIF-related types and functions into a separate module
2026-03-03 12:10:30 +00:00
Michael B. Gale 1175fd9b5d Add some docs to some newer overlay Features
To make it easier to see what they do at a glance
2026-03-03 12:06:46 +00:00
Michael B. Gale 1faad73c9a Disable resource checks as well 2026-03-03 12:06:46 +00:00
Michael B. Gale 6b246e4709 Disable overlay status check for CS config test workflow 2026-03-03 11:53:33 +00:00
Michael B. Gale 0a5b95cdcc Update pr-checks README 2026-03-03 11:45:18 +00:00
Michael B. Gale 77fc89c78d Remove python files from pr-checks 2026-03-03 11:42:49 +00:00
Michael B. Gale bf9bf1c027 Remove python setup from rebuild workflow 2026-03-03 11:41:24 +00:00
Michael B. Gale 24fa947692 Update pr-checks to run new tests 2026-03-03 11:40:54 +00:00
Michael B. Gale aaed7b75f9 Merge remote-tracking branch 'origin/main' into mbg/ts/sync-back 2026-03-03 11:36:59 +00:00
Michael B. Gale 2a2f4c30a1 Add docs for automationId 2026-03-03 11:35:43 +00:00
Michael B. Gale 6d060bbaa1 Return Partial<Log> from readSarifFile
Our previous definition had `tools` as a mandatory field, so this
also makes some changes to deal with the case where that may
be `undefined` by treating it as equivalent to `[]`.
2026-03-03 11:34:01 +00:00
Michael B. Gale 28b449d8c7 Improve version handling in combineSarifFiles 2026-03-03 11:18:47 +00:00
Michael B. Gale 1721ce7afd Address minor review comments 2026-03-03 11:05:37 +00:00
Michael B. Gale ff2daa0aba Merge pull request #3526 from github/mbg/pr-checks/ts
Convert `sync.py` to TypeScript
2026-03-03 10:49:56 +00:00
Michael B. Gale b43d146e37 Do not alias types 2026-03-02 20:47:19 +00:00
Michael B. Gale 66e08d2b3f Make entries in new mapping mandatory 2026-03-02 18:08:53 +00:00
Michael B. Gale 9a31859f78 Use @types/sarif 2026-03-02 18:04:11 +00:00
Michael B. Gale ae9cb02459 Add dependency on @types/sarif 2026-03-02 17:41:41 +00:00
Michael B. Gale c0b22b827b Replace filename in CONTRIBUTING.md 2026-03-02 15:40:32 +00:00
Michael B. Gale d09af9d5b8 Type workflow input names 2026-03-02 15:39:46 +00:00
Michael B. Gale e7ec96cee0 Remove isTruthy: consistently use booleans in templates 2026-03-02 15:34:11 +00:00
Michael B. Gale 41d5a06bfd Address basic style comments 2026-03-02 15:32:30 +00:00
Michael B. Gale 4ca06280ba Merge remote-tracking branch 'origin/main' into mbg/pr-checks/ts 2026-03-02 14:03:56 +00:00
Michael B. Gale a6892dcba5 Use sync_back.ts in rebuild workflow 2026-03-01 16:04:35 +00:00
Michael B. Gale 8eb0202e9d Port tests 2026-03-01 16:04:35 +00:00
Michael B. Gale dd779fa7d3 Add updateTemplateFiles 2026-03-01 16:04:35 +00:00
Michael B. Gale f05cfae018 Add updateSyncTs 2026-03-01 16:04:35 +00:00
Michael B. Gale e1b83ccb74 Add scanGeneratedWorkflows 2026-03-01 16:04:35 +00:00
Michael B. Gale 6a6bd778b6 Add initial sync_back.ts script 2026-03-01 16:04:35 +00:00
Michael B. Gale f0f92a1dc8 Remove sync.py 2026-03-01 16:03:47 +00:00
Michael B. Gale e931a2475a Replace remaining uses of sync.py 2026-03-01 16:03:35 +00:00
Michael B. Gale 8bfaf96434 Run npm ci in actions 2026-03-01 15:20:30 +00:00
Michael B. Gale 8a1cd7656d Put change behind a FF 2026-03-01 15:07:47 +00:00
Michael B. Gale 3b16d31abc Delete unused fixInvalidNotifications function 2026-03-01 14:26:41 +00:00
Michael B. Gale 40aec383a1 Move more SARIF helpers to sarif module 2026-03-01 14:22:49 +00:00
Michael B. Gale 2fce45b8e6 Add wrapper around JSON.parse to sarif module 2026-03-01 14:10:25 +00:00
Michael B. Gale d7cfd19fb8 Move SARIF types out of util.ts 2026-03-01 13:42:46 +00:00
Michael B. Gale 68d73442fa Remove unused registry types from LANGUAGE_TO_REGISTRY_TYPE 2026-02-28 23:24:41 +00:00
Michael B. Gale f91cab1409 Adjust quotes and re-generate workflows 2026-02-28 18:13:05 +00:00
Michael B. Gale 5876a93a5f Switch sync.sh script to only use sync.ts 2026-02-28 17:58:00 +00:00
Michael B. Gale 0ea8490473 Switch from js-yaml to yaml for better output formatting 2026-02-28 17:55:41 +00:00
Michael B. Gale a85af80f34 Generate and write collections 2026-02-28 16:47:22 +00:00
Michael B. Gale 47671ab7aa Track collections 2026-02-28 16:46:47 +00:00
Michael B. Gale 96e6b655c1 Add tool-specific setup steps 2026-02-28 16:32:32 +00:00
Michael B. Gale 57c7bc6885 Add analysisKinds 2026-02-28 16:32:32 +00:00
Michael B. Gale d52917b510 Add useAllPlatformBundle 2026-02-28 16:32:32 +00:00
Michael B. Gale b948e562f4 Add basic job steps 2026-02-28 16:32:31 +00:00
Michael B. Gale c889588a2c Add env, container, and services 2026-02-28 16:32:31 +00:00
Michael B. Gale b77ebbe4d8 Add CODEQL_ACTION_TEST_MODE 2026-02-28 16:32:31 +00:00
Michael B. Gale 9a0fe9e006 Add permissions 2026-02-28 16:32:31 +00:00
Michael B. Gale dd78add36d Add matrix to job 2026-02-28 16:32:31 +00:00
Michael B. Gale e62a268a73 Add job construction 2026-02-28 16:32:31 +00:00
Michael B. Gale 63b4776d64 Add matrix construction 2026-02-28 16:32:30 +00:00
Michael B. Gale 6932b1cda2 Add concurrency settings 2026-02-28 16:32:30 +00:00
Michael B. Gale 40aefb0faf Add basic workflow construction 2026-02-28 16:32:30 +00:00
Michael B. Gale efe64e03d9 Add isTruthy helper 2026-02-28 16:32:30 +00:00
Michael B. Gale 898d46e783 Strip trailing whitespace in output 2026-02-28 16:32:30 +00:00
Michael B. Gale 04c1e601ab Add defaultTestVersions constant 2026-02-28 16:18:04 +00:00
Michael B. Gale 2f77cd04d4 Add specification types 2026-02-28 16:06:14 +00:00
Michael B. Gale c7e378f003 Scaffold project for sync.ts script 2026-02-28 15:58:47 +00:00
117 changed files with 7903 additions and 6332 deletions
+2 -3
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
all-platform-bundle:
strategy:
@@ -95,7 +94,7 @@ jobs:
- id: init
uses: ./../action/init
with:
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
+6 -6
View File
@@ -87,16 +87,16 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
post-processed-sarif-path: ${{ runner.temp }}/post-processed
post-processed-sarif-path: '${{ runner.temp }}/post-processed'
- name: Upload SARIF files
uses: actions/upload-artifact@v6
with:
name: |
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/results/*.sarif
path: '${{ runner.temp }}/results/*.sarif'
retention-days: 7
- name: Upload post-processed SARIF
@@ -104,7 +104,7 @@ jobs:
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/post-processed
path: '${{ runner.temp }}/post-processed'
retention-days: 7
if-no-files-found: error
@@ -112,7 +112,7 @@ jobs:
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
EXPECT_PRESENT: 'false'
with:
script: ${{ env.CHECK_SCRIPT }}
@@ -120,7 +120,7 @@ jobs:
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.quality.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.quality.sarif'
EXPECT_PRESENT: 'true'
with:
script: ${{ env.CHECK_SCRIPT }}
+4 -6
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
analyze-ref-input:
strategy:
@@ -107,13 +106,12 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
env:
CODEQL_ACTION_TEST_MODE: true
+1 -1
View File
@@ -82,7 +82,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
env:
# Explicitly disable the CLR tracer.
# Explicitly disable the CLR tracer.
COR_ENABLE_PROFILING: ''
COR_PROFILER: ''
COR_PROFILER_PATH_64: ''
@@ -42,8 +42,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}}
group: autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}}
jobs:
autobuild-direct-tracing-with-working-dir:
strategy:
+1 -1
View File
@@ -97,7 +97,7 @@ jobs:
id: init
with:
build-mode: autobuild
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
+2 -3
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
build-mode-manual:
strategy:
@@ -92,7 +91,7 @@ jobs:
id: init
with:
build-mode: manual
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
+2 -2
View File
@@ -64,7 +64,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -77,7 +77,7 @@ jobs:
exit 1
fi
# The latest nightly supports omitting the autobuild Action when the build mode is specified.
# The latest nightly supports omitting the autobuild Action when the build mode is specified.
- uses: ./../action/autobuild
if: matrix.version != 'nightly-latest'
+1 -1
View File
@@ -68,7 +68,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
+1 -1
View File
@@ -66,7 +66,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
if: ${{ !contains(steps.init.outputs.codeql-version, '+') }}
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true
+1 -1
View File
@@ -67,7 +67,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
+3 -3
View File
@@ -67,18 +67,18 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check config properties appear in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
with:
script: |
const fs = require('fs');
+3 -3
View File
@@ -78,18 +78,18 @@ jobs:
--ready-for-status-page
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check diagnostics appear in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
with:
script: |
const fs = require('fs');
+3 -4
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
export-file-baseline-information:
strategy:
@@ -101,12 +100,12 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Upload SARIF
uses: actions/upload-artifact@v6
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check results
run: |
+1 -2
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
go-custom-queries:
strategy:
@@ -77,7 +77,7 @@ jobs:
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
# Deliberately change Go after the `init` step
# Deliberately change Go after the `init` step
- uses: actions/setup-go@v6
with:
go-version: '1.20'
@@ -85,12 +85,12 @@ jobs:
run: go build main.go
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check diagnostic appears in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/go.sarif
SARIF_PATH: '${{ runner.temp }}/results/go.sarif'
with:
script: |
const fs = require('fs');
@@ -42,8 +42,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}}
group: go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}}
jobs:
go-indirect-tracing-workaround-no-file-program:
strategy:
@@ -87,12 +86,12 @@ jobs:
run: go build main.go
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check diagnostic appears in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/go.sarif
SARIF_PATH: '${{ runner.temp }}/results/go.sarif'
with:
script: |
const fs = require('fs');
+1 -2
View File
@@ -50,7 +50,6 @@ jobs:
permissions:
contents: read
packages: read
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -66,7 +65,7 @@ jobs:
- name: Init with registries
uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
tools: ${{ steps.prepare-test.outputs.tools-url }}
config-file: ./.github/codeql/codeql-config-registries.yml
languages: javascript
+2 -2
View File
@@ -65,12 +65,12 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Upload SARIF
uses: actions/upload-artifact@v6
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check results
run: |
+1 -1
View File
@@ -63,7 +63,7 @@ jobs:
languages: C#,java-kotlin,swift,typescript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check languages
- name: 'Check languages'
run: |
expected_languages="csharp,java,swift,javascript"
actual_languages=$(jq -r '.languages | join(",")' "$RUNNER_TEMP"/config)
+2 -3
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
local-bundle:
strategy:
@@ -109,7 +108,7 @@ jobs:
- id: init
uses: ./../action/init
with:
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ./codeql-bundle-linux64.tar.zst
- name: Build code
+3 -5
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
multi-language-autodetect:
strategy:
@@ -144,9 +143,8 @@ jobs:
- uses: ./../action/init
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby'
|| '' }}
db-location: '${{ runner.temp }}/customDbLocation'
languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby' || '' }}
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
packaging-codescanning-config-inputs-js:
strategy:
@@ -116,7 +115,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -124,15 +123,14 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
+4 -6
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-config-inputs-js:
strategy:
@@ -101,7 +100,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -109,15 +108,14 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
+4 -6
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-config-js:
strategy:
@@ -101,22 +100,21 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging.yml
config-file: '.github/codeql/codeql-config-packaging.yml'
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
+4 -6
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-inputs-js:
strategy:
@@ -101,7 +100,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging2.yml
config-file: '.github/codeql/codeql-config-packaging2.yml'
languages: javascript
packs: codeql-testing/codeql-pack1@1.0.0, codeql-testing/codeql-pack2, codeql-testing/codeql-pack3:other-query.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -109,14 +108,13 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results
+2 -4
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
remote-config:
strategy:
@@ -109,8 +108,7 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
+1 -2
View File
@@ -84,8 +84,7 @@ jobs:
language: javascript-typescript
- name: Fail if JavaScript/TypeScript configuration present
if:
fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript
if: fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true
+3 -3
View File
@@ -99,7 +99,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -108,7 +108,7 @@ jobs:
- uses: ./../action/analyze
with:
skip-queries: true
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Assert No Results
@@ -119,7 +119,7 @@ jobs:
fi
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Assert Results
run: |
+2 -4
View File
@@ -71,8 +71,7 @@ jobs:
id: proxy
uses: ./../action/start-proxy
with:
registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json"
}]'
registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json" }]'
- name: Print proxy outputs
run: |
@@ -81,8 +80,7 @@ jobs:
echo "${{ steps.proxy.outputs.proxy_urls }}"
- name: Fail if proxy outputs are not set
if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port)
|| (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port) || (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true
+8 -15
View File
@@ -49,8 +49,7 @@ jobs:
if: github.triggering_actor != 'dependabot[bot]'
permissions:
contents: read
security-events: write # needed to upload the SARIF file
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -69,26 +68,20 @@ jobs:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Fail
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
continue-on-error: true
run: exit 1
- uses: ./analyze
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
if: false
with:
category: /test-codeql-version:${{ matrix.version }}
category: '/test-codeql-version:${{ matrix.version }}'
env:
# Internal-only environment variable used to indicate that the post-init Action
# should expect to upload a SARIF file for the failed run.
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
# Make sure the uploading SARIF files feature is enabled.
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
# Upload the failed SARIF file as an integration test of the API endpoint.
CODEQL_ACTION_TEST_MODE: false
# Mark telemetry for this workflow so it can be treated separately.
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
+1 -2
View File
@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
swift-custom-build:
strategy:
+2 -3
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
unset-environment:
strategy:
@@ -109,7 +108,7 @@ jobs:
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
+7 -9
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
upload-ref-sha-input:
strategy:
@@ -107,19 +106,18 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
env:
CODEQL_ACTION_TEST_MODE: true
+19 -22
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
upload-sarif:
strategy:
@@ -117,11 +116,11 @@ jobs:
analysis-kinds: ${{ matrix.analysis-kinds }}
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
output: ${{ runner.temp }}/results
@@ -130,15 +129,15 @@ jobs:
uses: ./../action/upload-sarif
id: upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
- name: Fail for missing output from `upload-sarif` step for `code-scanning`
- name: 'Fail for missing output from `upload-sarif` step for `code-scanning`'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Fail for missing output from `upload-sarif` step for `code-quality`
- name: 'Fail for missing output from `upload-sarif` step for `code-quality`'
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
run: exit 1
@@ -147,28 +146,26 @@ jobs:
id: upload-single-sarif-code-scanning
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.sarif
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
- name: Fail for missing output from `upload-single-sarif-code-scanning` step
if: contains(matrix.analysis-kinds, 'code-scanning') &&
!(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
- name: 'Fail for missing output from `upload-single-sarif-code-scanning` step'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Upload single SARIF file for Code Quality
uses: ./../action/upload-sarif
id: upload-single-sarif-code-quality
if: contains(matrix.analysis-kinds, 'code-quality')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
- name: Fail for missing output from `upload-single-sarif-code-quality` step
if: contains(matrix.analysis-kinds, 'code-quality') &&
!(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
- name: 'Fail for missing output from `upload-single-sarif-code-quality` step'
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
run: exit 1
- name: Change SARIF file extension
@@ -179,12 +176,12 @@ jobs:
id: upload-single-non-sarif
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
- name: Fail for missing output from `upload-single-non-sarif` step
- name: 'Fail for missing output from `upload-single-non-sarif` step'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
env:
+5 -5
View File
@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
with-checkout-path:
strategy:
@@ -80,6 +79,7 @@ jobs:
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
# This ensures we don't accidentally use the original checkout for any part of the test.
- name: Check out repository
uses: actions/checkout@v6
- name: Prepare test
@@ -109,8 +109,8 @@ jobs:
# Actions does not support deleting the current working directory, so we
# delete the contents of the directory instead.
rm -rf ./* .github .git
# Check out the actions repo again, but at a different location.
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
# Check out the actions repo again, but at a different location.
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
- uses: actions/checkout@v6
with:
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
@@ -119,7 +119,7 @@ jobs:
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
# it's enough to test one compiled language and one interpreted language
# it's enough to test one compiled language and one interpreted language
languages: csharp,javascript
source-root: x/y/z/some-path/tests/multi-language-repo
@@ -11,6 +11,8 @@ env:
CODEQL_ACTION_OVERLAY_ANALYSIS: true
CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false
CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true
CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_CHECK: false
CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS: true
on:
push:
+1 -6
View File
@@ -42,11 +42,6 @@ jobs:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Install dependencies
run: |
# Use the system Bash shell to ensure we can run commands like `npm ci`
@@ -68,7 +63,7 @@ jobs:
- name: Run pr-checks tests
if: always()
working-directory: pr-checks
run: python -m unittest discover
run: npm ci && npx tsx --test
- name: Lint
if: always() && matrix.os != 'windows-latest'
+3 -10
View File
@@ -73,24 +73,17 @@ jobs:
npm run lint -- --fix
npm run build
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Sync back version updates to generated workflows
# Only sync back versions on Dependabot update PRs
if: startsWith(env.HEAD_REF, 'dependabot/')
working-directory: pr-checks
run: |
python3 sync_back.py -v
npm ci
npx tsx sync_back.ts --verbose
- name: Generate workflows
working-directory: pr-checks
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml==0.17.31
python3 sync.py
run: ./sync.sh
- name: "Merge in progress: Finish merge and push"
if: steps.merge.outputs.merge-in-progress == 'true'
+1 -1
View File
@@ -19,7 +19,7 @@ if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
git diff
git status
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update"
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && ./sync.sh' to update"
echo "### Generated workflows diff" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
+1 -1
View File
@@ -92,7 +92,7 @@ We typically deprecate a version of CodeQL when the GitHub Enterprise Server (GH
1. Remove support for the old version of CodeQL.
- Bump `CODEQL_MINIMUM_VERSION` in `src/codeql.ts` to the new minimum version of CodeQL.
- Remove any code that is only needed to support the old version of CodeQL. This is often behind a version guard, so look for instances of version numbers between the old minimum version and the new minimum version in the codebase. A good place to start is the list of version numbers in `src/codeql.ts`.
- Update the default set of CodeQL test versions in `pr-checks/sync.py`.
- Update the default set of CodeQL test versions in `pr-checks/sync.ts`.
- Remove the old minimum version of CodeQL.
- Add the latest patch release for any new CodeQL minor version series that have shipped in GHES.
- Run the script to update the generated PR checks.
+1
View File
@@ -21,6 +21,7 @@ export default [
"build.mjs",
"eslint.config.mjs",
".github/**/*",
"pr-checks/**/*",
],
},
// eslint recommended config
+5
View File
@@ -159,6 +159,11 @@ inputs:
description: >-
Explicitly enable or disable caching of project build dependencies.
required: false
check-run-id:
description: >-
[Internal] The ID of the check run, as provided by the Actions runtime environment. Do not set this value manually.
default: ${{ job.check_run_id }}
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -161099,7 +161100,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -162226,6 +162227,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+154 -140
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var os5 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs17.existsSync(filePath)) {
if (!fs18.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs17.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
_a = fs17.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs17.promises.readlink(fsPath);
const result = yield fs18.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs17.constants.O_RDONLY;
exports2.READONLY = fs18.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -50403,7 +50404,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core15 = __importStar2(require_core());
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path16 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50457,7 +50458,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core15.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs17.promises.lstat(searchPath));
yield __await2(fs18.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50491,7 +50492,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs17.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50526,7 +50527,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs17.promises.stat(item.path);
stats = yield fs18.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50538,10 +50539,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs17.promises.lstat(item.path);
stats = yield fs18.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs17.promises.realpath(item.path);
const realPath = yield fs18.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50650,7 +50651,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core15 = __importStar2(require_core());
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path16 = __importStar2(require("path"));
@@ -50673,13 +50674,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs17.statSync(file).isDirectory()) {
if (fs18.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs17.createReadStream(file), hash2);
yield pipeline(fs18.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -52054,7 +52055,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52083,7 +52084,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs17.statSync(filePath).size;
return fs18.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52121,7 +52122,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs17.unlink)(filePath);
return util.promisify(fs18.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52163,7 +52164,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs17.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92320,7 +92321,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92431,7 +92432,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs17.createWriteStream(archivePath);
const writeStream = fs18.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92456,7 +92457,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs17.promises.open(archivePath, "w");
const archiveDescriptor = yield fs18.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92572,7 +92573,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs17.openSync(archivePath, "w");
const fd = fs18.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92590,12 +92591,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs17.writeFileSync(fd, result);
fs18.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs17.closeSync(fd);
fs18.closeSync(fd);
}
}
});
@@ -92917,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93052,7 +93053,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs17.openSync(archivePath, "r");
const fd = fs18.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93066,7 +93067,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs17.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, {
fd,
start,
end,
@@ -93077,7 +93078,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs17.closeSync(fd);
fs18.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os5 = require("os");
var cp = require("child_process");
var fs17 = require("fs");
var fs18 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os5.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs17.existsSync(lsbReleaseFile)) {
contents = fs17.readFileSync(lsbReleaseFile).toString();
} else if (fs17.existsSync(osReleaseFile)) {
contents = fs17.readFileSync(osReleaseFile).toString();
if (fs18.existsSync(lsbReleaseFile)) {
contents = fs18.readFileSync(lsbReleaseFile).toString();
} else if (fs18.existsSync(osReleaseFile)) {
contents = fs18.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core15 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os5 = __importStar2(require("os"));
var path16 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs17.existsSync(dest)) {
if (fs18.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs17.createWriteStream(dest));
yield pipeline(readStream, fs18.createWriteStream(dest));
core15.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source dir: ${sourceDir}`);
if (!fs17.statSync(sourceDir).isDirectory()) {
if (!fs18.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs17.readdirSync(sourceDir)) {
for (const itemName of fs18.readdirSync(sourceDir)) {
const s = path16.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source file: ${sourceFile}`);
if (!fs17.statSync(sourceFile).isFile()) {
if (!fs18.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core15.debug(`checking cache: ${cachePath}`);
if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) {
if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) {
core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os5.arch();
const toolPath = path16.join(_getCacheDirectory(), toolName);
if (fs17.existsSync(toolPath)) {
const children = fs17.readdirSync(toolPath);
if (fs18.existsSync(toolPath)) {
const children = fs18.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path16.join(toolPath, child, arch2 || "");
if (fs17.existsSync(fullPath) && fs17.existsSync(`${fullPath}.complete`)) {
if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs17.writeFileSync(markerPath, "");
fs18.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103232,7 +103233,7 @@ __export(analyze_action_exports, {
runPromise: () => runPromise
});
module.exports = __toCommonJS(analyze_action_exports);
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var import_path4 = __toESM(require("path"));
var import_perf_hooks3 = require("perf_hooks");
var core14 = __toESM(require_core());
@@ -103261,21 +103262,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs17 = options.fs || await import("node:fs/promises");
const fs18 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105916,17 +105917,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run2 of sarif.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
@@ -107986,6 +107976,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -111235,7 +111230,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs15 = __toESM(require("fs"));
var fs16 = __toESM(require("fs"));
var path14 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -112316,12 +112311,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run2 of sarif.runs || []) {
for (const run2 of sarifLog.runs || []) {
const artifacts = run2.artifacts || [];
for (const result of run2.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -112361,7 +112356,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -112396,36 +112391,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
};
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// src/sarif/index.ts
var fs15 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run2 of sarifFile.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs15.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
runs.push(...sarifLog?.runs || []);
}
return combinedSarif;
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run2) => run2.tool?.driver?.name === "CodeQL"
);
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run2) {
@@ -112438,10 +112445,13 @@ function createRunKey(run2) {
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run2 of sarifObject.runs) {
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run2 of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
@@ -112451,6 +112461,10 @@ function areAllRunsUnique(sarifObjects) {
}
return true;
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -112479,9 +112493,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs15.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -112534,27 +112546,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path14.resolve(tempDir, "combined-sarif");
fs15.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs15.mkdtempSync(path14.resolve(baseTempDir, "output-"));
fs16.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs16.mkdtempSync(path14.resolve(baseTempDir, "output-"));
const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs15.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarif.runs || []) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
run2.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -112577,7 +112589,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs15.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs16.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -112611,7 +112623,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs15.readdirSync(dir, { withFileTypes: true });
const entries = fs16.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path14.resolve(dir, entry.name));
@@ -112624,7 +112636,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs15.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs16.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -112671,9 +112683,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -112687,26 +112699,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -112733,6 +112745,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -112758,7 +112771,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs15.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs16.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -112769,14 +112782,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -112784,21 +112797,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -112815,12 +112828,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -112862,9 +112875,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs15.existsSync(outputDir)) {
fs15.mkdirSync(outputDir, { recursive: true });
} else if (!fs15.lstatSync(outputDir).isDirectory()) {
if (!fs16.existsSync(outputDir)) {
fs16.mkdirSync(outputDir, { recursive: true });
} else if (!fs16.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -112874,7 +112887,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs15.writeFileSync(outputFile, sarifPayload);
fs16.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -112972,9 +112985,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs || []) {
const id = run2?.automationDetails?.id;
const tool = run2.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -112993,15 +113006,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs) {
if (run2.results) {
run2.results = run2.results.filter((result) => {
const locations = [
@@ -113022,7 +113036,7 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// src/upload-sarif.ts
@@ -113107,7 +113121,7 @@ function doesGoExtractionOutputExist(config) {
"go" /* go */
);
const trapDirectory = import_path4.default.join(golangDbDirectory, "trap", "go" /* go */);
return fs16.existsSync(trapDirectory) && fs16.readdirSync(trapDirectory).some(
return fs17.existsSync(trapDirectory) && fs17.readdirSync(trapDirectory).some(
(fileName) => [
".trap",
".trap.gz",
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -102991,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -104275,6 +104276,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+377 -346
View File
File diff suppressed because it is too large Load Diff
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -103210,7 +103211,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -105521,6 +105522,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -102991,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -104266,6 +104267,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -103047,7 +103048,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -104163,6 +104164,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -161096,7 +161097,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -161632,6 +161633,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+27 -4
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -120955,6 +120956,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -121747,6 +121753,18 @@ var LANGUAGE_TO_REGISTRY_TYPE = {
rust: ["cargo_registry"],
go: ["goproxy_server", "git_source"]
};
var NEW_LANGUAGE_TO_REGISTRY_TYPE = {
actions: [],
cpp: [],
java: ["maven_repository"],
csharp: ["nuget_feed"],
javascript: [],
python: [],
ruby: [],
rust: [],
swift: [],
go: ["goproxy_server", "git_source"]
};
function getRegistryAddress(registry) {
if (isDefined2(registry.url)) {
return {
@@ -121764,8 +121782,9 @@ function getRegistryAddress(registry) {
);
}
}
function getCredentials(logger, registrySecrets, registriesCredentials, language) {
const registryTypeForLanguage = language ? LANGUAGE_TO_REGISTRY_TYPE[language] : void 0;
function getCredentials(logger, registrySecrets, registriesCredentials, language, skipUnusedRegistries = false) {
const registryMapping = skipUnusedRegistries ? NEW_LANGUAGE_TO_REGISTRY_TYPE : LANGUAGE_TO_REGISTRY_TYPE;
const registryTypeForLanguage = language ? registryMapping[language] : void 0;
let credentialsStr;
if (registriesCredentials !== void 0) {
logger.info(`Using registries_credentials input.`);
@@ -122262,11 +122281,15 @@ async function run(startedAt) {
);
const languageInput = getOptionalInput("language");
language = languageInput ? parseLanguage(languageInput) : void 0;
const skipUnusedRegistries = await features.getValue(
"start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */
);
const credentials = getCredentials(
logger,
getOptionalInput("registry_secrets"),
getOptionalInput("registries_credentials"),
language
language,
skipUnusedRegistries
);
if (credentials.length === 0) {
logger.info("No credentials found, skipping proxy setup.");
+156 -144
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var os2 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs12.existsSync(filePath)) {
if (!fs13.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs12.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, {
fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
_a = fs12.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs12.promises.readlink(fsPath);
const result = yield fs13.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs12.constants.O_RDONLY;
exports2.READONLY = fs13.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -47292,7 +47292,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -47341,6 +47341,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -50403,7 +50404,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core12 = __importStar2(require_core());
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path12 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50457,7 +50458,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core12.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs12.promises.lstat(searchPath));
yield __await2(fs13.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50491,7 +50492,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs12.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel));
const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50526,7 +50527,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs12.promises.stat(item.path);
stats = yield fs13.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50538,10 +50539,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs12.promises.lstat(item.path);
stats = yield fs13.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs12.promises.realpath(item.path);
const realPath = yield fs13.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50650,7 +50651,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var core12 = __importStar2(require_core());
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path12 = __importStar2(require("path"));
@@ -50673,13 +50674,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs12.statSync(file).isDirectory()) {
if (fs13.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs12.createReadStream(file), hash2);
yield pipeline(fs13.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -52054,7 +52055,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52083,7 +52084,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs12.statSync(filePath).size;
return fs13.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52121,7 +52122,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs12.unlink)(filePath);
return util.promisify(fs13.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52163,7 +52164,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs12.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92320,7 +92321,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92431,7 +92432,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs12.createWriteStream(archivePath);
const writeStream = fs13.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92456,7 +92457,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs12.promises.open(archivePath, "w");
const archiveDescriptor = yield fs13.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92572,7 +92573,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs12.openSync(archivePath, "w");
const fd = fs13.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92590,12 +92591,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs12.writeFileSync(fd, result);
fs13.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs12.closeSync(fd);
fs13.closeSync(fd);
}
}
});
@@ -92917,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({
var core12 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93052,7 +93053,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs12.openSync(archivePath, "r");
const fd = fs13.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93066,7 +93067,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs12.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, {
fd,
start,
end,
@@ -93077,7 +93078,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs12.closeSync(fd);
fs13.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
var fs12 = require("fs");
var fs13 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os2.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs12.existsSync(lsbReleaseFile)) {
contents = fs12.readFileSync(lsbReleaseFile).toString();
} else if (fs12.existsSync(osReleaseFile)) {
contents = fs12.readFileSync(osReleaseFile).toString();
if (fs13.existsSync(lsbReleaseFile)) {
contents = fs13.readFileSync(lsbReleaseFile).toString();
} else if (fs13.existsSync(osReleaseFile)) {
contents = fs13.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core12 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os2 = __importStar2(require("os"));
var path12 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs12.existsSync(dest)) {
if (fs13.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs12.createWriteStream(dest));
yield pipeline(readStream, fs13.createWriteStream(dest));
core12.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os2.arch();
core12.debug(`Caching tool ${tool} ${version} ${arch2}`);
core12.debug(`source dir: ${sourceDir}`);
if (!fs12.statSync(sourceDir).isDirectory()) {
if (!fs13.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs12.readdirSync(sourceDir)) {
for (const itemName of fs13.readdirSync(sourceDir)) {
const s = path12.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os2.arch();
core12.debug(`Caching tool ${tool} ${version} ${arch2}`);
core12.debug(`source file: ${sourceFile}`);
if (!fs12.statSync(sourceFile).isFile()) {
if (!fs13.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core12.debug(`checking cache: ${cachePath}`);
if (fs12.existsSync(cachePath) && fs12.existsSync(`${cachePath}.complete`)) {
if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) {
core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os2.arch();
const toolPath = path12.join(_getCacheDirectory(), toolName);
if (fs12.existsSync(toolPath)) {
const children = fs12.readdirSync(toolPath);
if (fs13.existsSync(toolPath)) {
const children = fs13.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path12.join(toolPath, child, arch2 || "");
if (fs12.existsSync(fullPath) && fs12.existsSync(`${fullPath}.complete`)) {
if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs12.writeFileSync(markerPath, "");
fs13.writeFileSync(markerPath, "");
core12.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103229,13 +103230,12 @@ var require_sarif_schema_2_1_0 = __commonJS({
// src/upload-lib.ts
var upload_lib_exports = {};
__export(upload_lib_exports, {
InvalidSarifUploadError: () => InvalidSarifUploadError,
buildPayload: () => buildPayload,
findSarifFilesInDir: () => findSarifFilesInDir,
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
populateRunAutomationDetails: () => populateRunAutomationDetails,
postProcessSarifFiles: () => postProcessSarifFiles,
readSarifFile: () => readSarifFile,
readSarifFileOrThrow: () => readSarifFileOrThrow,
shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest,
shouldShowCombineSarifFilesDeprecationWarning: () => shouldShowCombineSarifFilesDeprecationWarning,
@@ -103249,7 +103249,7 @@ __export(upload_lib_exports, {
writePostProcessedFiles: () => writePostProcessedFiles
});
module.exports = __toCommonJS(upload_lib_exports);
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var path11 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -103278,21 +103278,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs12 = options.fs || await import("node:fs/promises");
const fs13 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs12.lstat(itemPath, { bigint: true }) : await fs12.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs12.readdir(itemPath) : await fs12.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105932,17 +105932,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getCodeQLDatabasePath(config, language) {
return path.resolve(config.dbLocation, language);
}
@@ -107434,6 +107423,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -110204,12 +110198,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run of sarif.runs || []) {
for (const run of sarifLog.runs || []) {
const artifacts = run.artifacts || [];
for (const result of run.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -110249,7 +110243,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -110284,36 +110278,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
};
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// src/sarif/index.ts
var fs11 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run of sarifFile.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs11.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
runs.push(...sarifLog?.runs || []);
}
return combinedSarif;
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run) => run.tool?.driver?.name === "CodeQL"
);
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run) => run.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run) {
@@ -110326,10 +110332,13 @@ function createRunKey(run) {
automationId: run.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run of sarifObject.runs) {
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run));
if (keys.has(key)) {
return false;
@@ -110339,6 +110348,10 @@ function areAllRunsUnique(sarifObjects) {
}
return true;
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -110367,9 +110380,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs11.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -110422,27 +110433,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path11.resolve(tempDir, "combined-sarif");
fs11.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs11.mkdtempSync(path11.resolve(baseTempDir, "output-"));
fs12.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs12.mkdtempSync(path11.resolve(baseTempDir, "output-"));
const outputFile = path11.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs11.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run of sarif.runs || []) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === void 0) {
run.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -110465,7 +110476,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs11.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -110499,7 +110510,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs11.readdirSync(dir, { withFileTypes: true });
const entries = fs12.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path11.resolve(dir, entry.name));
@@ -110512,11 +110523,11 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
function getSarifFilePaths(sarifPath, isSarif) {
if (!fs11.existsSync(sarifPath)) {
if (!fs12.existsSync(sarifPath)) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
let sarifFiles;
if (fs11.lstatSync(sarifPath).isDirectory()) {
if (fs12.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath, isSarif);
if (sarifFiles.length === 0) {
throw new ConfigurationError(
@@ -110529,7 +110540,7 @@ function getSarifFilePaths(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs11.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -110576,9 +110587,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -110592,26 +110603,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -110638,6 +110649,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -110663,7 +110675,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs11.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -110674,14 +110686,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -110689,21 +110701,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -110750,12 +110762,12 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -110797,9 +110809,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs11.existsSync(outputDir)) {
fs11.mkdirSync(outputDir, { recursive: true });
} else if (!fs11.lstatSync(outputDir).isDirectory()) {
if (!fs12.existsSync(outputDir)) {
fs12.mkdirSync(outputDir, { recursive: true });
} else if (!fs12.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -110809,7 +110821,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs11.writeFileSync(outputFile, sarifPayload);
fs12.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -110907,9 +110919,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run of sarif.runs) {
for (const run of sarifLog.runs || []) {
const id = run?.automationDetails?.id;
const tool = run.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -110928,15 +110940,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run of sarif.runs) {
for (const run of sarifLog.runs) {
if (run.results) {
run.results = run.results.filter((result) => {
const locations = [
@@ -110957,17 +110970,16 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
InvalidSarifUploadError,
buildPayload,
findSarifFilesInDir,
getGroupedSarifFilePaths,
populateRunAutomationDetails,
postProcessSarifFiles,
readSarifFile,
readSarifFileOrThrow,
shouldConsiderConfigurationError,
shouldConsiderInvalidRequest,
shouldShowCombineSarifFilesDeprecationWarning,
+8 -2
View File
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -161096,7 +161097,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -161794,6 +161795,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+200 -186
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var os3 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs13.existsSync(filePath)) {
if (!fs14.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
fs14.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
_a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs14.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs13.promises.readlink(fsPath);
const result = yield fs14.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs13.constants.O_RDONLY;
exports2.READONLY = fs14.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -49106,7 +49107,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core14 = __importStar2(require_core());
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path13 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -49160,7 +49161,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core14.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs13.promises.lstat(searchPath));
yield __await2(fs14.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -49194,7 +49195,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel));
const childItems = (yield __await2(fs14.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -49229,7 +49230,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs13.promises.stat(item.path);
stats = yield fs14.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -49241,10 +49242,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs13.promises.lstat(item.path);
stats = yield fs14.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs13.promises.realpath(item.path);
const realPath = yield fs14.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -49353,7 +49354,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var core14 = __importStar2(require_core());
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path13 = __importStar2(require("path"));
@@ -49376,13 +49377,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs13.statSync(file).isDirectory()) {
if (fs14.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs13.createReadStream(file), hash2);
yield pipeline(fs14.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -50757,7 +50758,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -50786,7 +50787,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs13.statSync(filePath).size;
return fs14.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -50824,7 +50825,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs13.unlink)(filePath);
return util.promisify(fs14.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -50866,7 +50867,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs14.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -91023,7 +91024,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -91134,7 +91135,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs13.createWriteStream(archivePath);
const writeStream = fs14.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -91159,7 +91160,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs13.promises.open(archivePath, "w");
const archiveDescriptor = yield fs14.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -91275,7 +91276,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs13.openSync(archivePath, "w");
const fd = fs14.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -91293,12 +91294,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs13.writeFileSync(fd, result);
fs14.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs13.closeSync(fd);
fs14.closeSync(fd);
}
}
});
@@ -91620,7 +91621,7 @@ var require_cacheHttpClient = __commonJS({
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -91755,7 +91756,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs13.openSync(archivePath, "r");
const fd = fs14.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -91769,7 +91770,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs14.createReadStream(archivePath, {
fd,
start,
end,
@@ -91780,7 +91781,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs13.closeSync(fd);
fs14.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os3 = require("os");
var cp = require("child_process");
var fs13 = require("fs");
var fs14 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os3.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs13.existsSync(lsbReleaseFile)) {
contents = fs13.readFileSync(lsbReleaseFile).toString();
} else if (fs13.existsSync(osReleaseFile)) {
contents = fs13.readFileSync(osReleaseFile).toString();
if (fs14.existsSync(lsbReleaseFile)) {
contents = fs14.readFileSync(lsbReleaseFile).toString();
} else if (fs14.existsSync(osReleaseFile)) {
contents = fs14.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core14 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os3 = __importStar2(require("os"));
var path13 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs13.existsSync(dest)) {
if (fs14.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs13.createWriteStream(dest));
yield pipeline(readStream, fs14.createWriteStream(dest));
core14.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source dir: ${sourceDir}`);
if (!fs13.statSync(sourceDir).isDirectory()) {
if (!fs14.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs13.readdirSync(sourceDir)) {
for (const itemName of fs14.readdirSync(sourceDir)) {
const s = path13.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source file: ${sourceFile}`);
if (!fs13.statSync(sourceFile).isFile()) {
if (!fs14.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core14.debug(`checking cache: ${cachePath}`);
if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) {
if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) {
core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os3.arch();
const toolPath = path13.join(_getCacheDirectory(), toolName);
if (fs13.existsSync(toolPath)) {
const children = fs13.readdirSync(toolPath);
if (fs14.existsSync(toolPath)) {
const children = fs14.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path13.join(toolPath, child, arch2 || "");
if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) {
if (fs14.existsSync(fullPath) && fs14.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs13.writeFileSync(markerPath, "");
fs14.writeFileSync(markerPath, "");
core14.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103252,21 +103253,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs13 = options.fs || await import("node:fs/promises");
const fs14 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs14.lstat(itemPath, { bigint: true }) : await fs14.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs14.readdir(itemPath) : await fs14.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105906,17 +105907,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run2 of sarif.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getCodeQLDatabasePath(config, language) {
return path.resolve(config.dbLocation, language);
}
@@ -107147,6 +107137,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -107493,12 +107488,83 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) {
}
}
// src/sarif/index.ts
var fs5 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run2 of sarifFile.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs5.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
runs.push(...sarifLog?.runs || []);
}
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run2) {
return {
name: run2.tool?.driver?.name,
fullName: run2.tool?.driver?.fullName,
version: run2.tool?.driver?.version,
semanticVersion: run2.tool?.driver?.semanticVersion,
guid: run2.tool?.driver?.guid,
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run2 of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
}
keys.add(key);
}
}
return true;
}
// src/status-report.ts
var os = __toESM(require("os"));
var core9 = __toESM(require_core());
// src/config-utils.ts
var fs6 = __toESM(require("fs"));
var fs7 = __toESM(require("fs"));
var path7 = __toESM(require("path"));
// src/config/db-config.ts
@@ -107583,18 +107649,18 @@ function writeDiagnostic(config, language, diagnostic) {
}
// src/diff-informed-analysis-utils.ts
var fs5 = __toESM(require("fs"));
var fs6 = __toESM(require("fs"));
var path6 = __toESM(require("path"));
function getDiffRangesJsonFilePath() {
return path6.join(getTemporaryDirectory(), "pr-diff-range.json");
}
function readDiffRangesJsonFile(logger) {
const jsonFilePath = getDiffRangesJsonFilePath();
if (!fs5.existsSync(jsonFilePath)) {
if (!fs6.existsSync(jsonFilePath)) {
logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`);
return void 0;
}
const jsonContents = fs5.readFileSync(jsonFilePath, "utf8");
const jsonContents = fs6.readFileSync(jsonFilePath, "utf8");
logger.debug(
`Read pr-diff-range JSON file from ${jsonFilePath}:
${jsonContents}`
@@ -107643,10 +107709,10 @@ function getPathToParsedConfigFile(tempDir) {
}
async function getConfig(tempDir, logger) {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs6.existsSync(configFile)) {
if (!fs7.existsSync(configFile)) {
return void 0;
}
const configString = fs6.readFileSync(configFile, "utf8");
const configString = fs7.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
const config = JSON.parse(configString);
@@ -107890,7 +107956,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs12 = __toESM(require("fs"));
var fs13 = __toESM(require("fs"));
var path12 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -107898,7 +107964,7 @@ var core12 = __toESM(require_core());
var jsonschema2 = __toESM(require_lib2());
// src/codeql.ts
var fs10 = __toESM(require("fs"));
var fs11 = __toESM(require("fs"));
var path10 = __toESM(require("path"));
var core11 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
@@ -108146,7 +108212,7 @@ function wrapCliConfigurationError(cliError) {
}
// src/setup-codeql.ts
var fs9 = __toESM(require("fs"));
var fs10 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
@@ -108208,7 +108274,7 @@ var v4_default = v4;
// src/tar.ts
var import_child_process = require("child_process");
var fs7 = __toESM(require("fs"));
var fs8 = __toESM(require("fs"));
var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
@@ -108281,7 +108347,7 @@ async function isZstdAvailable(logger) {
}
}
async function extract(tarPath, dest, compressionMethod, tarVersion, logger) {
fs7.mkdirSync(dest, { recursive: true });
fs8.mkdirSync(dest, { recursive: true });
switch (compressionMethod) {
case "gzip":
return await toolcache.extractTar(tarPath, dest);
@@ -108365,7 +108431,7 @@ function inferCompressionMethod(tarPath) {
}
// src/tools-download.ts
var fs8 = __toESM(require("fs"));
var fs9 = __toESM(require("fs"));
var os2 = __toESM(require("os"));
var path8 = __toESM(require("path"));
var import_perf_hooks = require("perf_hooks");
@@ -108472,7 +108538,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat
};
}
async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) {
fs8.mkdirSync(dest, { recursive: true });
fs9.mkdirSync(dest, { recursive: true });
const agent = new import_http_client.HttpClient().getAgent(codeqlURL);
headers = Object.assign(
{ "User-Agent": "CodeQL Action" },
@@ -108509,7 +108575,7 @@ function getToolcacheDirectory(version) {
}
function writeToolcacheMarkerFile(extractedPath, logger) {
const markerFilePath = `${extractedPath}.complete`;
fs8.writeFileSync(markerFilePath, "");
fs9.writeFileSync(markerFilePath, "");
logger.info(`Created toolcache marker file ${markerFilePath}`);
}
function sanitizeUrlForStatusReport(url2) {
@@ -108644,7 +108710,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({
folder: toolcache3.find("CodeQL", version),
version
})).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version")));
})).filter(({ folder }) => fs10.existsSync(path9.join(folder, "pinned-version")));
if (candidates.length === 1) {
const candidate = candidates[0];
logger.debug(
@@ -109198,7 +109264,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"tools",
"tracing-config.lua"
);
return fs10.existsSync(tracingConfigPath);
return fs11.existsSync(tracingConfigPath);
},
async isScannedLanguage(language) {
return !await this.isTracedLanguage(language);
@@ -109678,7 +109744,7 @@ async function writeCodeScanningConfigFile(config, logger) {
logger.startGroup("Augmented user configuration file contents");
logger.info(dump(augmentedConfig));
logger.endGroup();
fs10.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
fs11.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
return codeScanningConfigFile;
}
var TRAP_CACHE_SIZE_MB = 1024;
@@ -109722,7 +109788,7 @@ async function getJobRunUuidSarifOptions(codeql) {
}
// src/fingerprints.ts
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var import_path2 = __toESM(require("path"));
// node_modules/long/index.js
@@ -110710,7 +110776,7 @@ async function hash(callback, filepath) {
}
updateHash(current);
};
const readStream = fs11.createReadStream(filepath, "utf8");
const readStream = fs12.createReadStream(filepath, "utf8");
for await (const data of readStream) {
for (let i = 0; i < data.length; ++i) {
processCharacter(data.charCodeAt(i));
@@ -110785,22 +110851,22 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
if (!import_path2.default.isAbsolute(uri)) {
uri = srcRootPrefix + uri;
}
if (!fs11.existsSync(uri)) {
if (!fs12.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return void 0;
}
if (fs11.statSync(uri).isDirectory()) {
if (fs12.statSync(uri).isDirectory()) {
logger.debug(`Unable to compute fingerprint for directory: ${uri}`);
return void 0;
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run2 of sarif.runs || []) {
for (const run2 of sarifLog.runs || []) {
const artifacts = run2.artifacts || [];
for (const result of run2.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -110840,7 +110906,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -110878,58 +110944,6 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs12.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
}
return combinedSarif;
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run2) => run2.tool?.driver?.name === "CodeQL"
);
});
}
function createRunKey(run2) {
return {
name: run2.tool?.driver?.name,
fullName: run2.tool?.driver?.fullName,
version: run2.tool?.driver?.version,
semanticVersion: run2.tool?.driver?.semanticVersion,
guid: run2.tool?.driver?.guid,
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run2 of sarifObject.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
}
keys.add(key);
}
}
return true;
}
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -110958,9 +110972,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs12.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -111013,27 +111025,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path12.resolve(tempDir, "combined-sarif");
fs12.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs12.mkdtempSync(path12.resolve(baseTempDir, "output-"));
fs13.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs13.mkdtempSync(path12.resolve(baseTempDir, "output-"));
const outputFile = path12.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs12.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarif.runs || []) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
run2.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -111056,7 +111068,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs13.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -111090,7 +111102,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs12.readdirSync(dir, { withFileTypes: true });
const entries = fs13.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path12.resolve(dir, entry.name));
@@ -111103,7 +111115,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -111150,9 +111162,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -111166,26 +111178,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs12.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -111212,6 +111224,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -111237,7 +111250,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs13.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -111248,14 +111261,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -111263,21 +111276,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -111294,12 +111307,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -111341,9 +111354,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs12.existsSync(outputDir)) {
fs12.mkdirSync(outputDir, { recursive: true });
} else if (!fs12.lstatSync(outputDir).isDirectory()) {
if (!fs13.existsSync(outputDir)) {
fs13.mkdirSync(outputDir, { recursive: true });
} else if (!fs13.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -111353,7 +111366,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs12.writeFileSync(outputFile, sarifPayload);
fs13.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -111451,9 +111464,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs || []) {
const id = run2?.automationDetails?.id;
const tool = run2.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -111472,15 +111485,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs) {
if (run2.results) {
run2.results = run2.results.filter((result) => {
const locations = [
@@ -111501,7 +111515,7 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// src/upload-sarif.ts
+8
View File
@@ -43,6 +43,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
@@ -2522,6 +2523,13 @@
"@types/node": "*"
}
},
"node_modules/@types/sarif": {
"version": "2.1.7",
"resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz",
"integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/semver": {
"version": "7.7.1",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.1.tgz",
+2 -1
View File
@@ -9,7 +9,7 @@
"lint": "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
"ava": "npm run transpile && ava --serial --verbose",
"ava": "npm run transpile && ava --verbose",
"test": "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
"transpile": "tsc --build --verbose"
@@ -58,6 +58,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
+1 -3
View File
@@ -1,3 +1 @@
env
__pycache__/
*.pyc
node_modules/
View File
@@ -5,7 +5,7 @@ description: >
autobuild Action.
operatingSystems: ["ubuntu", "windows"]
versions: ["linked", "nightly-latest"]
installJava: "true"
installJava: true
env:
CODEQL_ACTION_AUTOBUILD_BUILD_MODE_DIRECT_TRACING: true
steps:
+2 -2
View File
@@ -2,8 +2,8 @@ name: "Build mode autobuild"
description: "An end-to-end integration test of a Java repository built using 'build-mode: autobuild'"
operatingSystems: ["ubuntu", "windows"]
versions: ["linked", "nightly-latest"]
installJava: "true"
installYq: "true"
installJava: true
installYq: true
steps:
- name: Set up Java test repo configuration
run: |
+1 -1
View File
@@ -11,5 +11,5 @@ steps:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
if: ${{ !contains(steps.init.outputs.codeql-version, '+') }}
run: exit 1
+9 -9
View File
@@ -32,16 +32,16 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
- name: "Fail for missing output from `upload-sarif` step for `code-scanning`"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
- name: "Fail for missing output from `upload-sarif` step for `code-quality`"
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)"
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
run: exit 1
- name: Upload single SARIF file for Code Scanning
uses: ./../action/upload-sarif
id: upload-single-sarif-code-scanning
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -49,12 +49,12 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
- name: "Fail for missing output from `upload-single-sarif-code-scanning` step"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Upload single SARIF file for Code Quality
uses: ./../action/upload-sarif
id: upload-single-sarif-code-quality
if: "contains(matrix.analysis-kinds, 'code-quality')"
if: contains(matrix.analysis-kinds, 'code-quality')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -62,16 +62,16 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
- name: "Fail for missing output from `upload-single-sarif-code-quality` step"
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)"
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
run: exit 1
- name: Change SARIF file extension
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
- name: Upload single non-`.sarif` file
uses: ./../action/upload-sarif
id: upload-single-non-sarif
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -79,5 +79,5 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
- name: "Fail for missing output from `upload-single-non-sarif` step"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
+605
View File
@@ -0,0 +1,605 @@
{
"name": "pr-checks",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"dependencies": {
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^20.19.9",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz",
"integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz",
"integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz",
"integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz",
"integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz",
"integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz",
"integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz",
"integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz",
"integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz",
"integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz",
"integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz",
"integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz",
"integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz",
"integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz",
"integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz",
"integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz",
"integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz",
"integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz",
"integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz",
"integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz",
"integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz",
"integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openharmony-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz",
"integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz",
"integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz",
"integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz",
"integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz",
"integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@types/node": {
"version": "20.19.35",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.35.tgz",
"integrity": "sha512-Uarfe6J91b9HAUXxjvSOdiO2UPOKLm07Q1oh0JHxoZ1y8HoqxDAu3gVrsrOHeiio0kSsoVBt4wFrKOm0dKxVPQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/esbuild": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
"integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.27.3",
"@esbuild/android-arm": "0.27.3",
"@esbuild/android-arm64": "0.27.3",
"@esbuild/android-x64": "0.27.3",
"@esbuild/darwin-arm64": "0.27.3",
"@esbuild/darwin-x64": "0.27.3",
"@esbuild/freebsd-arm64": "0.27.3",
"@esbuild/freebsd-x64": "0.27.3",
"@esbuild/linux-arm": "0.27.3",
"@esbuild/linux-arm64": "0.27.3",
"@esbuild/linux-ia32": "0.27.3",
"@esbuild/linux-loong64": "0.27.3",
"@esbuild/linux-mips64el": "0.27.3",
"@esbuild/linux-ppc64": "0.27.3",
"@esbuild/linux-riscv64": "0.27.3",
"@esbuild/linux-s390x": "0.27.3",
"@esbuild/linux-x64": "0.27.3",
"@esbuild/netbsd-arm64": "0.27.3",
"@esbuild/netbsd-x64": "0.27.3",
"@esbuild/openbsd-arm64": "0.27.3",
"@esbuild/openbsd-x64": "0.27.3",
"@esbuild/openharmony-arm64": "0.27.3",
"@esbuild/sunos-x64": "0.27.3",
"@esbuild/win32-arm64": "0.27.3",
"@esbuild/win32-ia32": "0.27.3",
"@esbuild/win32-x64": "0.27.3"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/get-tsconfig": {
"version": "4.13.6",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz",
"integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==",
"dev": true,
"license": "MIT",
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
},
"funding": {
"url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
}
},
"node_modules/resolve-pkg-maps": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
"node_modules/tsx": {
"version": "4.21.0",
"resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz",
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "~0.27.0",
"get-tsconfig": "^4.7.5"
},
"bin": {
"tsx": "dist/cli.mjs"
},
"engines": {
"node": ">=18.0.0"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
},
"node_modules/yaml": {
"version": "2.8.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
"license": "ISC",
"bin": {
"yaml": "bin.mjs"
},
"engines": {
"node": ">= 14.6"
},
"funding": {
"url": "https://github.com/sponsors/eemeli"
}
}
}
}
+12
View File
@@ -0,0 +1,12 @@
{
"private": true,
"description": "Dependencies for the sync.ts",
"dependencies": {
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^20.19.9",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
}
+4 -4
View File
@@ -6,9 +6,9 @@ to one of the files in this directory.
## Updating workflows
Run `./sync.sh` to invoke the workflow generator and re-generate the workflow files in `.github/workflows/` based on the templates in `pr-checks/checks/`.
Alternatively, you can use `just`:
1. Install https://github.com/casey/just by whichever way you prefer.
2. Run `just update-pr-checks` in your terminal.
### If you don't want to install `just`
Manually run each step in the `justfile`.
-402
View File
@@ -1,402 +0,0 @@
#!/usr/bin/env python
import ruamel.yaml
from ruamel.yaml.scalarstring import SingleQuotedScalarString, LiteralScalarString
import pathlib
import os
# The default set of CodeQL Bundle versions to use for the PR checks.
defaultTestVersions = [
# The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts`
"stable-v2.17.6",
# The last CodeQL release in the 2.18 series.
"stable-v2.18.4",
# The last CodeQL release in the 2.19 series.
"stable-v2.19.4",
# The last CodeQL release in the 2.20 series.
"stable-v2.20.7",
# The last CodeQL release in the 2.21 series.
"stable-v2.21.4",
# The last CodeQL release in the 2.22 series.
"stable-v2.22.4",
# The default version of CodeQL for Dotcom, as determined by feature flags.
"default",
# The version of CodeQL shipped with the Action in `defaults.json`. During the release process
# for a new CodeQL release, there will be a period of time during which this will be newer than
# the default version on Dotcom.
"linked",
# A nightly build directly from the our private repo, built in the last 24 hours.
"nightly-latest"
]
# When updating the ruamel.yaml version here, update the PR check in
# `.github/workflows/pr-checks.yml` too.
header = """# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
"""
def is_truthy(value):
if isinstance(value, str):
return value.lower() == 'true'
return bool(value)
class NonAliasingRTRepresenter(ruamel.yaml.representer.RoundTripRepresenter):
def ignore_aliases(self, data):
return True
def writeHeader(checkStream):
    """Write the module-level 'generated file' warning header to `checkStream`."""
    checkStream.write(header)
yaml = ruamel.yaml.YAML()
yaml.Representer = NonAliasingRTRepresenter
yaml.indent(mapping=2, sequence=4, offset=2)
this_dir = pathlib.Path(__file__).resolve().parent
allJobs = {}
collections = {}
for file in sorted((this_dir / 'checks').glob('*.yml')):
with open(file, 'r') as checkStream:
checkSpecification = yaml.load(checkStream)
matrix = []
workflowInputs = {}
if 'inputs' in checkSpecification:
workflowInputs = checkSpecification['inputs']
for version in checkSpecification.get('versions', defaultTestVersions):
if version == "latest":
raise ValueError('Did not recognize "version: latest". Did you mean "version: linked"?')
runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]
operatingSystems = checkSpecification.get('operatingSystems', ["ubuntu"])
for operatingSystem in operatingSystems:
runnerImagesForOs = [image for image in runnerImages if image.startswith(operatingSystem)]
for runnerImage in runnerImagesForOs:
matrix.append({
'os': runnerImage,
'version': version
})
useAllPlatformBundle = "false" # Default to false
if checkSpecification.get('useAllPlatformBundle'):
useAllPlatformBundle = checkSpecification['useAllPlatformBundle']
if 'analysisKinds' in checkSpecification:
newMatrix = []
for matrixInclude in matrix:
for analysisKind in checkSpecification.get('analysisKinds'):
newMatrix.append(
matrixInclude |
{ 'analysis-kinds': analysisKind }
)
matrix = newMatrix
# Construct the workflow steps needed for this check.
steps = [
{
'name': 'Check out repository',
'uses': 'actions/checkout@v6'
},
]
installNode = is_truthy(checkSpecification.get('installNode', ''))
if installNode:
steps.extend([
{
'name': 'Install Node.js',
'uses': 'actions/setup-node@v6',
'with': {
'node-version': '20.x',
'cache': 'npm',
},
},
{
'name': 'Install dependencies',
'run': 'npm ci',
},
])
steps.append({
'name': 'Prepare test',
'id': 'prepare-test',
'uses': './.github/actions/prepare-test',
'with': {
'version': '${{ matrix.version }}',
'use-all-platform-bundle': useAllPlatformBundle,
# If the action is being run from a container, then do not setup kotlin.
# This is because the kotlin binaries cannot be downloaded from the container.
'setup-kotlin': str(not 'container' in checkSpecification).lower(),
}
})
installGo = is_truthy(checkSpecification.get('installGo', ''))
if installGo:
baseGoVersionExpr = '>=1.21.0'
workflowInputs['go-version'] = {
'type': 'string',
'description': 'The version of Go to install',
'required': False,
'default': baseGoVersionExpr,
}
steps.append({
'name': 'Install Go',
'uses': 'actions/setup-go@v6',
'with': {
'go-version': '${{ inputs.go-version || \'' + baseGoVersionExpr + '\' }}',
# to avoid potentially misleading autobuilder results where we expect it to download
# dependencies successfully, but they actually come from a warm cache
'cache': False
}
})
installJava = is_truthy(checkSpecification.get('installJava', ''))
if installJava:
baseJavaVersionExpr = '17'
workflowInputs['java-version'] = {
'type': 'string',
'description': 'The version of Java to install',
'required': False,
'default': baseJavaVersionExpr,
}
steps.append({
'name': 'Install Java',
'uses': 'actions/setup-java@v5',
'with': {
'java-version': '${{ inputs.java-version || \'' + baseJavaVersionExpr + '\' }}',
'distribution': 'temurin'
}
})
installPython = is_truthy(checkSpecification.get('installPython', ''))
if installPython:
basePythonVersionExpr = '3.13'
workflowInputs['python-version'] = {
'type': 'string',
'description': 'The version of Python to install',
'required': False,
'default': basePythonVersionExpr,
}
steps.append({
'name': 'Install Python',
'if': 'matrix.version != \'nightly-latest\'',
'uses': 'actions/setup-python@v6',
'with': {
'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
}
})
installDotNet = is_truthy(checkSpecification.get('installDotNet', ''))
if installDotNet:
baseDotNetVersionExpr = '9.x'
workflowInputs['dotnet-version'] = {
'type': 'string',
'description': 'The version of .NET to install',
'required': False,
'default': baseDotNetVersionExpr,
}
steps.append({
'name': 'Install .NET',
'uses': 'actions/setup-dotnet@v5',
'with': {
'dotnet-version': '${{ inputs.dotnet-version || \'' + baseDotNetVersionExpr + '\' }}'
}
})
installYq = is_truthy(checkSpecification.get('installYq', ''))
if installYq:
steps.append({
'name': 'Install yq',
'if': "runner.os == 'Windows'",
'env': {
'YQ_PATH': '${{ runner.temp }}/yq',
# This is essentially an arbitrary version of `yq`, which happened to be the one that
# `choco` fetched when we moved away from using that here.
# See https://github.com/github/codeql-action/pull/3423
'YQ_VERSION': 'v4.50.1'
},
'run': LiteralScalarString(
'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n'
'echo "$YQ_PATH" >> "$GITHUB_PATH"'
),
})
# If container initialisation steps are present in the check specification,
# make sure to execute them first.
if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
steps.insert(0, checkSpecification['container-init-steps'])
steps.extend(checkSpecification['steps'])
checkJob = {
'strategy': {
'fail-fast': False,
'matrix': {
'include': matrix
}
},
'name': checkSpecification['name'],
'if': 'github.triggering_actor != \'dependabot[bot]\'',
'permissions': {
'contents': 'read',
'security-events': 'read'
},
'timeout-minutes': 45,
'runs-on': '${{ matrix.os }}',
'steps': steps,
}
if 'permissions' in checkSpecification:
checkJob['permissions'] = checkSpecification['permissions']
for key in ["env", "container", "services"]:
if key in checkSpecification:
checkJob[key] = checkSpecification[key]
checkJob['env'] = checkJob.get('env', {})
if 'CODEQL_ACTION_TEST_MODE' not in checkJob['env']:
checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True
checkName = file.stem
# If this check belongs to a named collection, record it.
if 'collection' in checkSpecification:
collection_name = checkSpecification['collection']
collections.setdefault(collection_name, []).append({
'specification': checkSpecification,
'checkName': checkName,
'inputs': workflowInputs
})
raw_file = this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml.raw"
with open(raw_file, 'w', newline='\n') as output_stream:
extraGroupName = ""
for inputName in workflowInputs.keys():
extraGroupName += "-${{inputs." + inputName + "}}"
writeHeader(output_stream)
yaml.dump({
'name': f"PR Check - {checkSpecification['name']}",
'env': {
'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
'GO111MODULE': 'auto'
},
'on': {
'push': {
'branches': ['main', 'releases/v*']
},
'pull_request': {
'types': ["opened", "synchronize", "reopened", "ready_for_review"]
},
'merge_group': {
'types': ['checks_requested']
},
'schedule': [{'cron': SingleQuotedScalarString('0 5 * * *')}],
'workflow_dispatch': {
'inputs': workflowInputs
},
'workflow_call': {
'inputs': workflowInputs
}
},
'defaults': {
'run': {
'shell': 'bash',
},
},
'concurrency': {
# Cancel in-progress workflows in the same 'group' for pull_request events,
# but not other event types. This should have the effect that workflows on PRs
# get cancelled if there is a newer workflow in the same concurrency group.
# For other events, the new workflows should wait until earlier ones have finished.
# This should help reduce the number of concurrent workflows on the repo, and
# consequently the number of concurrent API requests.
# Note, the `|| false` is intentional to rule out that this somehow ends up being
# `true` since we observed workflows for non-`pull_request` events getting cancelled.
'cancel-in-progress': "${{ github.event_name == 'pull_request' || false }}",
# The group is determined by the workflow name, the ref, and the input values.
# The base name is hard-coded to avoid issues when the workflow is triggered by
# a `workflow_call` event (where `github.workflow` would be the name of the caller).
# The input values are added, since they may result in different behaviour for a
# given workflow on the same ref.
'group': checkName + "-${{github.ref}}" + extraGroupName
},
'jobs': {
checkName: checkJob
}
}, output_stream)
with open(raw_file, 'r') as input_stream:
with open(this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml", 'w', newline='\n') as output_stream:
content = input_stream.read()
output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+['']))
os.remove(raw_file)
# write workflow files for collections
for collection_name in collections:
jobs = {}
combinedInputs = {}
for check in collections[collection_name]:
checkName = check['checkName']
checkSpecification = check['specification']
checkInputs = check['inputs']
checkWith = {}
combinedInputs |= checkInputs
for inputName in checkInputs.keys():
checkWith[inputName] = "${{ inputs." + inputName + " }}"
jobs[checkName] = {
'name': checkSpecification['name'],
'permissions': {
'contents': 'read',
'security-events': 'read'
},
'uses': "./.github/workflows/" + f"__{checkName}.yml",
'with': checkWith
}
raw_file = this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml.raw"
with open(raw_file, 'w') as output_stream:
writeHeader(output_stream)
yaml.dump({
'name': f"Manual Check - {collection_name}",
'env': {
'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
'GO111MODULE': 'auto'
},
'on': {
'workflow_dispatch': {
'inputs': combinedInputs
},
},
'jobs': jobs
}, output_stream)
with open(raw_file, 'r') as input_stream:
with open(this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml", 'w', newline='\n') as output_stream:
content = input_stream.read()
output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+['']))
os.remove(raw_file)
+10 -4
View File
@@ -2,8 +2,14 @@
set -e
cd "$(dirname "$0")"
python3 -m venv env
source env/*/activate
pip3 install ruamel.yaml==0.17.31
python3 sync.py
# Run `npm ci` in CI or `npm install` otherwise.
if [ "$GITHUB_ACTIONS" = "true" ]; then
echo "In Actions, running 'npm ci' for 'sync.ts'..."
npm ci
else
echo "Running 'npm install' for 'sync.ts'..."
npm install --no-audit --no-fund
fi
npx tsx sync.ts
+525
View File
@@ -0,0 +1,525 @@
#!/usr/bin/env npx tsx
import * as fs from "fs";
import * as path from "path";
import * as yaml from "yaml";
/** Known workflow input names. */
enum KnownInputName {
GoVersion = "go-version",
JavaVersion = "java-version",
PythonVersion = "python-version",
DotnetVersion = "dotnet-version",
}
/**
* Represents workflow input definitions.
*/
interface WorkflowInput {
type: string;
description: string;
required: boolean;
default: string;
}
/** A partial mapping from known input names to input definitions. */
type WorkflowInputs = Partial<Record<KnownInputName, WorkflowInput>>;
/**
* Represents PR check specifications.
*/
interface Specification {
/** The display name for the check. */
name: string;
/** The workflow steps specific to this check. */
steps: any[];
/** Workflow-level input definitions forwarded to `workflow_dispatch`/`workflow_call`. */
inputs?: Record<string, WorkflowInput>;
/** CodeQL bundle versions to test against. Defaults to `DEFAULT_TEST_VERSIONS`. */
versions?: string[];
/** Operating system prefixes used to select runner images (e.g. `["ubuntu", "macos"]`). */
operatingSystems?: string[];
/** Whether to use the all-platform CodeQL bundle. */
useAllPlatformBundle?: string;
/** Values for the `analysis-kinds` matrix dimension. */
analysisKinds?: string[];
installNode?: boolean;
installGo?: boolean;
installJava?: boolean;
installPython?: boolean;
installDotNet?: boolean;
installYq?: boolean;
/** Container image configuration for the job. */
container?: any;
/** Service containers for the job. */
services?: any;
/** Custom permissions override for the job. */
permissions?: Record<string, string>;
/** Extra environment variables for the job. */
env?: Record<string, any>;
/** If set, this check is part of a named collection that gets its own caller workflow. */
collection?: string;
}
// The default set of CodeQL Bundle versions to use for the PR checks.
const defaultTestVersions = [
  // The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts`
  "stable-v2.17.6",
  // The last CodeQL release in the 2.18 series.
  "stable-v2.18.4",
  // The last CodeQL release in the 2.19 series.
  "stable-v2.19.4",
  // The last CodeQL release in the 2.20 series.
  "stable-v2.20.7",
  // The last CodeQL release in the 2.21 series.
  "stable-v2.21.4",
  // The last CodeQL release in the 2.22 series.
  "stable-v2.22.4",
  // The default version of CodeQL for Dotcom, as determined by feature flags.
  "default",
  // The version of CodeQL shipped with the Action in `defaults.json`. During the release process
  // for a new CodeQL release, there will be a period of time during which this will be newer than
  // the default version on Dotcom.
  "linked",
  // A nightly build directly from our private repo, built in the last 24 hours.
  "nightly-latest",
];
const THIS_DIR = __dirname;
const CHECKS_DIR = path.join(THIS_DIR, "checks");
const OUTPUT_DIR = path.join(THIS_DIR, "..", ".github", "workflows");
/**
 * Read the file at `filePath` and parse its contents into a YAML document,
 * preserving formatting/position information for later round-tripping.
 */
function loadYaml(filePath: string): yaml.Document {
  return yaml.parseDocument(fs.readFileSync(filePath, "utf8"));
}
/**
 * Serialize `workflow` to YAML and write it to `filePath`, prepended with the
 * standard "generated file" warning header. Strings are rendered with single
 * quotes, duplicate objects are never emitted as anchors/aliases, and trailing
 * whitespace is stripped from every line before writing.
 */
function writeYaml(filePath: string, workflow: any): void {
  const header = `# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
`;
  const doc = new yaml.Document(workflow, { aliasDuplicateObjects: false });
  const rendered = yaml.stringify(doc, {
    aliasDuplicateObjects: false,
    singleQuote: true,
    lineWidth: 0,
  });
  fs.writeFileSync(filePath, stripTrailingWhitespace(header + rendered), "utf8");
}
/**
 * Remove trailing whitespace from every line of `content`, leaving the line
 * structure itself (including any final newline) untouched.
 */
function stripTrailingWhitespace(content: string): string {
  const cleaned: string[] = [];
  for (const line of content.split("\n")) {
    cleaned.push(line.trimEnd());
  }
  return cleaned.join("\n");
}
/**
 * Main entry point for the sync script.
 *
 * Reads every check specification in `pr-checks/checks/*.yml`, expands each
 * one into a full PR-check workflow written to
 * `.github/workflows/__<checkName>.yml`, and finally writes one manual
 * caller workflow per named collection of checks.
 */
function main(): void {
  // Ensure the output directory exists.
  fs.mkdirSync(OUTPUT_DIR, { recursive: true });
  // Discover and sort all check specification files.
  const checkFiles = fs
    .readdirSync(CHECKS_DIR)
    .filter((f) => f.endsWith(".yml"))
    .sort()
    .map((f) => path.join(CHECKS_DIR, f));
  console.log(`Found ${checkFiles.length} check specification(s).`);
  // Checks grouped by their `collection` name, for the caller workflows
  // generated at the end.
  const collections: Record<
    string,
    Array<{
      specification: Specification;
      checkName: string;
      inputs: Record<string, WorkflowInput>;
    }>
  > = {};
  for (const file of checkFiles) {
    const checkName = path.basename(file, ".yml");
    const specDocument = loadYaml(file);
    const checkSpecification = specDocument.toJS() as Specification;
    console.log(`Processing: ${checkName} — "${checkSpecification.name}"`);
    const workflowInputs: WorkflowInputs = {};
    // Build the runner-image × CodeQL-version test matrix for this check.
    let matrix: Array<Record<string, any>> = [];
    for (const version of checkSpecification.versions ?? defaultTestVersions) {
      if (version === "latest") {
        throw new Error(
          'Did not recognise "version: latest". Did you mean "version: linked"?',
        );
      }
      const runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"];
      const operatingSystems = checkSpecification.operatingSystems ?? [
        "ubuntu",
      ];
      for (const operatingSystem of operatingSystems) {
        const runnerImagesForOs = runnerImages.filter((image) =>
          image.startsWith(operatingSystem),
        );
        for (const runnerImage of runnerImagesForOs) {
          matrix.push({
            os: runnerImage,
            version,
          });
        }
      }
    }
    // Default to the single-platform bundle unless the spec opts in.
    const useAllPlatformBundle = checkSpecification.useAllPlatformBundle
      ? checkSpecification.useAllPlatformBundle
      : "false";
    // If analysis kinds are specified, cross them into the matrix as an
    // extra `analysis-kinds` dimension.
    if (checkSpecification.analysisKinds) {
      const newMatrix: Array<Record<string, any>> = [];
      for (const matrixInclude of matrix) {
        for (const analysisKind of checkSpecification.analysisKinds) {
          newMatrix.push({
            ...matrixInclude,
            "analysis-kinds": analysisKind,
          });
        }
      }
      matrix = newMatrix;
    }
    // Construct the workflow steps needed for this check.
    const steps: any[] = [
      {
        name: "Check out repository",
        uses: "actions/checkout@v6",
      },
    ];
    const installNode = checkSpecification.installNode;
    if (installNode) {
      steps.push(
        {
          name: "Install Node.js",
          uses: "actions/setup-node@v6",
          with: {
            "node-version": "20.x",
            cache: "npm",
          },
        },
        {
          name: "Install dependencies",
          run: "npm ci",
        },
      );
    }
    steps.push({
      name: "Prepare test",
      id: "prepare-test",
      uses: "./.github/actions/prepare-test",
      with: {
        version: "${{ matrix.version }}",
        "use-all-platform-bundle": useAllPlatformBundle,
        // If the action is being run from a container, then do not setup kotlin.
        // This is because the kotlin binaries cannot be downloaded from the container.
        "setup-kotlin": "container" in checkSpecification ? "false" : "true",
      },
    });
    // Each tool install below also registers a matching workflow input so the
    // tool version can be overridden via `workflow_dispatch`/`workflow_call`.
    const installGo = checkSpecification.installGo;
    if (installGo) {
      const baseGoVersionExpr = ">=1.21.0";
      workflowInputs[KnownInputName.GoVersion] = {
        type: "string",
        description: "The version of Go to install",
        required: false,
        default: baseGoVersionExpr,
      };
      steps.push({
        name: "Install Go",
        uses: "actions/setup-go@v6",
        with: {
          "go-version":
            "${{ inputs.go-version || '" + baseGoVersionExpr + "' }}",
          // to avoid potentially misleading autobuilder results where we expect it to download
          // dependencies successfully, but they actually come from a warm cache
          cache: false,
        },
      });
    }
    const installJava = checkSpecification.installJava;
    if (installJava) {
      const baseJavaVersionExpr = "17";
      workflowInputs[KnownInputName.JavaVersion] = {
        type: "string",
        description: "The version of Java to install",
        required: false,
        default: baseJavaVersionExpr,
      };
      steps.push({
        name: "Install Java",
        uses: "actions/setup-java@v5",
        with: {
          "java-version":
            "${{ inputs.java-version || '" + baseJavaVersionExpr + "' }}",
          distribution: "temurin",
        },
      });
    }
    const installPython = checkSpecification.installPython;
    if (installPython) {
      const basePythonVersionExpr = "3.13";
      workflowInputs[KnownInputName.PythonVersion] = {
        type: "string",
        description: "The version of Python to install",
        required: false,
        default: basePythonVersionExpr,
      };
      steps.push({
        name: "Install Python",
        if: "matrix.version != 'nightly-latest'",
        uses: "actions/setup-python@v6",
        with: {
          "python-version":
            "${{ inputs.python-version || '" + basePythonVersionExpr + "' }}",
        },
      });
    }
    const installDotNet = checkSpecification.installDotNet;
    if (installDotNet) {
      const baseDotNetVersionExpr = "9.x";
      workflowInputs[KnownInputName.DotnetVersion] = {
        type: "string",
        description: "The version of .NET to install",
        required: false,
        default: baseDotNetVersionExpr,
      };
      steps.push({
        name: "Install .NET",
        uses: "actions/setup-dotnet@v5",
        with: {
          "dotnet-version":
            "${{ inputs.dotnet-version || '" + baseDotNetVersionExpr + "' }}",
        },
      });
    }
    const installYq = checkSpecification.installYq;
    if (installYq) {
      steps.push({
        name: "Install yq",
        if: "runner.os == 'Windows'",
        env: {
          YQ_PATH: "${{ runner.temp }}/yq",
          // This is essentially an arbitrary version of `yq`, which happened to be the one that
          // `choco` fetched when we moved away from using that here.
          // See https://github.com/github/codeql-action/pull/3423
          YQ_VERSION: "v4.50.1",
        },
        run:
          'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' +
          'echo "$YQ_PATH" >> "$GITHUB_PATH"',
      });
    }
    // Extract the sequence of steps from the YAML document to persist as much formatting as possible.
    const specSteps = specDocument.get("steps") as yaml.YAMLSeq;
    // A handful of workflow specifications use double quotes for values, while we generally use single quotes.
    // This replaces double quotes with single quotes for consistency.
    yaml.visit(specSteps, {
      Scalar(_key, node) {
        if (node.type === "QUOTE_DOUBLE") {
          node.type = "QUOTE_SINGLE";
        }
      },
    });
    // Add the generated steps in front of the ones from the specification.
    // NOTE(review): the previous Python generator additionally inserted a
    // spec-provided `container-init-steps` block before all other steps when
    // `container` was set; that handling is absent here — confirm it was
    // intentionally dropped in the port.
    specSteps.items.unshift(...steps);
    const checkJob: Record<string, any> = {
      strategy: {
        "fail-fast": false,
        matrix: {
          include: matrix,
        },
      },
      name: checkSpecification.name,
      if: "github.triggering_actor != 'dependabot[bot]'",
      permissions: {
        contents: "read",
        "security-events": "read",
      },
      "timeout-minutes": 45,
      "runs-on": "${{ matrix.os }}",
      steps: specSteps,
    };
    // A spec-level `permissions` block replaces the default read-only one.
    if (checkSpecification.permissions) {
      checkJob.permissions = checkSpecification.permissions;
    }
    for (const key of ["env", "container", "services"] as const) {
      if (checkSpecification[key] !== undefined) {
        checkJob[key] = checkSpecification[key];
      }
    }
    // Force test mode unless the spec explicitly sets it.
    checkJob.env = checkJob.env ?? {};
    if (!("CODEQL_ACTION_TEST_MODE" in checkJob.env)) {
      checkJob.env.CODEQL_ACTION_TEST_MODE = true;
    }
    // If this check belongs to a named collection, record it.
    if (checkSpecification.collection) {
      const collectionName = checkSpecification.collection;
      if (!collections[collectionName]) {
        collections[collectionName] = [];
      }
      collections[collectionName].push({
        specification: checkSpecification,
        checkName,
        inputs: workflowInputs,
      });
    }
    // Input values are appended to the concurrency group name, since they may
    // result in different behaviour for a given workflow on the same ref.
    let extraGroupName = "";
    for (const inputName of Object.keys(workflowInputs)) {
      extraGroupName += "-${{inputs." + inputName + "}}";
    }
    // Quote the cron expression explicitly so it is emitted as a string.
    const cron = new yaml.Scalar("0 5 * * *");
    cron.type = yaml.Scalar.QUOTE_SINGLE;
    const workflow = {
      name: `PR Check - ${checkSpecification.name}`,
      env: {
        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}",
        GO111MODULE: "auto",
      },
      on: {
        push: {
          branches: ["main", "releases/v*"],
        },
        pull_request: {
          types: ["opened", "synchronize", "reopened", "ready_for_review"],
        },
        merge_group: {
          types: ["checks_requested"],
        },
        schedule: [{ cron }],
        workflow_dispatch: {
          inputs: workflowInputs,
        },
        workflow_call: {
          inputs: workflowInputs,
        },
      },
      defaults: {
        run: {
          shell: "bash",
        },
      },
      concurrency: {
        // Cancel in-progress workflows in the same group for `pull_request`
        // events only; for other events, new workflows wait for earlier ones.
        // The `|| false` is intentional, to rule out this somehow evaluating
        // to `true` for non-`pull_request` events.
        "cancel-in-progress":
          "${{ github.event_name == 'pull_request' || false }}",
        // The group uses the hard-coded check name (not `github.workflow`,
        // which would be the caller's name under `workflow_call`), the ref,
        // and the input values.
        group: checkName + "-${{github.ref}}" + extraGroupName,
      },
      jobs: {
        [checkName]: checkJob,
      },
    };
    const outputPath = path.join(OUTPUT_DIR, `__${checkName}.yml`);
    writeYaml(outputPath, workflow);
  }
  // Write workflow files for collections.
  for (const collectionName of Object.keys(collections)) {
    const jobs: Record<string, any> = {};
    // Union of the inputs of all checks in the collection.
    let combinedInputs: Record<string, WorkflowInput> = {};
    for (const check of collections[collectionName]) {
      const { checkName, specification, inputs: checkInputs } = check;
      const checkWith: Record<string, string> = {};
      combinedInputs = { ...combinedInputs, ...checkInputs };
      // Forward each input of the called workflow from the caller's inputs.
      for (const inputName of Object.keys(checkInputs)) {
        checkWith[inputName] = "${{ inputs." + inputName + " }}";
      }
      jobs[checkName] = {
        name: specification.name,
        permissions: {
          contents: "read",
          "security-events": "read",
        },
        uses: `./.github/workflows/__${checkName}.yml`,
        with: checkWith,
      };
    }
    const collectionWorkflow = {
      name: `Manual Check - ${collectionName}`,
      env: {
        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}",
        GO111MODULE: "auto",
      },
      on: {
        workflow_dispatch: {
          inputs: combinedInputs,
        },
      },
      jobs,
    };
    const outputPath = path.join(OUTPUT_DIR, `__${collectionName}.yml`);
    writeYaml(outputPath, collectionWorkflow);
  }
  console.log(
    `\nDone. Wrote ${checkFiles.length} workflow file(s) to ${OUTPUT_DIR}`,
  );
}
main();
-185
View File
@@ -1,185 +0,0 @@
#!/usr/bin/env python3
"""
Sync-back script to automatically update action versions in source templates
from the generated workflow files after Dependabot updates.
This script scans the generated workflow files (.github/workflows/__*.yml) to find
all external action versions used, then updates:
1. Hardcoded action versions in pr-checks/sync.py
2. Action version references in template files in pr-checks/checks/
The script automatically detects all actions used in generated workflows and
preserves version comments (e.g., # v1.2.3) when syncing versions.
This ensures that when Dependabot updates action versions in generated workflows,
those changes are properly synced back to the source templates. Regular workflow
files are updated directly by Dependabot and don't need sync-back.
"""
import os
import re
import glob
import argparse
import sys
from pathlib import Path
from typing import Dict, List
def scan_generated_workflows(workflow_dir: str) -> Dict[str, str]:
    """
    Scan generated workflow files to extract the latest action versions.

    Args:
        workflow_dir: Path to .github/workflows directory

    Returns:
        Dictionary mapping action names to their latest versions (including comments)
    """
    # Captures `owner/repo@version`, where the version part may carry a
    # trailing `# vX.Y.Z` comment that we want to preserve.
    uses_pattern = re.compile(r'uses:\s+([^/\s]+/[^@\s]+)@([^@\n]+)')
    action_versions: Dict[str, str] = {}
    for file_path in glob.glob(os.path.join(workflow_dir, "__*.yml")):
        with open(file_path, 'r') as f:
            text = f.read()
        for action_name, version_with_comment in uses_pattern.findall(text):
            # Skip local actions (paths starting with './'). Versions are
            # assumed to be consistent across files, as they should be on a
            # Dependabot update PR.
            if not action_name.startswith('./'):
                action_versions[action_name] = version_with_comment.rstrip()
    return action_versions
def update_sync_py(sync_py_path: str, action_versions: Dict[str, str]) -> bool:
    """
    Update hardcoded action versions in pr-checks/sync.py

    Args:
        sync_py_path: Path to sync.py file
        action_versions: Dictionary of action names to versions (may include comments)

    Returns:
        True if file was modified, False otherwise
    """
    if not os.path.exists(sync_py_path):
        raise FileNotFoundError(f"Could not find {sync_py_path}")

    with open(sync_py_path, 'r') as f:
        original_content = f.read()

    content = original_content
    for action_name, version_with_comment in action_versions.items():
        # sync.py stores bare versions, so drop any trailing `# ...` comment.
        version = version_with_comment.split('#')[0].strip() if '#' in version_with_comment else version_with_comment.strip()
        # Matches entries like 'uses': 'actions/setup-node@v4'. Note that this
        # will break if an Action uses reference is ever stored in a variable -
        # a risk we're happy to take since in that case the PR checks will
        # just fail.
        pattern = rf"('uses':\s*'){re.escape(action_name)}@(?:[^']+)(')"
        content = re.sub(pattern, rf"\1{action_name}@{version}\2", content)

    if content == original_content:
        print(f"No changes needed in {sync_py_path}")
        return False

    with open(sync_py_path, 'w') as f:
        f.write(content)
    print(f"Updated {sync_py_path}")
    return True
def update_template_files(checks_dir: str, action_versions: Dict[str, str]) -> List[str]:
    """
    Update action versions in template files in pr-checks/checks/

    Args:
        checks_dir: Path to pr-checks/checks directory
        action_versions: Dictionary of action names to versions (may include comments)

    Returns:
        List of files that were modified
    """
    modified_files: List[str] = []
    for file_path in glob.glob(os.path.join(checks_dir, "*.yml")):
        with open(file_path, 'r') as f:
            original_content = f.read()

        content = original_content
        for action_name, version_with_comment in action_versions.items():
            # Matches both plain versions ('uses: a/b@v4') and pinned SHAs with
            # comments ('uses: a/b@<sha> # v1.2.3'); unlike sync.py, templates
            # keep the version comment.
            pattern = rf"(uses:\s+{re.escape(action_name)})@(?:[^@\n]+)"
            content = re.sub(pattern, rf"\1@{version_with_comment}", content)

        if content != original_content:
            with open(file_path, 'w') as f:
                f.write(content)
            modified_files.append(file_path)
            print(f"Updated {file_path}")

    return modified_files
def main():
    """Sync action versions from generated workflows back to the templates.

    Scans `.github/workflows/__*.yml` for external action versions, then
    updates `pr-checks/sync.py` (bare versions) and the templates in
    `pr-checks/checks/` (versions with comments).

    Returns:
        0 on success, 1 if no action versions were found.
    """
    parser = argparse.ArgumentParser(description="Sync action versions from generated workflows back to templates")
    parser.add_argument("--verbose", "-v", action="store_true", help="Enable verbose output")
    args = parser.parse_args()
    # Get the repository root (assuming script is in pr-checks/)
    script_dir = Path(__file__).parent
    repo_root = script_dir.parent
    workflow_dir = repo_root / ".github" / "workflows"
    checks_dir = script_dir / "checks"
    sync_py_path = script_dir / "sync.py"
    print("Scanning generated workflows for latest action versions...")
    action_versions = scan_generated_workflows(str(workflow_dir))
    if args.verbose:
        print("Found action versions:")
        for action, version in action_versions.items():
            print(f" {action}@{version}")
    if not action_versions:
        # Nothing to sync; treat this as an error so CI notices.
        print("No action versions found in generated workflows")
        return 1
    # Update files
    print("\nUpdating source files...")
    modified_files = []
    # Update sync.py
    if update_sync_py(str(sync_py_path), action_versions):
        modified_files.append(str(sync_py_path))
    # Update template files
    template_modified = update_template_files(str(checks_dir), action_versions)
    modified_files.extend(template_modified)
    if modified_files:
        print(f"\nSync completed. Modified {len(modified_files)} files:")
        for file_path in modified_files:
            print(f" {file_path}")
    else:
        print("\nNo files needed updating - all action versions are already in sync")
    return 0
if __name__ == "__main__":
    sys.exit(main())
+250
View File
@@ -0,0 +1,250 @@
#!/usr/bin/env npx tsx
/*
Tests for the sync_back.ts script
*/
import * as assert from "node:assert/strict";
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { afterEach, beforeEach, describe, it } from "node:test";
import {
scanGeneratedWorkflows,
updateSyncTs,
updateTemplateFiles,
} from "./sync_back";
let testDir: string;
let workflowDir: string;
let checksDir: string;
let syncTsPath: string;
beforeEach(() => {
/** Set up temporary directories and files for testing */
testDir = fs.mkdtempSync(path.join(os.tmpdir(), "sync-back-test-"));
workflowDir = path.join(testDir, ".github", "workflows");
checksDir = path.join(testDir, "pr-checks", "checks");
fs.mkdirSync(workflowDir, { recursive: true });
fs.mkdirSync(checksDir, { recursive: true });
// Create sync.ts file path
syncTsPath = path.join(testDir, "pr-checks", "sync.ts");
});
afterEach(() => {
/** Clean up temporary directories */
fs.rmSync(testDir, { recursive: true, force: true });
});
describe("scanGeneratedWorkflows", () => {
it("basic workflow scanning", () => {
/** Test basic workflow scanning functionality */
const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v5
- uses: actions/setup-go@v6
`;
fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);
const result = scanGeneratedWorkflows(workflowDir);
assert.equal(result["actions/checkout"], "v4");
assert.equal(result["actions/setup-node"], "v5");
assert.equal(result["actions/setup-go"], "v6");
});
it("scanning workflows with version comments", () => {
/** Test scanning workflows with version comments */
const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0
- uses: actions/setup-python@v6 # Latest Python
`;
fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);
const result = scanGeneratedWorkflows(workflowDir);
assert.equal(result["actions/checkout"], "v4");
assert.equal(
result["ruby/setup-ruby"],
"44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
);
assert.equal(result["actions/setup-python"], "v6 # Latest Python");
});
it("ignores local actions", () => {
/** Test that local actions (starting with ./) are ignored */
const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/local-action
- uses: ./another-local-action@v1
`;
fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);
const result = scanGeneratedWorkflows(workflowDir);
assert.equal(result["actions/checkout"], "v4");
assert.equal("./.github/actions/local-action" in result, false);
assert.equal("./another-local-action" in result, false);
});
});
describe("updateSyncTs", () => {
it("updates sync.ts file", () => {
/** Test updating sync.ts file */
const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v4",
with: { "node-version": "16" },
},
{
uses: "actions/setup-go@v5",
with: { "go-version": "1.19" },
},
];
`;
fs.writeFileSync(syncTsPath, syncTsContent);
const actionVersions = {
"actions/setup-node": "v5",
"actions/setup-go": "v6",
};
const result = updateSyncTs(syncTsPath, actionVersions);
assert.equal(result, true);
const updatedContent = fs.readFileSync(syncTsPath, "utf8");
assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"'));
assert.ok(updatedContent.includes('uses: "actions/setup-go@v6"'));
});
it("strips comments from versions", () => {
/** Test updating sync.ts file when versions have comments */
const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v4",
with: { "node-version": "16" },
},
];
`;
fs.writeFileSync(syncTsPath, syncTsContent);
const actionVersions = {
"actions/setup-node": "v5 # Latest version",
};
const result = updateSyncTs(syncTsPath, actionVersions);
assert.equal(result, true);
const updatedContent = fs.readFileSync(syncTsPath, "utf8");
// sync.ts should get the version without comment
assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"'));
assert.ok(!updatedContent.includes("# Latest version"));
});
it("returns false when no changes are needed", () => {
/** Test that updateSyncTs returns false when no changes are needed */
const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v5",
with: { "node-version": "16" },
},
];
`;
fs.writeFileSync(syncTsPath, syncTsContent);
const actionVersions = {
"actions/setup-node": "v5",
};
const result = updateSyncTs(syncTsPath, actionVersions);
assert.equal(result, false);
});
});
describe("updateTemplateFiles", () => {
it("updates template files", () => {
/** Test updating template files */
const templateContent = `
name: Test Template
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v4
with:
node-version: 16
`;
const templatePath = path.join(checksDir, "test.yml");
fs.writeFileSync(templatePath, templateContent);
const actionVersions = {
"actions/checkout": "v4",
"actions/setup-node": "v5 # Latest",
};
const result = updateTemplateFiles(checksDir, actionVersions);
assert.equal(result.length, 1);
assert.ok(result.includes(templatePath));
const updatedContent = fs.readFileSync(templatePath, "utf8");
assert.ok(updatedContent.includes("uses: actions/checkout@v4"));
assert.ok(updatedContent.includes("uses: actions/setup-node@v5 # Latest"));
});
it("preserves version comments", () => {
/** Test that updating template files preserves version comments */
const templateContent = `
name: Test Template
steps:
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0
`;
const templatePath = path.join(checksDir, "test.yml");
fs.writeFileSync(templatePath, templateContent);
const actionVersions = {
"ruby/setup-ruby":
"55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
};
const result = updateTemplateFiles(checksDir, actionVersions);
assert.equal(result.length, 1);
const updatedContent = fs.readFileSync(templatePath, "utf8");
assert.ok(
updatedContent.includes(
"uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
),
);
});
});
+275
View File
@@ -0,0 +1,275 @@
#!/usr/bin/env npx tsx
/*
Sync-back script to automatically update action versions in source templates
from the generated workflow files after Dependabot updates.
This script scans the generated workflow files (.github/workflows/__*.yml) to find
all external action versions used, then updates:
1. Hardcoded action versions in pr-checks/sync.ts
2. Action version references in template files in pr-checks/checks/
The script automatically detects all actions used in generated workflows and
preserves version comments (e.g., # v1.2.3) when syncing versions.
This ensures that when Dependabot updates action versions in generated workflows,
those changes are properly synced back to the source templates. Regular workflow
files are updated directly by Dependabot and don't need sync-back.
*/
import { parseArgs } from "node:util";
import * as fs from "fs";
import * as path from "path";
// Directory containing this script (pr-checks/).
const THIS_DIR = __dirname;
// PR check template specifications live here.
const CHECKS_DIR = path.join(THIS_DIR, "checks");
// Workflow directory containing the generated `__*.yml` files we scan.
const WORKFLOW_DIR = path.join(THIS_DIR, "..", ".github", "workflows");
// The sync script whose hardcoded action versions we update.
const SYNC_TS_PATH = path.join(THIS_DIR, "sync.ts");
/**
 * Matches action references in a workflow file, capturing two groups from
 * `uses: <action_name>@<version>`:
 *
 * 1. the action name (e.g. `ruby/setup-ruby`); and
 * 2. everything after the `@` up to the end of the line, which may include a
 *    trailing comment (e.g. `09a7688d3b55cf0e976497ff046b70949eeaccfd # v1.288.0`).
 */
const EXTRACT_ACTION_REF_PATTERN: RegExp =
  /uses:\s+([^/\s]+\/[^@\s]+)@([^@\n]+)/g;
/**
 * Matches regular-expression metacharacters in an action name so that they
 * can be escaped before the name is interpolated into a dynamically built
 * pattern for a TypeScript or YAML file.
 */
const ESCAPE_PATTERN = /[.*+?^${}()|[\]\\]/g;
/**
 * A `SyncBackPattern` builds, for a specific action name, a regular expression
 * that locates references to that action (plus enough surrounding context to
 * rewrite them) in a file we sync updated versions back to.
 */
type SyncBackPattern = (actionName: string) => RegExp;
/**
 * Finds `uses: "<name>@<version>"` entries in `sync.ts`. The `uses: "` prefix
 * and the closing `"` are captured so that a replacement can splice a new
 * action reference in between them.
 */
const TS_PATTERN: SyncBackPattern = (name: string) =>
  new RegExp(`(uses:\\s*")${name}@(?:[^"]+)(")`, "g");
/**
 * Finds `uses: <name>@<rest-of-line>` entries in a PR check template. The
 * `uses: <name>` prefix is captured so that everything after the `@` can be
 * swapped for a new version string.
 */
const YAML_PATTERN: SyncBackPattern = (name: string) =>
  new RegExp(`(uses:\\s+${name})@(?:[^@\n]+)`, "g");
/**
 * Sanitises `actionName` for safe interpolation into a regular expression,
 * then builds the replacement pattern via `patternFunction`.
 *
 * @param patternFunction The pattern builder to use.
 * @param actionName The (unsanitised) action name.
 * @returns The regular expression produced by `patternFunction`.
 */
function makeReplacementPattern(
  patternFunction: SyncBackPattern,
  actionName: string,
): RegExp {
  // Escape regex metacharacters so e.g. `.` in an owner name matches literally.
  const sanitised = actionName.replace(ESCAPE_PATTERN, "\\$&");
  return patternFunction(sanitised);
}
/**
 * Scan generated workflow files (those named `__*.yml`) for action references
 * and record the most recently seen version string for each action.
 *
 * @param workflowDir - Path to .github/workflows directory
 * @returns Map from action names to their latest versions; trailing comments
 *          such as `# v1.2.3` are kept as part of the version string.
 */
export function scanGeneratedWorkflows(
  workflowDir: string,
): Record<string, string> {
  const actionVersions: Record<string, string> = {};
  // Only generated workflows (prefixed with `__`) need sync-back; regular
  // workflows are updated by Dependabot directly.
  const generatedFiles = fs
    .readdirSync(workflowDir)
    .filter((f) => f.startsWith("__") && f.endsWith(".yml"))
    .map((f) => path.join(workflowDir, f));
  for (const filePath of generatedFiles) {
    const content = fs.readFileSync(filePath, "utf8");
    // Reset shared regex state before iterating; matchAll starts from the
    // regex's current lastIndex when the global flag is set.
    EXTRACT_ACTION_REF_PATTERN.lastIndex = 0;
    for (const match of content.matchAll(EXTRACT_ACTION_REF_PATTERN)) {
      const actionName = match[1];
      const versionWithComment = match[2].trimEnd();
      // Only track non-local actions; local references start with `./`.
      if (!actionName.startsWith("./")) {
        // Later files overwrite earlier ones. Versions are assumed to be
        // consistent across files on a Dependabot update PR.
        actionVersions[actionName] = versionWithComment;
      }
    }
  }
  return actionVersions;
}
/**
* Update hardcoded action versions in pr-checks/sync.ts
*
* @param syncTsPath - Path to sync.ts file
* @param actionVersions - Map of action names to versions (may include comments)
* @returns True if the file was modified, false otherwise
*/
export function updateSyncTs(
syncTsPath: string,
actionVersions: Record<string, string>,
): boolean {
if (!fs.existsSync(syncTsPath)) {
throw new Error(`Could not find ${syncTsPath}`);
}
let content = fs.readFileSync(syncTsPath, "utf8");
const originalContent = content;
// Update hardcoded action versions
for (const [actionName, versionWithComment] of Object.entries(
actionVersions,
)) {
// Extract just the version part (before any comment) for sync.ts
const version = versionWithComment.includes("#")
? versionWithComment.split("#")[0].trim()
: versionWithComment.trim();
// Update uses of `actionName` for `version`.
// Note that this will break if we store an Action uses reference in a
// variable - that's a risk we're happy to take since in that case the
// PR checks will just fail.
const pattern = makeReplacementPattern(TS_PATTERN, actionName);
content = content.replace(pattern, `$1${actionName}@${version}$2`);
}
if (content !== originalContent) {
fs.writeFileSync(syncTsPath, content, "utf8");
console.info(`Updated ${syncTsPath}`);
return true;
} else {
console.info(`No changes needed in ${syncTsPath}`);
return false;
}
}
/**
 * Update action versions in template files in pr-checks/checks/.
 *
 * @param checksDir - Path to pr-checks/checks directory
 * @param actionVersions - Map of action names to versions; trailing comments
 *                         are kept, since templates preserve them.
 * @returns List of files that were modified
 */
export function updateTemplateFiles(
  checksDir: string,
  actionVersions: Record<string, string>,
): string[] {
  const modifiedFiles: string[] = [];
  const templateFiles = fs
    .readdirSync(checksDir)
    .filter((f) => f.endsWith(".yml"))
    .map((f) => path.join(checksDir, f));
  for (const filePath of templateFiles) {
    const originalContent = fs.readFileSync(filePath, "utf8");
    let content = originalContent;
    for (const [actionName, versionWithComment] of Object.entries(
      actionVersions,
    )) {
      // Replace everything after the `@` (comment included) with the new
      // version string for `actionName`.
      content = content.replace(
        makeReplacementPattern(YAML_PATTERN, actionName),
        `$1@${versionWithComment}`,
      );
    }
    if (content !== originalContent) {
      fs.writeFileSync(filePath, content, "utf8");
      modifiedFiles.push(filePath);
      console.info(`Updated ${filePath}`);
    }
  }
  return modifiedFiles;
}
/**
 * Entry point: scans the generated workflows for action versions and syncs
 * them back to `sync.ts` and the PR check templates.
 *
 * @returns Process exit code: 0 on success, 1 if no action versions were found.
 */
function main(): number {
  const { values } = parseArgs({
    options: {
      verbose: {
        type: "boolean",
        short: "v",
        default: false,
      },
    },
    strict: true,
  });
  // `default: false` means this is always defined; `?? false` is belt-and-braces.
  const verbose = values.verbose ?? false;
  console.info("Scanning generated workflows for latest action versions...");
  const actionVersions = scanGeneratedWorkflows(WORKFLOW_DIR);
  if (verbose) {
    console.info("Found action versions:");
    for (const [action, version] of Object.entries(actionVersions)) {
      console.info(`  ${action}@${version}`);
    }
  }
  if (Object.keys(actionVersions).length === 0) {
    console.error("No action versions found in generated workflows");
    return 1;
  }
  // Update source files: sync.ts first, then the templates.
  console.info("\nUpdating source files...");
  const modifiedFiles: string[] = [];
  if (updateSyncTs(SYNC_TS_PATH, actionVersions)) {
    modifiedFiles.push(SYNC_TS_PATH);
  }
  modifiedFiles.push(...updateTemplateFiles(CHECKS_DIR, actionVersions));
  if (modifiedFiles.length === 0) {
    console.info(
      "\nNo files needed updating - all action versions are already in sync",
    );
  } else {
    console.info(`\nSync completed. Modified ${modifiedFiles.length} files:`);
    for (const filePath of modifiedFiles) {
      console.info(`  ${filePath}`);
    }
  }
  return 0;
}
// Only call `main` if this script was run directly (not when imported by tests).
if (require.main === module) {
  process.exit(main());
}
-237
View File
@@ -1,237 +0,0 @@
#!/usr/bin/env python3
"""
Tests for the sync_back.py script
"""
import os
import shutil
import tempfile
import unittest
import sync_back
class TestSyncBack(unittest.TestCase):
    # Exercises the module-level helpers in sync_back.py against fixtures
    # created under a throwaway temporary directory.

    def setUp(self):
        """Set up temporary directories and files for testing"""
        # Mirror the repository layout (.github/workflows, pr-checks/checks).
        self.test_dir = tempfile.mkdtemp()
        self.workflow_dir = os.path.join(self.test_dir, ".github", "workflows")
        self.checks_dir = os.path.join(self.test_dir, "pr-checks", "checks")
        os.makedirs(self.workflow_dir)
        os.makedirs(self.checks_dir)
        # Path at which individual tests create a stand-in sync.py file.
        self.sync_py_path = os.path.join(self.test_dir, "pr-checks", "sync.py")

    def tearDown(self):
        """Clean up temporary directories"""
        shutil.rmtree(self.test_dir)

    def test_scan_generated_workflows_basic(self):
        """Test basic workflow scanning functionality"""
        # Create a test generated workflow file
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v5
- uses: actions/setup-go@v6
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        self.assertEqual(result['actions/setup-node'], 'v5')
        self.assertEqual(result['actions/setup-go'], 'v6')

    def test_scan_generated_workflows_with_comments(self):
        """Test scanning workflows with version comments"""
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0
- uses: actions/setup-python@v6 # Latest Python
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        # Trailing `# ...` comments are kept as part of the version string.
        self.assertEqual(result['ruby/setup-ruby'], '44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0')
        self.assertEqual(result['actions/setup-python'], 'v6 # Latest Python')

    def test_scan_generated_workflows_ignores_local_actions(self):
        """Test that local actions (starting with ./) are ignored"""
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/local-action
- uses: ./another-local-action@v1
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        # Local action references must not appear in the result at all.
        self.assertNotIn('./.github/actions/local-action', result)
        self.assertNotIn('./another-local-action', result)

    def test_update_sync_py(self):
        """Test updating sync.py file"""
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v4',
'with': {'node-version': '16'}
},
{
'uses': 'actions/setup-go@v5',
'with': {'go-version': '1.19'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5',
            'actions/setup-go': 'v6'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        # A truthy result signals that the file content changed on disk.
        self.assertTrue(result)
        with open(self.sync_py_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("'uses': 'actions/setup-node@v5'", updated_content)
        self.assertIn("'uses': 'actions/setup-go@v6'", updated_content)

    def test_update_sync_py_with_comments(self):
        """Test updating sync.py file when versions have comments"""
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v4',
'with': {'node-version': '16'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5 # Latest version'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertTrue(result)
        with open(self.sync_py_path, 'r') as f:
            updated_content = f.read()
        # sync.py should get the version without comment
        self.assertIn("'uses': 'actions/setup-node@v5'", updated_content)
        self.assertNotIn("# Latest version", updated_content)

    def test_update_template_files(self):
        """Test updating template files"""
        template_content = """
name: Test Template
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v4
with:
node-version: 16
"""
        template_path = os.path.join(self.checks_dir, "test.yml")
        with open(template_path, 'w') as f:
            f.write(template_content)
        action_versions = {
            'actions/checkout': 'v4',
            'actions/setup-node': 'v5 # Latest'
        }
        result = sync_back.update_template_files(self.checks_dir, action_versions)
        # Only one template exists, and it should be reported as modified.
        self.assertEqual(len(result), 1)
        self.assertIn(template_path, result)
        with open(template_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("uses: actions/checkout@v4", updated_content)
        # Unlike sync.py, templates keep any trailing version comment.
        self.assertIn("uses: actions/setup-node@v5 # Latest", updated_content)

    def test_update_template_files_preserves_comments(self):
        """Test that updating template files preserves version comments"""
        template_content = """
name: Test Template
steps:
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0
"""
        template_path = os.path.join(self.checks_dir, "test.yml")
        with open(template_path, 'w') as f:
            f.write(template_content)
        action_versions = {
            'ruby/setup-ruby': '55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0'
        }
        result = sync_back.update_template_files(self.checks_dir, action_versions)
        self.assertEqual(len(result), 1)
        with open(template_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", updated_content)

    def test_no_changes_needed(self):
        """Test that functions return False/empty when no changes are needed"""
        # Test sync.py with no changes needed
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v5',
'with': {'node-version': '16'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertFalse(result)


if __name__ == '__main__':
    unittest.main()
+95 -77
View File
@@ -100,7 +100,7 @@ test("computeAutomationID()", async (t) => {
);
});
test("getPullRequestBranches() with pull request context", (t) => {
test.serial("getPullRequestBranches() with pull request context", (t) => {
withMockedContext(
{
pull_request: {
@@ -119,89 +119,104 @@ test("getPullRequestBranches() with pull request context", (t) => {
);
});
test("getPullRequestBranches() returns undefined with push context", (t) => {
withMockedContext(
{
push: {
ref: "refs/heads/main",
},
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
test("getPullRequestBranches() with Default Setup environment variables", (t) => {
withMockedContext({}, () => {
withMockedEnv(
test.serial(
"getPullRequestBranches() returns undefined with push context",
(t) => {
withMockedContext(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.deepEqual(getPullRequestBranches(), {
base: "main",
head: "refs/heads/feature-branch",
});
t.is(isAnalyzingPullRequest(), true);
},
);
});
});
test("getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: undefined,
push: {
ref: "refs/heads/main",
},
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
},
);
test("getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
test.serial(
"getPullRequestBranches() with Default Setup environment variables",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.deepEqual(getPullRequestBranches(), {
base: "main",
head: "refs/heads/feature-branch",
});
t.is(isAnalyzingPullRequest(), true);
},
);
});
},
);
test("getPullRequestBranches() returns undefined when no PR context", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
test.serial(
"getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test("initializeEnvironment", (t) => {
test.serial(
"getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test.serial(
"getPullRequestBranches() returns undefined when no PR context",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test.serial("initializeEnvironment", (t) => {
initializeEnvironment("1.2.3");
t.deepEqual(process.env[EnvVar.VERSION], "1.2.3");
});
test("fixCodeQualityCategory", (t) => {
test.serial("fixCodeQualityCategory", (t) => {
withMockedEnv(
{
GITHUB_EVENT_NAME: "dynamic",
@@ -249,14 +264,17 @@ test("fixCodeQualityCategory", (t) => {
);
});
test("isDynamicWorkflow() returns true if event name is `dynamic`", (t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
t.assert(isDynamicWorkflow());
process.env.GITHUB_EVENT_NAME = "push";
t.false(isDynamicWorkflow());
});
test.serial(
"isDynamicWorkflow() returns true if event name is `dynamic`",
(t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
t.assert(isDynamicWorkflow());
process.env.GITHUB_EVENT_NAME = "push";
t.false(isDynamicWorkflow());
},
);
test("isDefaultSetup() returns true when expected", (t) => {
test.serial("isDefaultSetup() returns true when expected", (t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
process.env[EnvVar.ANALYSIS_KEY] = "dynamic/github-code-scanning";
t.assert(isDefaultSetup());
+97 -76
View File
@@ -50,31 +50,40 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
});
});
test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns("code-scanning,code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
});
test.serial(
"getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns("code-scanning,code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
},
);
test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("quality-queries").returns("code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
});
test.serial(
"getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("quality-queries").returns("code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
},
);
test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
});
test.serial(
"getAnalysisKinds - throws if `analysis-kinds` input is invalid",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
},
);
// Test the compatibility matrix by looping through all analysis kinds.
const analysisKinds = Object.values(AnalysisKind);
@@ -86,25 +95,31 @@ for (let i = 0; i < analysisKinds.length; i++) {
if (analysisKind === otherAnalysis) continue;
if (compatibilityMatrix[analysisKind].has(otherAnalysis)) {
test(`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
});
test.serial(
`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`,
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
},
);
} else {
test(`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
});
test.serial(
`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`,
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
},
);
}
}
}
@@ -122,44 +137,50 @@ test("Code Scanning configuration does not accept other SARIF extensions", (t) =
}
});
test("Risk Assessment configuration transforms SARIF upload payload", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
test.serial(
"Risk Assessment configuration transforms SARIF upload payload",
(t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
});
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
},
);
test("Risk Assessment configuration throws for negative assessment IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
});
test.serial(
"Risk Assessment configuration throws for negative assessment IDs",
(t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
},
);
test("Risk Assessment configuration throws for invalid IDs", (t) => {
test.serial("Risk Assessment configuration throws for invalid IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "foo";
t.throws(
() =>
+1 -4
View File
@@ -28,9 +28,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
// it a bit to 20s.
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(statusReport, "createStatusReportBase")
.resolves({} as statusReport.StatusReportBase);
@@ -54,7 +52,6 @@ test("analyze action with RAM & threads from environment variables", async (t) =
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
setupActionsVars(tmpDir, tmpDir);
mockFeatureFlagApiEndpoint(200, {});
// When there are no action inputs for RAM and threads, the action uses
+1 -4
View File
@@ -26,9 +26,7 @@ setupTests(test);
test("analyze action with RAM & threads from action inputs", async (t) => {
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(statusReport, "createStatusReportBase")
.resolves({} as statusReport.StatusReportBase);
@@ -51,7 +49,6 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
setupActionsVars(tmpDir, tmpDir);
mockFeatureFlagApiEndpoint(200, {});
process.env["CODEQL_THREADS"] = "1";
+1 -1
View File
@@ -32,7 +32,7 @@ setupTests(test);
* - Checks that the duration fields are populated for the correct language.
* - Checks that the QA telemetry status report fields are populated when the QA feature flag is enabled.
*/
test("status report fields", async (t) => {
test.serial("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
+2 -1
View File
@@ -25,6 +25,7 @@ import { FeatureEnablement, Feature } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import type * as sarif from "./sarif";
import { DatabaseCreationTimings, EventReport } from "./status-report";
import { endTracingForCluster } from "./tracer-config";
import * as util from "./util";
@@ -594,7 +595,7 @@ export async function runQueries(
function getPerQueryAlertCounts(sarifPath: string): Record<string, number> {
const sarifObject = JSON.parse(
fs.readFileSync(sarifPath, "utf8"),
) as util.SarifFile;
) as sarif.Log;
// We do not need to compute fingerprints because we are not sending data based off of locations.
// Generate the query: alert count object
+89 -86
View File
@@ -14,7 +14,7 @@ test.beforeEach(() => {
util.initializeEnvironment(actionsUtil.getActionVersion());
});
test("getApiClient", async (t) => {
test.serial("getApiClient", async (t) => {
const pluginStub: sinon.SinonStub = sinon.stub(githubUtils.GitHub, "plugin");
const githubStub: sinon.SinonStub = sinon.stub();
pluginStub.returns(githubStub);
@@ -61,7 +61,7 @@ function mockGetMetaVersionHeader(
return spyGetContents;
}
test("getGitHubVersion for Dotcom", async (t) => {
test.serial("getGitHubVersion for Dotcom", async (t) => {
const apiDetails = {
auth: "",
url: "https://github.com",
@@ -75,7 +75,7 @@ test("getGitHubVersion for Dotcom", async (t) => {
t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
});
test("getGitHubVersion for GHES", async (t) => {
test.serial("getGitHubVersion for GHES", async (t) => {
mockGetMetaVersionHeader("2.0");
const v2 = await api.getGitHubVersionFromApi(api.getApiClient(), {
auth: "",
@@ -88,7 +88,7 @@ test("getGitHubVersion for GHES", async (t) => {
);
});
test("getGitHubVersion for different domain", async (t) => {
test.serial("getGitHubVersion for different domain", async (t) => {
mockGetMetaVersionHeader(undefined);
const v3 = await api.getGitHubVersionFromApi(api.getApiClient(), {
auth: "",
@@ -98,7 +98,7 @@ test("getGitHubVersion for different domain", async (t) => {
t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
});
test("getGitHubVersion for GHEC-DR", async (t) => {
test.serial("getGitHubVersion for GHEC-DR", async (t) => {
mockGetMetaVersionHeader("ghe.com");
const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), {
auth: "",
@@ -108,96 +108,99 @@ test("getGitHubVersion for GHEC-DR", async (t) => {
t.deepEqual({ type: util.GitHubVariant.GHEC_DR }, gheDotcom);
});
test("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {
// We don't reclassify arbitrary errors
const arbitraryError = new Error("arbitrary error");
let res = api.wrapApiConfigurationError(arbitraryError);
t.is(res, arbitraryError);
test.serial(
"wrapApiConfigurationError correctly wraps specific configuration errors",
(t) => {
// We don't reclassify arbitrary errors
const arbitraryError = new Error("arbitrary error");
let res = api.wrapApiConfigurationError(arbitraryError);
t.is(res, arbitraryError);
// Same goes for arbitrary errors
const configError = new util.ConfigurationError("arbitrary error");
res = api.wrapApiConfigurationError(configError);
t.is(res, configError);
// Same goes for arbitrary errors
const configError = new util.ConfigurationError("arbitrary error");
res = api.wrapApiConfigurationError(configError);
t.is(res, configError);
// If an HTTP error doesn't contain a specific error message, we don't
// wrap is an an API error.
const httpError = new util.HTTPError("arbitrary HTTP error", 456);
res = api.wrapApiConfigurationError(httpError);
t.is(res, httpError);
// If an HTTP error doesn't contain a specific error message, we don't
// wrap is an an API error.
const httpError = new util.HTTPError("arbitrary HTTP error", 456);
res = api.wrapApiConfigurationError(httpError);
t.is(res, httpError);
// For other HTTP errors, we wrap them as Configuration errors if they contain
// specific error messages.
const httpNotFoundError = new util.HTTPError("commit not found", 404);
res = api.wrapApiConfigurationError(httpNotFoundError);
t.deepEqual(res, new util.ConfigurationError("commit not found"));
// For other HTTP errors, we wrap them as Configuration errors if they contain
// specific error messages.
const httpNotFoundError = new util.HTTPError("commit not found", 404);
res = api.wrapApiConfigurationError(httpNotFoundError);
t.deepEqual(res, new util.ConfigurationError("commit not found"));
const refNotFoundError = new util.HTTPError(
"ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest",
404,
);
res = api.wrapApiConfigurationError(refNotFoundError);
t.deepEqual(
res,
new util.ConfigurationError(
const refNotFoundError = new util.HTTPError(
"ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest",
),
);
404,
);
res = api.wrapApiConfigurationError(refNotFoundError);
t.deepEqual(
res,
new util.ConfigurationError(
"ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest",
),
);
const apiRateLimitError = new util.HTTPError(
"API rate limit exceeded for installation",
403,
);
res = api.wrapApiConfigurationError(apiRateLimitError);
t.deepEqual(
res,
new util.ConfigurationError("API rate limit exceeded for installation"),
);
const apiRateLimitError = new util.HTTPError(
"API rate limit exceeded for installation",
403,
);
res = api.wrapApiConfigurationError(apiRateLimitError);
t.deepEqual(
res,
new util.ConfigurationError("API rate limit exceeded for installation"),
);
const tokenSuggestionMessage =
"Please check that your token is valid and has the required permissions: contents: read, security-events: write";
const badCredentialsError = new util.HTTPError("Bad credentials", 401);
res = api.wrapApiConfigurationError(badCredentialsError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const tokenSuggestionMessage =
"Please check that your token is valid and has the required permissions: contents: read, security-events: write";
const badCredentialsError = new util.HTTPError("Bad credentials", 401);
res = api.wrapApiConfigurationError(badCredentialsError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const notFoundError = new util.HTTPError("Not Found", 404);
res = api.wrapApiConfigurationError(notFoundError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const notFoundError = new util.HTTPError("Not Found", 404);
res = api.wrapApiConfigurationError(notFoundError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const resourceNotAccessibleError = new util.HTTPError(
"Resource not accessible by integration",
403,
);
res = api.wrapApiConfigurationError(resourceNotAccessibleError);
t.deepEqual(
res,
new util.ConfigurationError("Resource not accessible by integration"),
);
const resourceNotAccessibleError = new util.HTTPError(
"Resource not accessible by integration",
403,
);
res = api.wrapApiConfigurationError(resourceNotAccessibleError);
t.deepEqual(
res,
new util.ConfigurationError("Resource not accessible by integration"),
);
// Enablement errors.
const enablementErrorMessages = [
"Code Security must be enabled for this repository to use code scanning",
"Advanced Security must be enabled for this repository to use code scanning",
"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
];
const transforms = [
(msg: string) => msg,
(msg: string) => msg.toLowerCase(),
(msg: string) => msg.toLocaleUpperCase(),
];
// Enablement errors.
const enablementErrorMessages = [
"Code Security must be enabled for this repository to use code scanning",
"Advanced Security must be enabled for this repository to use code scanning",
"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
];
const transforms = [
(msg: string) => msg,
(msg: string) => msg.toLowerCase(),
(msg: string) => msg.toLocaleUpperCase(),
];
for (const enablementErrorMessage of enablementErrorMessages) {
for (const transform of transforms) {
const enablementError = new util.HTTPError(
transform(enablementErrorMessage),
403,
);
res = api.wrapApiConfigurationError(enablementError);
t.deepEqual(
res,
new util.ConfigurationError(
api.getFeatureEnablementError(enablementError.message),
),
);
for (const enablementErrorMessage of enablementErrorMessages) {
for (const transform of transforms) {
const enablementError = new util.HTTPError(
transform(enablementErrorMessage),
403,
);
res = api.wrapApiConfigurationError(enablementError);
t.deepEqual(
res,
new util.ConfigurationError(
api.getFeatureEnablementError(enablementError.message),
),
);
}
}
}
});
},
);
+1 -1
View File
@@ -1 +1 @@
{"maximumVersion": "3.20", "minimumVersion": "3.14"}
{"maximumVersion": "3.21", "minimumVersion": "3.14"}
+22 -19
View File
@@ -131,27 +131,30 @@ for (const [platform, arch] of [
["linux", "arm64"],
["win32", "arm64"],
]) {
test(`wrapCliConfigurationError - ${platform}/${arch} unsupported`, (t) => {
sinon.stub(process, "platform").value(platform);
sinon.stub(process, "arch").value(arch);
const commandError = new CommandInvocationError(
"codeql",
["version"],
1,
"Some error",
);
const cliError = new CliError(commandError);
test.serial(
`wrapCliConfigurationError - ${platform}/${arch} unsupported`,
(t) => {
sinon.stub(process, "platform").value(platform);
sinon.stub(process, "arch").value(arch);
const commandError = new CommandInvocationError(
"codeql",
["version"],
1,
"Some error",
);
const cliError = new CliError(commandError);
const wrappedError = wrapCliConfigurationError(cliError);
const wrappedError = wrapCliConfigurationError(cliError);
t.true(wrappedError instanceof ConfigurationError);
t.true(
wrappedError.message.includes(
"CodeQL CLI does not support the platform/architecture combination",
),
);
t.true(wrappedError.message.includes(`${platform}/${arch}`));
});
t.true(wrappedError instanceof ConfigurationError);
t.true(
wrappedError.message.includes(
"CodeQL CLI does not support the platform/architecture combination",
),
);
t.true(wrappedError.message.includes(`${platform}/${arch}`));
},
);
}
test("wrapCliConfigurationError - supported platform", (t) => {
+392 -354
View File
@@ -120,19 +120,53 @@ async function stubCodeql(): Promise<codeql.CodeQL> {
return codeqlObject;
}
test("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
const features = createFeatures([]);
test.serial(
"downloads and caches explicitly requested bundles that aren't in the toolcache",
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const versions = ["20200601", "20200610"];
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
const url = mockBundleDownloadApi({
tagName: `codeql-bundle-${version}`,
isPinned: false,
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(result.toolsVersion, `0.0.0-${version}`);
t.is(result.toolsSource, ToolsSource.Download);
}
t.is(toolcache.findAllVersions("CodeQL").length, 2);
});
},
);
test.serial(
"caches semantically versioned bundles using their semantic version number",
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const url = mockBundleDownloadApi({
tagName: `codeql-bundle-${version}`,
tagName: `codeql-bundle-v2.15.0`,
isPinned: false,
});
const result = await codeql.setupCodeQL(
@@ -146,78 +180,53 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
false,
);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(result.toolsVersion, `0.0.0-${version}`);
t.is(toolcache.findAllVersions("CodeQL").length, 1);
t.assert(toolcache.find("CodeQL", `2.15.0`));
t.is(result.toolsVersion, `2.15.0`);
t.is(result.toolsSource, ToolsSource.Download);
}
t.is(toolcache.findAllVersions("CodeQL").length, 2);
});
});
test("caches semantically versioned bundles using their semantic version number", async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const url = mockBundleDownloadApi({
tagName: `codeql-bundle-v2.15.0`,
isPinned: false,
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
},
);
t.is(toolcache.findAllVersions("CodeQL").length, 1);
t.assert(toolcache.find("CodeQL", `2.15.0`));
t.is(result.toolsVersion, `2.15.0`);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
});
test.serial(
"downloads an explicitly requested bundle even if a different version is cached",
async (t) => {
const features = createFeatures([]);
test("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
});
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
const url = mockBundleDownloadApi({
tagName: "codeql-bundle-20200610",
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
const url = mockBundleDownloadApi({
tagName: "codeql-bundle-20200610",
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
});
},
);
const EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES = [
{
@@ -234,37 +243,42 @@ for (const {
tagName,
expectedToolcacheVersion,
} of EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES) {
test(`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`, async (t) => {
const features = createFeatures([]);
test.serial(
`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`,
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
const url = mockBundleDownloadApi({
tagName,
const url = mockBundleDownloadApi({
tagName,
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
t.deepEqual(result.toolsVersion, expectedToolcacheVersion);
t.is(result.toolsSource, ToolsSource.Download);
t.assert(
Number.isInteger(
result.toolsDownloadStatusReport?.downloadDurationMs,
),
);
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
t.deepEqual(result.toolsVersion, expectedToolcacheVersion);
t.is(result.toolsSource, ToolsSource.Download);
t.assert(
Number.isInteger(result.toolsDownloadStatusReport?.downloadDurationMs),
);
});
});
},
);
}
for (const toolcacheVersion of [
@@ -273,7 +287,7 @@ for (const toolcacheVersion of [
SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
`${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`,
]) {
test(
test.serial(
`uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` +
`${toolcacheVersion} is installed`,
async (t) => {
@@ -308,158 +322,170 @@ for (const toolcacheVersion of [
);
}
test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
const features = createFeatures([]);
test.serial(
`uses a cached bundle when no tools input is given on GHES`,
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
});
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
t.is(result.toolsSource, ToolsSource.Toolcache);
t.is(result.toolsDownloadStatusReport?.combinedDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.extractionDurationMs, undefined);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
},
);
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
test.serial(
`downloads bundle if only an unpinned version is cached on GHES`,
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: false,
tmpDir,
});
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
t.is(result.toolsSource, ToolsSource.Toolcache);
t.is(result.toolsDownloadStatusReport?.combinedDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.extractionDurationMs, undefined);
});
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
});
test(`downloads bundle if only an unpinned version is cached on GHES`, async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: false,
tmpDir,
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
},
);
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
});
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
test.serial(
'downloads bundle if "latest" tools specified but not cached',
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
});
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
const result = await codeql.setupCodeQL(
"latest",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test('downloads bundle if "latest" tools specified but not cached', async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
},
);
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
test.serial(
"bundle URL from another repo is cached as 0.0.0-bundleVersion",
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
const releasesApiMock = mockReleaseApi({
assetNames: ["cli-version-2.14.6.txt"],
tagName: "codeql-bundle-20230203",
});
mockBundleDownloadApi({
repo: "codeql-testing/codeql-cli-nightlies",
platformSpecific: false,
tagName: "codeql-bundle-20230203",
});
const result = await codeql.setupCodeQL(
"https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.is(result.toolsVersion, "0.0.0-20230203");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
t.is(cachedVersions[0], "0.0.0-20230203");
t.false(releasesApiMock.isDone());
});
const result = await codeql.setupCodeQL(
"latest",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
const releasesApiMock = mockReleaseApi({
assetNames: ["cli-version-2.14.6.txt"],
tagName: "codeql-bundle-20230203",
});
mockBundleDownloadApi({
repo: "codeql-testing/codeql-cli-nightlies",
platformSpecific: false,
tagName: "codeql-bundle-20230203",
});
const result = await codeql.setupCodeQL(
"https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.is(result.toolsVersion, "0.0.0-20230203");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
t.is(cachedVersions[0], "0.0.0-20230203");
t.false(releasesApiMock.isDone());
});
});
},
);
function assertDurationsInteger(
t: ExecutionContext<unknown>,
@@ -472,7 +498,7 @@ function assertDurationsInteger(
}
}
test("getExtraOptions works for explicit paths", (t) => {
test.serial("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
@@ -483,11 +509,11 @@ test("getExtraOptions works for explicit paths", (t) => {
);
});
test("getExtraOptions works for wildcards", (t) => {
test.serial("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
test("getExtraOptions works for wildcards and explicit paths", (t) => {
test.serial("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
@@ -499,7 +525,7 @@ test("getExtraOptions works for wildcards and explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
test("getExtraOptions throws for bad content", (t) => {
test.serial("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
@@ -564,7 +590,7 @@ const injectedConfigMacro = test.macro({
`databaseInitCluster() injected config: ${providedTitle}`,
});
test(
test.serial(
"basic",
injectedConfigMacro,
{
@@ -574,7 +600,7 @@ test(
{},
);
test(
test.serial(
"injected packs from input",
injectedConfigMacro,
{
@@ -587,7 +613,7 @@ test(
},
);
test(
test.serial(
"injected packs from input with existing packs combines",
injectedConfigMacro,
{
@@ -609,7 +635,7 @@ test(
},
);
test(
test.serial(
"injected packs from input with existing packs overrides",
injectedConfigMacro,
{
@@ -629,7 +655,7 @@ test(
);
// similar, but with queries
test(
test.serial(
"injected queries from input",
injectedConfigMacro,
{
@@ -649,7 +675,7 @@ test(
},
);
test(
test.serial(
"injected queries from input overrides",
injectedConfigMacro,
{
@@ -673,7 +699,7 @@ test(
},
);
test(
test.serial(
"injected queries from input combines",
injectedConfigMacro,
{
@@ -701,7 +727,7 @@ test(
},
);
test(
test.serial(
"injected queries from input combines 2",
injectedConfigMacro,
{
@@ -723,7 +749,7 @@ test(
},
);
test(
test.serial(
"injected queries and packs, but empty",
injectedConfigMacro,
{
@@ -742,7 +768,7 @@ test(
{},
);
test(
test.serial(
"repo property queries have the highest precedence",
injectedConfigMacro,
{
@@ -764,7 +790,7 @@ test(
},
);
test(
test.serial(
"repo property queries combines with queries input",
injectedConfigMacro,
{
@@ -791,7 +817,7 @@ test(
},
);
test(
test.serial(
"repo property queries combines everything else",
injectedConfigMacro,
{
@@ -820,55 +846,61 @@ test(
},
);
test("passes a code scanning config AND qlconfig to the CLI", async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
"/path/to/qlconfig.yml",
getRunnerLogger(true),
);
test.serial(
"passes a code scanning config AND qlconfig to the CLI",
async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
"/path/to/qlconfig.yml",
getRunnerLogger(true),
);
const args = runnerConstructorStub.firstCall.args[1] as string[];
// should have used a config file
const hasCodeScanningConfigArg = args.some((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
const args = runnerConstructorStub.firstCall.args[1] as string[];
// should have used a config file
const hasCodeScanningConfigArg = args.some((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
// should have passed a qlconfig file
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
});
});
// should have passed a qlconfig file
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
});
},
);
test("does not pass a qlconfig to the CLI when it is undefined", async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
test.serial(
"does not pass a qlconfig to the CLI when it is undefined",
async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
undefined, // undefined qlconfigFile
getRunnerLogger(true),
);
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
undefined, // undefined qlconfigFile
getRunnerLogger(true),
);
const args = runnerConstructorStub.firstCall.args[1] as any[];
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
});
});
const args = runnerConstructorStub.firstCall.args[1] as any[];
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
});
},
);
test("runTool summarizes several fatal errors", async (t) => {
test.serial("runTool summarizes several fatal errors", async (t) => {
const heapError =
"A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
const datasetImportError =
@@ -905,7 +937,7 @@ test("runTool summarizes several fatal errors", async (t) => {
);
});
test("runTool summarizes autobuilder errors", async (t) => {
test.serial("runTool summarizes autobuilder errors", async (t) => {
const stderr = `
[2019-09-18 12:00:00] [autobuild] A non-error message
[2019-09-18 12:00:00] Untagged message
@@ -938,7 +970,7 @@ test("runTool summarizes autobuilder errors", async (t) => {
);
});
test("runTool truncates long autobuilder errors", async (t) => {
test.serial("runTool truncates long autobuilder errors", async (t) => {
const stderr = Array.from(
{ length: 20 },
(_, i) => `[2019-09-18 12:00:00] [autobuild] [ERROR] line${i + 1}`,
@@ -964,7 +996,7 @@ test("runTool truncates long autobuilder errors", async (t) => {
);
});
test("runTool recognizes fatal internal errors", async (t) => {
test.serial("runTool recognizes fatal internal errors", async (t) => {
const stderr = `
[11/31 eval 8m19s] Evaluation done; writing results to codeql/go-queries/Security/CWE-020/MissingRegexpAnchor.bqrs.
Oops! A fatal internal error occurred. Details:
@@ -989,64 +1021,70 @@ test("runTool recognizes fatal internal errors", async (t) => {
);
});
test("runTool outputs last line of stderr if fatal error could not be found", async (t) => {
const cliStderr = "line1\nline2\nline3\nline4\nline5";
stubToolRunnerConstructor(32, cliStderr);
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
test.serial(
"runTool outputs last line of stderr if fatal error could not be found",
async (t) => {
const cliStderr = "line1\nline2\nline3\nline4\nline5";
stubToolRunnerConstructor(32, cliStderr);
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(
async () =>
await codeqlObject.finalizeDatabase(
"db",
"--threads=2",
"--ram=2048",
false,
),
{
instanceOf: util.ConfigurationError,
message: new RegExp(
'Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' +
"Exit code was 32 and last log line was: line5\\. See the logs for more details\\.",
),
},
);
});
await t.throwsAsync(
async () =>
await codeqlObject.finalizeDatabase(
"db",
"--threads=2",
"--ram=2048",
false,
),
{
instanceOf: util.ConfigurationError,
message: new RegExp(
'Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' +
"Exit code was 32 and last log line was: line5\\. See the logs for more details\\.",
),
},
);
},
);
test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
test.serial(
"Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS",
async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
'{ "database": { "init": ["--overwrite"] } }';
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
'{ "database": { "init": ["--overwrite"] } }';
await codeqlObject.databaseInitCluster(
stubConfig,
"sourceRoot",
undefined,
undefined,
getRunnerLogger(false),
);
await codeqlObject.databaseInitCluster(
stubConfig,
"sourceRoot",
undefined,
undefined,
getRunnerLogger(false),
);
t.true(runnerConstructorStub.calledOnce);
const args = runnerConstructorStub.firstCall.args[1] as string[];
t.is(
args.filter((option: string) => option === "--overwrite").length,
1,
"--overwrite should only be passed once",
);
t.true(runnerConstructorStub.calledOnce);
const args = runnerConstructorStub.firstCall.args[1] as string[];
t.is(
args.filter((option: string) => option === "--overwrite").length,
1,
"--overwrite should only be passed once",
);
// Clean up
const configArg = args.find((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg!.split("=")[1];
await fs.promises.rm(configFile, { force: true });
});
// Clean up
const configArg = args.find((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg!.split("=")[1];
await fs.promises.rm(configFile, { force: true });
},
);
export function stubToolRunnerConstructor(
exitCode: number = 0,
+271 -247
View File
@@ -137,7 +137,7 @@ function mockListLanguages(languages: string[]) {
sinon.stub(api, "getApiClient").value(() => client);
}
test("load empty config", async (t) => {
test.serial("load empty config", async (t) => {
return await withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const languages = "javascript,python";
@@ -178,7 +178,7 @@ test("load empty config", async (t) => {
});
});
test("load code quality config", async (t) => {
test.serial("load code quality config", async (t) => {
return await withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const languages = "actions";
@@ -228,65 +228,68 @@ test("load code quality config", async (t) => {
});
});
test("initActionState doesn't throw if there are queries configured in the repository properties", async (t) => {
return await withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const languages = "javascript";
test.serial(
"initActionState doesn't throw if there are queries configured in the repository properties",
async (t) => {
return await withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const languages = "javascript";
const codeql = createStubCodeQL({
async betterResolveLanguages() {
return {
extractors: {
javascript: [{ extractor_root: "" }],
},
};
},
const codeql = createStubCodeQL({
async betterResolveLanguages() {
return {
extractors: {
javascript: [{ extractor_root: "" }],
},
};
},
});
// This should be ignored and no error should be thrown.
const repositoryProperties = {
"github-codeql-extra-queries": "+foo",
};
// Expected configuration for a CQ-only analysis.
const computedConfig: UserConfig = {
"disable-default-queries": true,
queries: [{ uses: "code-quality" }],
"query-filters": [],
};
const expectedConfig = createTestConfig({
analysisKinds: [AnalysisKind.CodeQuality],
languages: [KnownLanguage.javascript],
codeQLCmd: codeql.getPath(),
computedConfig,
dbLocation: path.resolve(tempDir, "codeql_databases"),
debugArtifactName: "",
debugDatabaseName: "",
tempDir,
repositoryProperties,
});
await t.notThrowsAsync(async () => {
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
analysisKinds: [AnalysisKind.CodeQuality],
languagesInput: languages,
repository: { owner: "github", repo: "example" },
tempDir,
codeql,
repositoryProperties,
logger,
}),
);
t.deepEqual(config, expectedConfig);
});
});
},
);
// This should be ignored and no error should be thrown.
const repositoryProperties = {
"github-codeql-extra-queries": "+foo",
};
// Expected configuration for a CQ-only analysis.
const computedConfig: UserConfig = {
"disable-default-queries": true,
queries: [{ uses: "code-quality" }],
"query-filters": [],
};
const expectedConfig = createTestConfig({
analysisKinds: [AnalysisKind.CodeQuality],
languages: [KnownLanguage.javascript],
codeQLCmd: codeql.getPath(),
computedConfig,
dbLocation: path.resolve(tempDir, "codeql_databases"),
debugArtifactName: "",
debugDatabaseName: "",
tempDir,
repositoryProperties,
});
await t.notThrowsAsync(async () => {
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
analysisKinds: [AnalysisKind.CodeQuality],
languagesInput: languages,
repository: { owner: "github", repo: "example" },
tempDir,
codeql,
repositoryProperties,
logger,
}),
);
t.deepEqual(config, expectedConfig);
});
});
});
test("loading a saved config produces the same config", async (t) => {
test.serial("loading a saved config produces the same config", async (t) => {
return await withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
@@ -333,7 +336,7 @@ test("loading a saved config produces the same config", async (t) => {
});
});
test("loading config with version mismatch throws", async (t) => {
test.serial("loading config with version mismatch throws", async (t) => {
return await withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
@@ -385,7 +388,7 @@ test("loading config with version mismatch throws", async (t) => {
});
});
test("load input outside of workspace", async (t) => {
test.serial("load input outside of workspace", async (t) => {
return await withTmpDir(async (tempDir) => {
try {
await configUtils.initConfig(
@@ -410,7 +413,7 @@ test("load input outside of workspace", async (t) => {
});
});
test("load non-local input with invalid repo syntax", async (t) => {
test.serial("load non-local input with invalid repo syntax", async (t) => {
return await withTmpDir(async (tempDir) => {
// no filename given, just a repo
const configFile = "octo-org/codeql-config@main";
@@ -438,7 +441,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
});
});
test("load non-existent input", async (t) => {
test.serial("load non-existent input", async (t) => {
return await withTmpDir(async (tempDir) => {
const languagesInput = "javascript";
const configFile = "input";
@@ -468,7 +471,7 @@ test("load non-existent input", async (t) => {
});
});
test("load non-empty input", async (t) => {
test.serial("load non-empty input", async (t) => {
return await withTmpDir(async (tempDir) => {
const codeql = createStubCodeQL({
async betterResolveLanguages() {
@@ -539,18 +542,20 @@ test("load non-empty input", async (t) => {
});
});
test("Using config input and file together, config input should be used.", async (t) => {
return await withTmpDir(async (tempDir) => {
process.env["RUNNER_TEMP"] = tempDir;
process.env["GITHUB_WORKSPACE"] = tempDir;
test.serial(
"Using config input and file together, config input should be used.",
async (t) => {
return await withTmpDir(async (tempDir) => {
process.env["RUNNER_TEMP"] = tempDir;
process.env["GITHUB_WORKSPACE"] = tempDir;
const inputFileContents = `
const inputFileContents = `
name: my config
queries:
- uses: ./foo_file`;
const configFilePath = createConfigFile(inputFileContents, tempDir);
const configFilePath = createConfigFile(inputFileContents, tempDir);
const configInput = `
const configInput = `
name: my config
queries:
- uses: ./foo
@@ -561,39 +566,40 @@ test("Using config input and file together, config input should be used.", async
- c/d@1.2.3
`;
fs.mkdirSync(path.join(tempDir, "foo"));
fs.mkdirSync(path.join(tempDir, "foo"));
const codeql = createStubCodeQL({
async betterResolveLanguages() {
return {
extractors: {
javascript: [{ extractor_root: "" }],
python: [{ extractor_root: "" }],
},
};
},
const codeql = createStubCodeQL({
async betterResolveLanguages() {
return {
extractors: {
javascript: [{ extractor_root: "" }],
python: [{ extractor_root: "" }],
},
};
},
});
// Only JS, python packs will be ignored
const languagesInput = "javascript";
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile: configFilePath,
configInput,
tempDir,
codeql,
workspacePath: tempDir,
}),
);
t.deepEqual(config.originalUserInput, yaml.load(configInput));
});
},
);
// Only JS, python packs will be ignored
const languagesInput = "javascript";
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile: configFilePath,
configInput,
tempDir,
codeql,
workspacePath: tempDir,
}),
);
t.deepEqual(config.originalUserInput, yaml.load(configInput));
});
});
test("API client used when reading remote config", async (t) => {
test.serial("API client used when reading remote config", async (t) => {
return await withTmpDir(async (tempDir) => {
const codeql = createStubCodeQL({
async betterResolveLanguages() {
@@ -642,34 +648,37 @@ test("API client used when reading remote config", async (t) => {
});
});
test("Remote config handles the case where a directory is provided", async (t) => {
return await withTmpDir(async (tempDir) => {
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
test.serial(
"Remote config handles the case where a directory is provided",
async (t) => {
return await withTmpDir(async (tempDir) => {
const dummyResponse = []; // directories are returned as arrays
mockGetContents(dummyResponse);
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: repoReference,
tempDir,
workspacePath: tempDir,
}),
);
throw new Error("initConfig did not throw error");
} catch (err) {
t.deepEqual(
err,
new ConfigurationError(
errorMessages.getConfigFileDirectoryGivenMessage(repoReference),
),
);
}
});
});
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: repoReference,
tempDir,
workspacePath: tempDir,
}),
);
throw new Error("initConfig did not throw error");
} catch (err) {
t.deepEqual(
err,
new ConfigurationError(
errorMessages.getConfigFileDirectoryGivenMessage(repoReference),
),
);
}
});
},
);
test("Invalid format of remote config handled correctly", async (t) => {
test.serial("Invalid format of remote config handled correctly", async (t) => {
return await withTmpDir(async (tempDir) => {
const dummyResponse = {
// note no "content" property here
@@ -698,7 +707,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
});
});
test("No detected languages", async (t) => {
test.serial("No detected languages", async (t) => {
return await withTmpDir(async (tempDir) => {
mockListLanguages([]);
const codeql = createStubCodeQL({
@@ -726,7 +735,7 @@ test("No detected languages", async (t) => {
});
});
test("Unknown languages", async (t) => {
test.serial("Unknown languages", async (t) => {
return await withTmpDir(async (tempDir) => {
const languagesInput = "rubbish,english";
@@ -753,7 +762,7 @@ test("Unknown languages", async (t) => {
const mockLogger = getRunnerLogger(true);
test("no generateRegistries when registries is undefined", async (t) => {
test.serial("no generateRegistries when registries is undefined", async (t) => {
return await withTmpDir(async (tmpDir) => {
const registriesInput = undefined;
const logger = getRunnerLogger(true);
@@ -765,24 +774,27 @@ test("no generateRegistries when registries is undefined", async (t) => {
});
});
test("generateRegistries prefers original CODEQL_REGISTRIES_AUTH", async (t) => {
return await withTmpDir(async (tmpDir) => {
process.env.CODEQL_REGISTRIES_AUTH = "original";
const registriesInput = yaml.dump([
{
url: "http://ghcr.io",
packages: ["codeql/*", "codeql-testing/*"],
token: "not-a-token",
},
]);
const logger = getRunnerLogger(true);
const { registriesAuthTokens, qlconfigFile } =
await configUtils.generateRegistries(registriesInput, tmpDir, logger);
test.serial(
"generateRegistries prefers original CODEQL_REGISTRIES_AUTH",
async (t) => {
return await withTmpDir(async (tmpDir) => {
process.env.CODEQL_REGISTRIES_AUTH = "original";
const registriesInput = yaml.dump([
{
url: "http://ghcr.io",
packages: ["codeql/*", "codeql-testing/*"],
token: "not-a-token",
},
]);
const logger = getRunnerLogger(true);
const { registriesAuthTokens, qlconfigFile } =
await configUtils.generateRegistries(registriesInput, tmpDir, logger);
t.is(registriesAuthTokens, "original");
t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml"));
});
});
t.is(registriesAuthTokens, "original");
t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml"));
});
},
);
// getLanguages
@@ -860,7 +872,7 @@ const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
expectedLanguages: ["javascript"],
},
].forEach((args) => {
test(`getLanguages: ${args.name}`, async (t) => {
test.serial(`getLanguages: ${args.name}`, async (t) => {
const mockRequest = mockLanguagesInRepo(args.languagesInRepository);
const stubExtractorEntry = {
extractor_root: "",
@@ -930,46 +942,55 @@ for (const { displayName, language, feature } of [
feature: Feature.DisableCsharpBuildless,
},
]) {
test(`Build mode not overridden when disable ${displayName} buildless feature flag disabled`, async (t) => {
const messages: LoggedMessage[] = [];
const buildMode = await configUtils.parseBuildModeInput(
"none",
[language],
createFeatures([]),
getRecordingLogger(messages),
);
t.is(buildMode, BuildMode.None);
t.deepEqual(messages, []);
});
test.serial(
`Build mode not overridden when disable ${displayName} buildless feature flag disabled`,
async (t) => {
const messages: LoggedMessage[] = [];
const buildMode = await configUtils.parseBuildModeInput(
"none",
[language],
createFeatures([]),
getRecordingLogger(messages),
);
t.is(buildMode, BuildMode.None);
t.deepEqual(messages, []);
},
);
test(`Build mode not overridden for other languages when disable ${displayName} buildless feature flag enabled`, async (t) => {
const messages: LoggedMessage[] = [];
const buildMode = await configUtils.parseBuildModeInput(
"none",
[KnownLanguage.python],
createFeatures([feature]),
getRecordingLogger(messages),
);
t.is(buildMode, BuildMode.None);
t.deepEqual(messages, []);
});
test.serial(
`Build mode not overridden for other languages when disable ${displayName} buildless feature flag enabled`,
async (t) => {
const messages: LoggedMessage[] = [];
const buildMode = await configUtils.parseBuildModeInput(
"none",
[KnownLanguage.python],
createFeatures([feature]),
getRecordingLogger(messages),
);
t.is(buildMode, BuildMode.None);
t.deepEqual(messages, []);
},
);
test(`Build mode overridden when analyzing ${displayName} and disable ${displayName} buildless feature flag enabled`, async (t) => {
const messages: LoggedMessage[] = [];
const buildMode = await configUtils.parseBuildModeInput(
"none",
[language],
createFeatures([feature]),
getRecordingLogger(messages),
);
t.is(buildMode, BuildMode.Autobuild);
t.deepEqual(messages, [
{
message: `Scanning ${displayName} code without a build is temporarily unavailable. Falling back to 'autobuild' build mode.`,
type: "warning",
},
]);
});
test.serial(
`Build mode overridden when analyzing ${displayName} and disable ${displayName} buildless feature flag enabled`,
async (t) => {
const messages: LoggedMessage[] = [];
const buildMode = await configUtils.parseBuildModeInput(
"none",
[language],
createFeatures([feature]),
getRecordingLogger(messages),
);
t.is(buildMode, BuildMode.Autobuild);
t.deepEqual(messages, [
{
message: `Scanning ${displayName} code without a build is temporarily unavailable. Falling back to 'autobuild' build mode.`,
type: "warning",
},
]);
},
);
}
interface OverlayDatabaseModeTestSetup {
@@ -1106,7 +1127,7 @@ const getOverlayDatabaseModeMacro = test.macro({
title: (_, title) => `getOverlayDatabaseMode: ${title}`,
});
test(
test.serial(
getOverlayDatabaseModeMacro,
"Environment variable override - Overlay",
{
@@ -1118,7 +1139,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Environment variable override - OverlayBase",
{
@@ -1130,7 +1151,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Environment variable override - None",
{
@@ -1142,7 +1163,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Ignore invalid environment variable",
{
@@ -1155,7 +1176,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Ignore feature flag when analyzing non-default branch",
{
@@ -1168,7 +1189,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch when feature enabled",
{
@@ -1182,7 +1203,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch when feature enabled with custom analysis",
{
@@ -1199,7 +1220,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch when code-scanning feature enabled",
{
@@ -1216,7 +1237,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if runner disk space is too low",
{
@@ -1238,7 +1259,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if we can't determine runner disk space",
{
@@ -1257,7 +1278,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch if runner disk space is too low and skip resource checks flag is enabled",
{
@@ -1279,7 +1300,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if runner disk space is below v2 limit and v2 resource checks enabled",
{
@@ -1302,7 +1323,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch if runner disk space is between v2 and v1 limits and v2 resource checks enabled",
{
@@ -1324,7 +1345,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if runner disk space is between v2 and v1 limits and v2 resource checks not enabled",
{
@@ -1346,7 +1367,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if memory flag is too low",
{
@@ -1365,7 +1386,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch if memory flag is too low but CodeQL >= 2.24.3",
{
@@ -1384,7 +1405,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch if memory flag is too low and skip resource checks flag is enabled",
{
@@ -1403,7 +1424,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when cached status indicates previous failure",
{
@@ -1423,7 +1444,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when cached status indicates previous failure",
{
@@ -1443,7 +1464,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when code-scanning feature enabled with disable-default-queries",
{
@@ -1464,7 +1485,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when code-scanning feature enabled with packs",
{
@@ -1485,7 +1506,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when code-scanning feature enabled with queries",
{
@@ -1506,7 +1527,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when code-scanning feature enabled with query-filters",
{
@@ -1527,7 +1548,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when only language-specific feature enabled",
{
@@ -1542,7 +1563,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when only code-scanning feature enabled",
{
@@ -1557,7 +1578,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when language-specific feature disabled",
{
@@ -1572,7 +1593,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay analysis on PR when feature enabled",
{
@@ -1586,7 +1607,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay analysis on PR when feature enabled with custom analysis",
{
@@ -1603,7 +1624,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay analysis on PR when code-scanning feature enabled",
{
@@ -1620,7 +1641,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR if runner disk space is too low",
{
@@ -1642,7 +1663,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay analysis on PR if runner disk space is too low and skip resource checks flag is enabled",
{
@@ -1664,7 +1685,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR if we can't determine runner disk space",
{
@@ -1683,7 +1704,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR if memory flag is too low",
{
@@ -1702,7 +1723,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay analysis on PR if memory flag is too low but CodeQL >= 2.24.3",
{
@@ -1721,7 +1742,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay analysis on PR if memory flag is too low and skip resource checks flag is enabled",
{
@@ -1740,7 +1761,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when code-scanning feature enabled with disable-default-queries",
{
@@ -1761,7 +1782,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when code-scanning feature enabled with packs",
{
@@ -1782,7 +1803,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when code-scanning feature enabled with queries",
{
@@ -1803,7 +1824,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when code-scanning feature enabled with query-filters",
{
@@ -1824,7 +1845,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when only language-specific feature enabled",
{
@@ -1839,7 +1860,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when only code-scanning feature enabled",
{
@@ -1854,7 +1875,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when language-specific feature disabled",
{
@@ -1869,7 +1890,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay PR analysis by env",
{
@@ -1881,7 +1902,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay PR analysis by env on a runner with low disk space",
{
@@ -1894,7 +1915,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay PR analysis by feature flag",
{
@@ -1908,7 +1929,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Fallback due to autobuild with traced language",
{
@@ -1923,7 +1944,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Fallback due to no build mode with traced language",
{
@@ -1938,7 +1959,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Fallback due to old CodeQL version",
{
@@ -1952,7 +1973,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Fallback due to missing git root",
{
@@ -1966,7 +1987,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Fallback due to old git version",
{
@@ -1980,7 +2001,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Fallback when git version cannot be determined",
{
@@ -1994,7 +2015,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"No overlay when disabled via repository property",
{
@@ -2012,7 +2033,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Overlay not disabled when repository property is false",
{
@@ -2029,7 +2050,7 @@ test(
},
);
test(
test.serial(
getOverlayDatabaseModeMacro,
"Environment variable override takes precedence over repository property",
{
@@ -2046,7 +2067,7 @@ test(
// Exercise language-specific overlay analysis features code paths
for (const language in KnownLanguage) {
test(
test.serial(
getOverlayDatabaseModeMacro,
`Check default overlay analysis feature for ${language}`,
{
@@ -2062,13 +2083,16 @@ for (const language in KnownLanguage) {
);
}
test("hasActionsWorkflows doesn't throw if workflows folder doesn't exist", async (t) => {
return withTmpDir(async (tmpDir) => {
t.notThrows(() => configUtils.hasActionsWorkflows(tmpDir));
});
});
test.serial(
"hasActionsWorkflows doesn't throw if workflows folder doesn't exist",
async (t) => {
return withTmpDir(async (tmpDir) => {
t.notThrows(() => configUtils.hasActionsWorkflows(tmpDir));
});
},
);
test("getPrimaryAnalysisConfig - single analysis kind", (t) => {
test.serial("getPrimaryAnalysisConfig - single analysis kind", (t) => {
// If only one analysis kind is configured, we expect to get the matching configuration.
for (const analysisKind of supportedAnalysisKinds) {
const singleKind = createTestConfig({ analysisKinds: [analysisKind] });
@@ -2076,7 +2100,7 @@ test("getPrimaryAnalysisConfig - single analysis kind", (t) => {
}
});
test("getPrimaryAnalysisConfig - Code Scanning + Code Quality", (t) => {
test.serial("getPrimaryAnalysisConfig - Code Scanning + Code Quality", (t) => {
// For CS+CQ, we expect to get the Code Scanning configuration.
const codeScanningAndCodeQuality = createTestConfig({
analysisKinds: [AnalysisKind.CodeScanning, AnalysisKind.CodeQuality],
+97 -88
View File
@@ -82,70 +82,76 @@ function getCodeQL() {
});
}
test("Abort database upload if 'upload-database' input set to false", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
test.serial(
"Abort database upload if 'upload-database' input set to false",
async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Database upload disabled in workflow. Skipping upload.",
) !== undefined,
);
});
});
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Database upload disabled in workflow. Skipping upload.",
) !== undefined,
);
});
},
);
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
test.serial(
"Abort database upload if 'analysis-kinds: code-scanning' is not enabled",
async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
await mockHttpRequests(201);
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
{
...getTestConfig(tmpDir),
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
) !== undefined,
);
});
});
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
{
...getTestConfig(tmpDir),
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
) !== undefined,
);
});
},
);
test("Abort database upload if running against GHES", async (t) => {
test.serial("Abort database upload if running against GHES", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
@@ -177,35 +183,38 @@ test("Abort database upload if running against GHES", async (t) => {
});
});
test("Abort database upload if not analyzing default branch", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
test.serial(
"Abort database upload if not analyzing default branch",
async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.",
) !== undefined,
);
});
});
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.",
) !== undefined,
);
});
},
);
test("Don't crash if uploading a database fails", async (t) => {
test.serial("Don't crash if uploading a database fails", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
@@ -237,7 +246,7 @@ test("Don't crash if uploading a database fails", async (t) => {
});
});
test("Successfully uploading a database to github.com", async (t) => {
test.serial("Successfully uploading a database to github.com", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
@@ -267,7 +276,7 @@ test("Successfully uploading a database to github.com", async (t) => {
});
});
test("Successfully uploading a database to GHEC-DR", async (t) => {
test.serial("Successfully uploading a database to GHEC-DR", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
+397 -335
View File
@@ -44,27 +44,33 @@ function makeAbsolutePatterns(tmpDir: string, patterns: string[]): string[] {
return patterns.map((pattern) => path.join(tmpDir, pattern));
}
test("getCsharpDependencyDirs - does not include BMN dir if FF is enabled", async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = createStubCodeQL({});
const features = createFeatures([]);
test.serial(
"getCsharpDependencyDirs - does not include BMN dir if FF is disabled",
async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = createStubCodeQL({});
const features = createFeatures([]);
const results = await getCsharpDependencyDirs(codeql, features);
t.false(results.includes(getCsharpTempDependencyDir()));
});
});
const results = await getCsharpDependencyDirs(codeql, features);
t.false(results.includes(getCsharpTempDependencyDir()));
});
},
);
test("getCsharpDependencyDirs - includes BMN dir if FF is enabled", async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpCacheBuildModeNone]);
test.serial(
"getCsharpDependencyDirs - includes BMN dir if FF is enabled",
async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpCacheBuildModeNone]);
const results = await getCsharpDependencyDirs(codeql, features);
t.assert(results.includes(getCsharpTempDependencyDir()));
});
});
const results = await getCsharpDependencyDirs(codeql, features);
t.assert(results.includes(getCsharpTempDependencyDir()));
});
},
);
test("makePatternCheck - returns undefined if no patterns match", async (t) => {
await withTmpDir(async (tmpDir) => {
@@ -85,69 +91,81 @@ test("makePatternCheck - returns all patterns if any pattern matches", async (t)
});
});
test("getCsharpHashPatterns - returns base patterns if any pattern matches", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
test.serial(
"getCsharpHashPatterns - returns base patterns if any pattern matches",
async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).rejects();
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).rejects();
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, CSHARP_BASE_PATTERNS);
});
});
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, CSHARP_BASE_PATTERNS);
});
},
);
test("getCsharpHashPatterns - returns base patterns if any base pattern matches and CsharpNewCacheKey is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
test.serial(
"getCsharpHashPatterns - returns base patterns if any base pattern matches and CsharpNewCacheKey is enabled",
async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub
.withArgs(CSHARP_EXTRA_PATTERNS)
.resolves(CSHARP_EXTRA_PATTERNS);
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub
.withArgs(CSHARP_EXTRA_PATTERNS)
.resolves(CSHARP_EXTRA_PATTERNS);
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, CSHARP_BASE_PATTERNS);
});
});
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, CSHARP_BASE_PATTERNS);
});
},
);
test("getCsharpHashPatterns - returns extra patterns if any extra pattern matches and CsharpNewCacheKey is enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
test.serial(
"getCsharpHashPatterns - returns extra patterns if any extra pattern matches and CsharpNewCacheKey is enabled",
async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined);
makePatternCheckStub
.withArgs(CSHARP_EXTRA_PATTERNS)
.resolves(CSHARP_EXTRA_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined);
makePatternCheckStub
.withArgs(CSHARP_EXTRA_PATTERNS)
.resolves(CSHARP_EXTRA_PATTERNS);
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, CSHARP_EXTRA_PATTERNS);
});
});
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, CSHARP_EXTRA_PATTERNS);
});
},
);
test("getCsharpHashPatterns - returns undefined if neither base nor extra patterns match", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
test.serial(
"getCsharpHashPatterns - returns undefined if neither base nor extra patterns match",
async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([Feature.CsharpNewCacheKey]);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, undefined);
});
});
await t.notThrowsAsync(async () => {
const result = await getCsharpHashPatterns(codeql, features);
t.deepEqual(result, undefined);
});
},
);
test("checkHashPatterns - logs when no patterns match", async (t) => {
const codeql = createStubCodeQL({});
@@ -238,160 +256,169 @@ function makeMockCacheCheck(mockCacheKeys: string[]): RestoreCacheFunc {
};
}
test("downloadDependencyCaches - does not restore caches with feature keys if no features are enabled", async (t) => {
process.env["RUNNER_OS"] = "Linux";
test.serial(
"downloadDependencyCaches - does not restore caches with feature keys if no features are enabled",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
sinon.stub(glob, "hashFiles").resolves("abcdef");
sinon.stub(glob, "hashFiles").resolves("abcdef");
const keyWithFeature = await cacheKey(
codeql,
createFeatures([Feature.CsharpNewCacheKey]),
KnownLanguage.csharp,
// Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above.
[],
);
const keyWithFeature = await cacheKey(
codeql,
createFeatures([Feature.CsharpNewCacheKey]),
KnownLanguage.csharp,
// Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above.
[],
);
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.callsFake(makeMockCacheCheck([keyWithFeature]));
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.callsFake(makeMockCacheCheck([keyWithFeature]));
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const result = await downloadDependencyCaches(
codeql,
createFeatures([]),
[KnownLanguage.csharp],
logger,
);
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Miss);
t.deepEqual(result.restoredKeys, []);
t.assert(restoreCacheStub.calledOnce);
});
const result = await downloadDependencyCaches(
codeql,
createFeatures([]),
[KnownLanguage.csharp],
logger,
);
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Miss);
t.deepEqual(result.restoredKeys, []);
t.assert(restoreCacheStub.calledOnce);
},
);
test("downloadDependencyCaches - restores caches with feature keys if features are enabled", async (t) => {
process.env["RUNNER_OS"] = "Linux";
test.serial(
"downloadDependencyCaches - restores caches with feature keys if features are enabled",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([Feature.CsharpNewCacheKey]);
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([Feature.CsharpNewCacheKey]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const keyWithFeature = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
// Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above.
[],
);
const keyWithFeature = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
// Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above.
[],
);
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.callsFake(makeMockCacheCheck([keyWithFeature]));
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.callsFake(makeMockCacheCheck([keyWithFeature]));
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const result = await downloadDependencyCaches(
codeql,
features,
[KnownLanguage.csharp],
logger,
);
const result = await downloadDependencyCaches(
codeql,
features,
[KnownLanguage.csharp],
logger,
);
// Check that the status report for telemetry indicates that one cache was restored with an exact match.
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Exact);
// Check that the status report for telemetry indicates that one cache was restored with an exact match.
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Exact);
// Check that the restored key has been returned.
const restoredKeys = result.restoredKeys;
t.is(restoredKeys.length, 1);
t.assert(
restoredKeys[0].endsWith(mockHash),
"Expected restored key to end with hash returned by `hashFiles`",
);
// Check that the restored key has been returned.
const restoredKeys = result.restoredKeys;
t.is(restoredKeys.length, 1);
t.assert(
restoredKeys[0].endsWith(mockHash),
"Expected restored key to end with hash returned by `hashFiles`",
);
// `restoreCache` should have been called exactly once.
t.assert(restoreCacheStub.calledOnce);
});
// `restoreCache` should have been called exactly once.
t.assert(restoreCacheStub.calledOnce);
},
);
test("downloadDependencyCaches - restores caches with feature keys if features are enabled for partial matches", async (t) => {
process.env["RUNNER_OS"] = "Linux";
test.serial(
"downloadDependencyCaches - restores caches with feature keys if features are enabled for partial matches",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([Feature.CsharpNewCacheKey]);
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([Feature.CsharpNewCacheKey]);
// We expect two calls to `hashFiles`: the first by the call to `cacheKey` below,
// and the second by `downloadDependencyCaches`. We use the result of the first
// call as part of the cache key that identifies a mock, existing cache. The result
// of the second call is for the primary restore key, which we don't want to match
// the first key so that we can test the restore keys logic.
const restoredHash = "abcdef";
const hashFilesStub = sinon.stub(glob, "hashFiles");
hashFilesStub.onFirstCall().resolves(restoredHash);
hashFilesStub.onSecondCall().resolves("123456");
// We expect two calls to `hashFiles`: the first by the call to `cacheKey` below,
// and the second by `downloadDependencyCaches`. We use the result of the first
// call as part of the cache key that identifies a mock, existing cache. The result
// of the second call is for the primary restore key, which we don't want to match
// the first key so that we can test the restore keys logic.
const restoredHash = "abcdef";
const hashFilesStub = sinon.stub(glob, "hashFiles");
hashFilesStub.onFirstCall().resolves(restoredHash);
hashFilesStub.onSecondCall().resolves("123456");
const keyWithFeature = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
// Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above.
[],
);
const keyWithFeature = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
// Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above.
[],
);
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.callsFake(makeMockCacheCheck([keyWithFeature]));
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.callsFake(makeMockCacheCheck([keyWithFeature]));
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const result = await downloadDependencyCaches(
codeql,
features,
[KnownLanguage.csharp],
logger,
);
const result = await downloadDependencyCaches(
codeql,
features,
[KnownLanguage.csharp],
logger,
);
// Check that the status report for telemetry indicates that one cache was restored with a partial match.
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Partial);
// Check that the status report for telemetry indicates that one cache was restored with a partial match.
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Partial);
// Check that the restored key has been returned.
const restoredKeys = result.restoredKeys;
t.is(restoredKeys.length, 1);
t.assert(
restoredKeys[0].endsWith(restoredHash),
"Expected restored key to end with hash returned by `hashFiles`",
);
// Check that the restored key has been returned.
const restoredKeys = result.restoredKeys;
t.is(restoredKeys.length, 1);
t.assert(
restoredKeys[0].endsWith(restoredHash),
"Expected restored key to end with hash returned by `hashFiles`",
);
t.assert(restoreCacheStub.calledOnce);
});
t.assert(restoreCacheStub.calledOnce);
},
);
test("uploadDependencyCaches - skips upload for a language with no cache config", async (t) => {
const codeql = createStubCodeQL({});
@@ -409,148 +436,62 @@ test("uploadDependencyCaches - skips upload for a language with no cache config"
]);
});
test("uploadDependencyCaches - skips upload if no files for the hash exist", async (t) => {
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const config = createTestConfig({
languages: [KnownLanguage.go],
});
test.serial(
"uploadDependencyCaches - skips upload if no files for the hash exist",
async (t) => {
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const config = createTestConfig({
languages: [KnownLanguage.go],
});
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub.resolves(undefined);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub.resolves(undefined);
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.go);
t.is(result[0].result, CacheStoreResult.NoHash);
});
const result = await uploadDependencyCaches(
codeql,
features,
config,
logger,
);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.go);
t.is(result[0].result, CacheStoreResult.NoHash);
},
);
test("uploadDependencyCaches - skips upload if we know the cache already exists", async (t) => {
process.env["RUNNER_OS"] = "Linux";
test.serial(
"uploadDependencyCaches - skips upload if we know the cache already exists",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
const primaryCacheKey = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
CSHARP_BASE_PATTERNS,
);
const primaryCacheKey = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
CSHARP_BASE_PATTERNS,
);
const config = createTestConfig({
languages: [KnownLanguage.csharp],
dependencyCachingRestoredKeys: [primaryCacheKey],
});
const config = createTestConfig({
languages: [KnownLanguage.csharp],
dependencyCachingRestoredKeys: [primaryCacheKey],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Duplicate);
});
test("uploadDependencyCaches - skips upload if cache size is 0", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(0);
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Empty);
checkExpectedLogMessages(t, messages, [
"Skipping upload of dependency cache",
]);
});
test("uploadDependencyCaches - uploads caches when all requirements are met", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
sinon.stub(actionsCache, "saveCache").resolves();
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Stored);
t.is(result[0].upload_size_bytes, 1024);
checkExpectedLogMessages(t, messages, ["Uploading cache of size"]);
});
test("uploadDependencyCaches - catches `ReserveCacheError` exceptions", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
sinon
.stub(actionsCache, "saveCache")
.throws(new actionsCache.ReserveCacheError("Already in use"));
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
await t.notThrowsAsync(async () => {
const result = await uploadDependencyCaches(
codeql,
features,
@@ -560,12 +501,133 @@ test("uploadDependencyCaches - catches `ReserveCacheError` exceptions", async (t
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Duplicate);
},
);
checkExpectedLogMessages(t, messages, ["Not uploading cache for"]);
});
});
test.serial(
"uploadDependencyCaches - skips upload if cache size is 0",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
test("uploadDependencyCaches - throws other exceptions", async (t) => {
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(0);
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
const result = await uploadDependencyCaches(
codeql,
features,
config,
logger,
);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Empty);
checkExpectedLogMessages(t, messages, [
"Skipping upload of dependency cache",
]);
},
);
test.serial(
"uploadDependencyCaches - uploads caches when all requirements are met",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
sinon.stub(actionsCache, "saveCache").resolves();
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
const result = await uploadDependencyCaches(
codeql,
features,
config,
logger,
);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Stored);
t.is(result[0].upload_size_bytes, 1024);
checkExpectedLogMessages(t, messages, ["Uploading cache of size"]);
},
);
test.serial(
"uploadDependencyCaches - catches `ReserveCacheError` exceptions",
async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
sinon
.stub(actionsCache, "saveCache")
.throws(new actionsCache.ReserveCacheError("Already in use"));
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
await t.notThrowsAsync(async () => {
const result = await uploadDependencyCaches(
codeql,
features,
config,
logger,
);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Duplicate);
checkExpectedLogMessages(t, messages, ["Not uploading cache for"]);
});
},
);
test.serial("uploadDependencyCaches - throws other exceptions", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
+57 -51
View File
@@ -97,14 +97,14 @@ const testShouldPerformDiffInformedAnalysis = test.macro({
title: (_, title) => `shouldPerformDiffInformedAnalysis: ${title}`,
});
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns true in the default test case",
{},
true,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns false when feature flag is disabled from the API",
{
@@ -113,7 +113,7 @@ test(
false,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns false when CODEQL_ACTION_DIFF_INFORMED_QUERIES is set to false",
{
@@ -123,7 +123,7 @@ test(
false,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns true when CODEQL_ACTION_DIFF_INFORMED_QUERIES is set to true",
{
@@ -133,7 +133,7 @@ test(
true,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns false for CodeQL version 2.20.0",
{
@@ -142,7 +142,7 @@ test(
false,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns false for invalid GHES version",
{
@@ -154,7 +154,7 @@ test(
false,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns false for GHES version 3.18.5",
{
@@ -166,7 +166,7 @@ test(
false,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns true for GHES version 3.19.0",
{
@@ -178,7 +178,7 @@ test(
true,
);
test(
test.serial(
testShouldPerformDiffInformedAnalysis,
"returns false when not a pull request",
{
@@ -202,12 +202,12 @@ function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
);
}
test("getDiffRanges: file unchanged", async (t) => {
test.serial("getDiffRanges: file unchanged", async (t) => {
const diffRanges = runGetDiffRanges(0, undefined);
t.deepEqual(diffRanges, []);
});
test("getDiffRanges: file diff too large", async (t) => {
test.serial("getDiffRanges: file diff too large", async (t) => {
const diffRanges = runGetDiffRanges(1000000, undefined);
t.deepEqual(diffRanges, [
{
@@ -218,43 +218,49 @@ test("getDiffRanges: file diff too large", async (t) => {
]);
});
test("getDiffRanges: diff thunk with single addition range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
]);
});
test.serial(
"getDiffRanges: diff thunk with single addition range",
async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
]);
},
);
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,8 +50,6 @@",
" a",
" b",
" c",
"-1",
"-2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, []);
});
test.serial(
"getDiffRanges: diff thunk with single deletion range",
async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,8 +50,6 @@",
" a",
" b",
" c",
"-1",
"-2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, []);
},
);
test("getDiffRanges: diff thunk with single update range", async (t) => {
test.serial("getDiffRanges: diff thunk with single update range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
@@ -275,7 +281,7 @@ test("getDiffRanges: diff thunk with single update range", async (t) => {
]);
});
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
test.serial("getDiffRanges: diff thunk with addition ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,9 @@",
" a",
@@ -302,7 +308,7 @@ test("getDiffRanges: diff thunk with addition ranges", async (t) => {
]);
});
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
test.serial("getDiffRanges: diff thunk with mixed ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
@@ -334,7 +340,7 @@ test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
]);
});
test("getDiffRanges: multiple diff thunks", async (t) => {
test.serial("getDiffRanges: multiple diff thunks", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
@@ -369,7 +375,7 @@ test("getDiffRanges: multiple diff thunks", async (t) => {
]);
});
test("getDiffRanges: no diff context lines", async (t) => {
test.serial("getDiffRanges: no diff context lines", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, [
{
@@ -380,7 +386,7 @@ test("getDiffRanges: no diff context lines", async (t) => {
]);
});
test("getDiffRanges: malformed thunk header", async (t) => {
test.serial("getDiffRanges: malformed thunk header", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, undefined);
});
+393 -330
View File
@@ -34,23 +34,26 @@ test.beforeEach(() => {
initializeEnvironment("1.2.3");
});
test(`All features use default values if running against GHES`, async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
{ type: GitHubVariant.GHES, version: "3.0.0" },
);
test.serial(
`All features use default values if running against GHES`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
{ type: GitHubVariant.GHES, version: "3.0.0" },
);
await assertAllFeaturesHaveDefaultValues(t, features);
checkExpectedLogMessages(t, loggedMessages, [
"Not running against github.com. Using default values for all features.",
]);
});
});
await assertAllFeaturesHaveDefaultValues(t, features);
checkExpectedLogMessages(t, loggedMessages, [
"Not running against github.com. Using default values for all features.",
]);
});
},
);
test(`Feature flags are requested in GHEC-DR`, async (t) => {
test.serial(`Feature flags are requested in GHEC-DR`, async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
@@ -78,254 +81,288 @@ test(`Feature flags are requested in GHEC-DR`, async (t) => {
});
});
test("API response missing and features use default value", async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages: LoggedMessage[] = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
mockFeatureFlagApiEndpoint(403, {});
for (const feature of Object.values(Feature)) {
t.assert(
(await getFeatureIncludingCodeQlIfRequired(features, feature)) ===
featureConfig[feature].defaultValue,
);
}
assertAllFeaturesUndefinedInApi(t, loggedMessages);
});
});
test("Features use default value if they're not returned in API response", async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages: LoggedMessage[] = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
mockFeatureFlagApiEndpoint(200, {});
for (const feature of Object.values(Feature)) {
t.assert(
(await getFeatureIncludingCodeQlIfRequired(features, feature)) ===
featureConfig[feature].defaultValue,
);
}
assertAllFeaturesUndefinedInApi(t, loggedMessages);
});
});
test("Include no more than 25 features in each API request", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
stubFeatureFlagApiEndpoint((request) => {
const requestedFeatures = (request.features as string).split(",");
return {
status: requestedFeatures.length <= 25 ? 200 : 400,
messageIfError: "Can request a maximum of 25 features.",
data: {},
};
});
// We only need to call getValue once, and it does not matter which feature
// we ask for. Under the hood, the features library will request all features
// from the API.
const feature = Object.values(Feature)[0];
await t.notThrowsAsync(async () =>
getFeatureIncludingCodeQlIfRequired(features, feature),
);
});
});
test("Feature flags exception is propagated if the API request errors", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
mockFeatureFlagApiEndpoint(500, {});
const someFeature = Object.values(Feature)[0];
await t.throwsAsync(
async () => getFeatureIncludingCodeQlIfRequired(features, someFeature),
{
message:
"Encountered an error while trying to determine feature enablement: Error: some error message",
},
);
});
});
for (const feature of Object.keys(featureConfig)) {
test(`Only feature '${feature}' is enabled if enabled in the API response. Other features disabled`, async (t) => {
test.serial(
"API response missing and features use default value",
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const loggedMessages: LoggedMessage[] = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
// set all features to false except the one we're testing
const expectedFeatureEnablement: { [feature: string]: boolean } = {};
for (const f of Object.keys(featureConfig)) {
expectedFeatureEnablement[f] = f === feature;
mockFeatureFlagApiEndpoint(403, {});
for (const feature of Object.values(Feature)) {
t.assert(
(await getFeatureIncludingCodeQlIfRequired(features, feature)) ===
featureConfig[feature].defaultValue,
);
}
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
assertAllFeaturesUndefinedInApi(t, loggedMessages);
});
},
);
// retrieve the values of the actual features
const actualFeatureEnablement: { [feature: string]: boolean } = {};
for (const f of Object.keys(featureConfig)) {
actualFeatureEnablement[f] = await getFeatureIncludingCodeQlIfRequired(
features,
f as Feature,
test.serial(
"Features use default value if they're not returned in API response",
async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages: LoggedMessage[] = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
mockFeatureFlagApiEndpoint(200, {});
for (const feature of Object.values(Feature)) {
t.assert(
(await getFeatureIncludingCodeQlIfRequired(features, feature)) ===
featureConfig[feature].defaultValue,
);
}
// All features should be false except the one we're testing
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
assertAllFeaturesUndefinedInApi(t, loggedMessages);
});
});
},
);
test(`Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`, async (t) => {
test.serial(
"Include no more than 25 features in each API request",
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(false);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
stubFeatureFlagApiEndpoint((request) => {
const requestedFeatures = (request.features as string).split(",");
return {
status: requestedFeatures.length <= 25 ? 200 : 400,
messageIfError: "Can request a maximum of 25 features.",
data: {},
};
});
// feature should be disabled initially
t.assert(
!(await getFeatureIncludingCodeQlIfRequired(
features,
feature as Feature,
)),
);
// set env var to true and check that the feature is now enabled
process.env[featureConfig[feature].envVar] = "true";
t.assert(
await getFeatureIncludingCodeQlIfRequired(features, feature as Feature),
// We only need to call getValue once, and it does not matter which feature
// we ask for. Under the hood, the features library will request all features
// from the API.
const feature = Object.values(Feature)[0];
await t.notThrowsAsync(async () =>
getFeatureIncludingCodeQlIfRequired(features, feature),
);
});
});
},
);
test(`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, async (t) => {
test.serial(
"Feature flags exception is propagated if the API request errors",
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
mockFeatureFlagApiEndpoint(500, {});
// feature should be enabled initially
t.assert(
await getFeatureIncludingCodeQlIfRequired(features, feature as Feature),
);
const someFeature = Object.values(Feature)[0];
// set env var to false and check that the feature is now disabled
process.env[featureConfig[feature].envVar] = "false";
t.assert(
!(await getFeatureIncludingCodeQlIfRequired(
features,
feature as Feature,
)),
await t.throwsAsync(
async () => getFeatureIncludingCodeQlIfRequired(features, someFeature),
{
message:
"Encountered an error while trying to determine feature enablement: Error: some error message",
},
);
});
});
},
);
for (const feature of Object.keys(featureConfig)) {
test.serial(
`Only feature '${feature}' is enabled if enabled in the API response. Other features disabled`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
// set all features to false except the one we're testing
const expectedFeatureEnablement: { [feature: string]: boolean } = {};
for (const f of Object.keys(featureConfig)) {
expectedFeatureEnablement[f] = f === feature;
}
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
// retrieve the values of the actual features
const actualFeatureEnablement: { [feature: string]: boolean } = {};
for (const f of Object.keys(featureConfig)) {
actualFeatureEnablement[f] =
await getFeatureIncludingCodeQlIfRequired(features, f as Feature);
}
// All features should be false except the one we're testing
t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement);
});
},
);
test.serial(
`Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(false);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
// feature should be disabled initially
t.assert(
!(await getFeatureIncludingCodeQlIfRequired(
features,
feature as Feature,
)),
);
// set env var to true and check that the feature is now enabled
process.env[featureConfig[feature].envVar] = "true";
t.assert(
await getFeatureIncludingCodeQlIfRequired(
features,
feature as Feature,
),
);
});
},
);
test.serial(
`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
// feature should be enabled initially
t.assert(
await getFeatureIncludingCodeQlIfRequired(
features,
feature as Feature,
),
);
// set env var to false and check that the feature is now disabled
process.env[featureConfig[feature].envVar] = "false";
t.assert(
!(await getFeatureIncludingCodeQlIfRequired(
features,
feature as Feature,
)),
);
});
},
);
if (
featureConfig[feature].minimumVersion !== undefined ||
featureConfig[feature].toolsFeature !== undefined
) {
test(`Getting feature '${feature} should throw if no codeql is provided`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
test.serial(
`Getting feature '${feature} should throw if no codeql is provided`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
// The type system should prevent this happening, but test that if we
// bypass it we get the expected error.
await t.throwsAsync(
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
async () => features.getValue(feature as any),
{
message: `Internal error: A ${
featureConfig[feature].minimumVersion !== undefined
? "minimum version"
: "required tools feature"
} is specified for feature ${feature}, but no instance of CodeQL was provided.`,
},
);
});
});
// The type system should prevent this happening, but test that if we
// bypass it we get the expected error.
await t.throwsAsync(
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
async () => features.getValue(feature as any),
{
message: `Internal error: A ${
featureConfig[feature].minimumVersion !== undefined
? "minimum version"
: "required tools feature"
} is specified for feature ${feature}, but no instance of CodeQL was provided.`,
},
);
});
},
);
}
if (featureConfig[feature].minimumVersion !== undefined) {
test(`Feature '${feature}' is disabled if the minimum CLI version is below ${featureConfig[feature].minimumVersion}`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
test.serial(
`Feature '${feature}' is disabled if the minimum CLI version is below ${featureConfig[feature].minimumVersion}`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
// feature should be disabled when an old CLI version is set
let codeql = mockCodeQLVersion("2.0.0");
t.assert(!(await features.getValue(feature as Feature, codeql)));
// feature should be disabled when an old CLI version is set
let codeql = mockCodeQLVersion("2.0.0");
t.assert(!(await features.getValue(feature as Feature, codeql)));
// even setting the env var to true should not enable the feature if
// the minimum CLI version is not met
process.env[featureConfig[feature].envVar] = "true";
t.assert(!(await features.getValue(feature as Feature, codeql)));
// even setting the env var to true should not enable the feature if
// the minimum CLI version is not met
process.env[featureConfig[feature].envVar] = "true";
t.assert(!(await features.getValue(feature as Feature, codeql)));
// feature should be enabled when a new CLI version is set
// and env var is not set
process.env[featureConfig[feature].envVar] = "";
codeql = mockCodeQLVersion(
featureConfig[feature].minimumVersion as string,
);
t.assert(await features.getValue(feature as Feature, codeql));
// feature should be enabled when a new CLI version is set
// and env var is not set
process.env[featureConfig[feature].envVar] = "";
codeql = mockCodeQLVersion(
featureConfig[feature].minimumVersion as string,
);
t.assert(await features.getValue(feature as Feature, codeql));
// set env var to false and check that the feature is now disabled
process.env[featureConfig[feature].envVar] = "false";
t.assert(!(await features.getValue(feature as Feature, codeql)));
});
});
// set env var to false and check that the feature is now disabled
process.env[featureConfig[feature].envVar] = "false";
t.assert(!(await features.getValue(feature as Feature, codeql)));
});
},
);
}
if (featureConfig[feature].toolsFeature !== undefined) {
test(`Feature '${feature}' is disabled if the required tools feature is not enabled`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
test.serial(
`Feature '${feature}' is disabled if the required tools feature is not enabled`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
// feature should be disabled when the required tools feature is not enabled
let codeql = mockCodeQLVersion("2.0.0");
t.assert(!(await features.getValue(feature as Feature, codeql)));
// feature should be disabled when the required tools feature is not enabled
let codeql = mockCodeQLVersion("2.0.0");
t.assert(!(await features.getValue(feature as Feature, codeql)));
// even setting the env var to true should not enable the feature if
// the required tools feature is not enabled
process.env[featureConfig[feature].envVar] = "true";
t.assert(!(await features.getValue(feature as Feature, codeql)));
// even setting the env var to true should not enable the feature if
// the required tools feature is not enabled
process.env[featureConfig[feature].envVar] = "true";
t.assert(!(await features.getValue(feature as Feature, codeql)));
// feature should be enabled when the required tools feature is enabled
// and env var is not set
process.env[featureConfig[feature].envVar] = "";
codeql = mockCodeQLVersion("2.0.0", {
[featureConfig[feature].toolsFeature]: true,
// feature should be enabled when the required tools feature is enabled
// and env var is not set
process.env[featureConfig[feature].envVar] = "";
codeql = mockCodeQLVersion("2.0.0", {
[featureConfig[feature].toolsFeature]: true,
});
t.assert(await features.getValue(feature as Feature, codeql));
// set env var to false and check that the feature is now disabled
process.env[featureConfig[feature].envVar] = "false";
t.assert(!(await features.getValue(feature as Feature, codeql)));
});
t.assert(await features.getValue(feature as Feature, codeql));
// set env var to false and check that the feature is now disabled
process.env[featureConfig[feature].envVar] = "false";
t.assert(!(await features.getValue(feature as Feature, codeql)));
});
});
},
);
}
}
test("Feature flags are saved to disk", async (t) => {
test.serial("Feature flags are saved to disk", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
@@ -376,38 +413,41 @@ test("Feature flags are saved to disk", async (t) => {
});
});
test("Environment variable can override feature flag cache", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test.serial(
"Environment variable can override feature flag cache",
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const cachedFeatureFlags = path.join(tmpDir, FEATURE_FLAGS_FILE_NAME);
t.true(
await getFeatureIncludingCodeQlIfRequired(
features,
Feature.QaTelemetryEnabled,
),
"Feature flag should be enabled initially",
);
const cachedFeatureFlags = path.join(tmpDir, FEATURE_FLAGS_FILE_NAME);
t.true(
await getFeatureIncludingCodeQlIfRequired(
features,
Feature.QaTelemetryEnabled,
),
"Feature flag should be enabled initially",
);
t.true(
fs.existsSync(cachedFeatureFlags),
"Feature flag cached file should exist after getting feature flags",
);
process.env.CODEQL_ACTION_QA_TELEMETRY = "false";
t.true(
fs.existsSync(cachedFeatureFlags),
"Feature flag cached file should exist after getting feature flags",
);
process.env.CODEQL_ACTION_QA_TELEMETRY = "false";
t.false(
await getFeatureIncludingCodeQlIfRequired(
features,
Feature.QaTelemetryEnabled,
),
"Feature flag should be disabled after setting env var",
);
});
});
t.false(
await getFeatureIncludingCodeQlIfRequired(
features,
Feature.QaTelemetryEnabled,
),
"Feature flag should be disabled after setting env var",
);
});
},
);
test(`selects CLI from defaults.json on GHES`, async (t) => {
test.serial(`selects CLI from defaults.json on GHES`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
@@ -422,80 +462,94 @@ test(`selects CLI from defaults.json on GHES`, async (t) => {
});
for (const variant of [GitHubVariant.DOTCOM, GitHubVariant.GHEC_DR]) {
test(`selects CLI v2.20.1 on ${variant} when feature flags enable v2.20.0 and v2.20.1`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] =
false;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test.serial(
`selects CLI v2.20.1 on ${variant} when feature flags enable v2.20.0 and v2.20.1`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] =
true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] =
true;
expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] =
false;
expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] =
false;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
});
});
});
});
},
);
test(`selects CLI from defaults.json on ${variant} when no default version feature flags are enabled`, async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test.serial(
`selects CLI from defaults.json on ${variant} when no default version feature flags are enabled`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
const expectedFeatureEnablement = initializeFeatures(true);
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
toolsFeatureFlagsValid: false,
const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
toolsFeatureFlagsValid: false,
});
});
});
});
},
);
test(`ignores invalid version numbers in default version feature flags on ${variant}`, async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true;
expectedFeatureEnablement["default_codeql_version_2_20_invalid_enabled"] =
true;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
test.serial(
`ignores invalid version numbers in default version feature flags on ${variant}`,
async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
tmpDir,
getRecordingLogger(loggedMessages),
);
const expectedFeatureEnablement = initializeFeatures(true);
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] =
true;
expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] =
true;
expectedFeatureEnablement[
"default_codeql_version_2_20_invalid_enabled"
] = true;
mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement);
const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
const defaultCliVersion = await features.getDefaultCliVersion(variant);
t.deepEqual(defaultCliVersion, {
cliVersion: "2.20.1",
tagName: "codeql-bundle-v2.20.1",
toolsFeatureFlagsValid: true,
});
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "warning" &&
v.message ===
"Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.",
) !== undefined,
);
});
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "warning" &&
v.message ===
"Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.",
) !== undefined,
);
});
});
},
);
}
test("legacy feature flags should end with _enabled", async (t) => {
test.serial("legacy feature flags should end with _enabled", async (t) => {
for (const [feature, config] of Object.entries(featureConfig)) {
if ((config satisfies FeatureConfig as FeatureConfig).legacyApi) {
t.assert(
@@ -506,31 +560,40 @@ test("legacy feature flags should end with _enabled", async (t) => {
}
});
test("non-legacy feature flags should not end with _enabled", async (t) => {
for (const [feature, config] of Object.entries(featureConfig)) {
if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) {
t.false(
feature.endsWith("_enabled"),
`non-legacy feature ${feature} should not end with '_enabled'`,
);
test.serial(
"non-legacy feature flags should not end with _enabled",
async (t) => {
for (const [feature, config] of Object.entries(featureConfig)) {
if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) {
t.false(
feature.endsWith("_enabled"),
`non-legacy feature ${feature} should not end with '_enabled'`,
);
}
}
}
});
},
);
test("non-legacy feature flags should not start with codeql_action_", async (t) => {
for (const [feature, config] of Object.entries(featureConfig)) {
if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) {
t.false(
feature.startsWith("codeql_action_"),
`non-legacy feature ${feature} should not start with 'codeql_action_'`,
);
test.serial(
"non-legacy feature flags should not start with codeql_action_",
async (t) => {
for (const [feature, config] of Object.entries(featureConfig)) {
if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) {
t.false(
feature.startsWith("codeql_action_"),
`non-legacy feature ${feature} should not start with 'codeql_action_'`,
);
}
}
}
});
},
);
test("initFeatures returns a `Features` instance by default", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
t.is("Features", features.constructor.name);
});
});
test.serial(
"initFeatures returns a `Features` instance by default",
async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
t.is("Features", features.constructor.name);
});
},
);
+13
View File
@@ -62,21 +62,29 @@ export enum Feature {
OverlayAnalysisCodeScanningSwift = "overlay_analysis_code_scanning_swift",
OverlayAnalysisCpp = "overlay_analysis_cpp",
OverlayAnalysisCsharp = "overlay_analysis_csharp",
/** Controls whether the Actions cache is checked for overlay build outcomes. */
OverlayAnalysisStatusCheck = "overlay_analysis_status_check",
/** Controls whether overlay build failures on are stored in the Actions cache. */
OverlayAnalysisStatusSave = "overlay_analysis_status_save",
OverlayAnalysisGo = "overlay_analysis_go",
OverlayAnalysisJava = "overlay_analysis_java",
OverlayAnalysisJavascript = "overlay_analysis_javascript",
OverlayAnalysisPython = "overlay_analysis_python",
/**
* Controls whether lower disk space requirements are used for overlay hardware checks.
* Has no effect if `OverlayAnalysisSkipResourceChecks` is enabled.
*/
OverlayAnalysisResourceChecksV2 = "overlay_analysis_resource_checks_v2",
OverlayAnalysisRuby = "overlay_analysis_ruby",
OverlayAnalysisRust = "overlay_analysis_rust",
/** Controls whether hardware checks are skipped for overlay analysis. */
OverlayAnalysisSkipResourceChecks = "overlay_analysis_skip_resource_checks",
OverlayAnalysisSwift = "overlay_analysis_swift",
PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib",
QaTelemetryEnabled = "qa_telemetry_enabled",
/** Note that this currently only disables baseline file coverage information. */
SkipFileCoverageOnPrs = "skip_file_coverage_on_prs",
StartProxyRemoveUnusedRegistries = "start_proxy_remove_unused_registries",
StartProxyUseFeaturesRelease = "start_proxy_use_features_release",
UploadOverlayDbToApi = "upload_overlay_db_to_api",
UseRepositoryProperties = "use_repository_properties_v2",
@@ -328,6 +336,11 @@ export const featureConfig = {
// cannot be found when interpreting results.
minimumVersion: undefined,
},
[Feature.StartProxyRemoveUnusedRegistries]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: undefined,
},
[Feature.StartProxyUseFeaturesRelease]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
+160 -141
View File
@@ -11,76 +11,86 @@ import * as properties from "./properties";
setupTests(test);
test("loadPropertiesFromApi throws if response data is not an array", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: {},
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
await t.throwsAsync(
properties.loadPropertiesFromApi(
test.serial(
"loadPropertiesFromApi throws if response data is not an array",
async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: {},
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
await t.throwsAsync(
properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
),
{
type: util.GitHubVariant.DOTCOM,
message: /Expected repository properties API to return an array/,
},
);
},
);
test.serial(
"loadPropertiesFromApi throws if response data contains unexpected objects",
async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [{}],
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
await t.throwsAsync(
properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
),
{
message:
/Expected repository property object to have a 'property_name'/,
},
);
},
);
test.serial(
"loadPropertiesFromApi returns empty object if on GHES",
async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [
{ property_name: "github-codeql-extra-queries", value: "+queries" },
{ property_name: "unknown-property", value: "something" },
] satisfies properties.GitHubPropertiesResponse,
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
const response = await properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.GHES,
version: "",
},
logger,
mockRepositoryNwo,
),
{
message: /Expected repository properties API to return an array/,
},
);
});
);
t.deepEqual(response, {});
},
);
test("loadPropertiesFromApi throws if response data contains unexpected objects", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [{}],
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
await t.throwsAsync(
properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
),
{
message: /Expected repository property object to have a 'property_name'/,
},
);
});
test("loadPropertiesFromApi returns empty object if on GHES", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [
{ property_name: "github-codeql-extra-queries", value: "+queries" },
{ property_name: "unknown-property", value: "something" },
] satisfies properties.GitHubPropertiesResponse,
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
const response = await properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.GHES,
version: "",
},
logger,
mockRepositoryNwo,
);
t.deepEqual(response, {});
});
test("loadPropertiesFromApi loads known properties", async (t) => {
test.serial("loadPropertiesFromApi loads known properties", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
@@ -102,7 +112,7 @@ test("loadPropertiesFromApi loads known properties", async (t) => {
t.deepEqual(response, { "github-codeql-extra-queries": "+queries" });
});
test("loadPropertiesFromApi parses true boolean property", async (t) => {
test.serial("loadPropertiesFromApi parses true boolean property", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
@@ -132,86 +142,95 @@ test("loadPropertiesFromApi parses true boolean property", async (t) => {
t.true(warningSpy.notCalled);
});
test("loadPropertiesFromApi parses false boolean property", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [
{
property_name: "github-codeql-disable-overlay",
value: "false",
},
] satisfies properties.GitHubPropertiesResponse,
});
const logger = getRunnerLogger(true);
const warningSpy = sinon.spy(logger, "warning");
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
const response = await properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
);
t.deepEqual(response, {
"github-codeql-disable-overlay": false,
});
t.true(warningSpy.notCalled);
});
test("loadPropertiesFromApi throws if property value is not a string", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [{ property_name: "github-codeql-extra-queries", value: 123 }],
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
await t.throwsAsync(
properties.loadPropertiesFromApi(
test.serial(
"loadPropertiesFromApi parses false boolean property",
async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [
{
property_name: "github-codeql-disable-overlay",
value: "false",
},
] satisfies properties.GitHubPropertiesResponse,
});
const logger = getRunnerLogger(true);
const warningSpy = sinon.spy(logger, "warning");
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
const response = await properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
),
{
message:
/Expected repository property 'github-codeql-extra-queries' to have a string value/,
},
);
});
);
t.deepEqual(response, {
"github-codeql-disable-overlay": false,
});
t.true(warningSpy.notCalled);
},
);
test("loadPropertiesFromApi warns if boolean property has unexpected value", async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [
test.serial(
"loadPropertiesFromApi throws if property value is not a string",
async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [{ property_name: "github-codeql-extra-queries", value: 123 }],
});
const logger = getRunnerLogger(true);
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
await t.throwsAsync(
properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
),
{
property_name: "github-codeql-disable-overlay",
value: "yes",
message:
/Expected repository property 'github-codeql-extra-queries' to have a string value/,
},
] satisfies properties.GitHubPropertiesResponse,
});
const logger = getRunnerLogger(true);
const warningSpy = sinon.spy(logger, "warning");
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
const response = await properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
);
t.deepEqual(response, {
"github-codeql-disable-overlay": false,
});
t.true(warningSpy.calledOnce);
t.is(
warningSpy.firstCall.args[0],
"Repository property 'github-codeql-disable-overlay' has unexpected value 'yes'. Expected 'true' or 'false'. Defaulting to false.",
);
});
);
},
);
test.serial(
"loadPropertiesFromApi warns if boolean property has unexpected value",
async (t) => {
sinon.stub(api, "getRepositoryProperties").resolves({
headers: {},
status: 200,
url: "",
data: [
{
property_name: "github-codeql-disable-overlay",
value: "yes",
},
] satisfies properties.GitHubPropertiesResponse,
});
const logger = getRunnerLogger(true);
const warningSpy = sinon.spy(logger, "warning");
const mockRepositoryNwo = parseRepositoryNwo("owner/repo");
const response = await properties.loadPropertiesFromApi(
{
type: util.GitHubVariant.DOTCOM,
},
logger,
mockRepositoryNwo,
);
t.deepEqual(response, {
"github-codeql-disable-overlay": false,
});
t.true(warningSpy.calledOnce);
t.is(
warningSpy.firstCall.args[0],
"Repository property 'github-codeql-disable-overlay' has unexpected value 'yes'. Expected 'true' or 'false'. Defaulting to false.",
);
},
);
+3 -2
View File
@@ -6,6 +6,7 @@ import test from "ava";
import * as fingerprints from "./fingerprints";
import { getRunnerLogger } from "./logging";
import * as sarif from "./sarif";
import { setupTests } from "./testing-utils";
import * as util from "./util";
@@ -201,7 +202,7 @@ test("addFingerprints", async (t) => {
fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString(),
) as util.SarifFile;
) as sarif.Log;
const expected = JSON.parse(
fs
.readFileSync(
@@ -229,7 +230,7 @@ test("missingRegions", async (t) => {
fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString(),
) as util.SarifFile;
) as sarif.Log;
const expected = JSON.parse(
fs
.readFileSync(
+6 -6
View File
@@ -5,7 +5,7 @@ import Long from "long";
import { DocUrl } from "./doc-url";
import { Logger } from "./logging";
import { SarifFile, SarifResult } from "./util";
import type * as sarif from "./sarif";
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
@@ -138,7 +138,7 @@ export async function hash(callback: hashCallback, filepath: string) {
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(
result: SarifResult,
result: sarif.Result,
location: any,
logger: Logger,
): hashCallback {
@@ -256,17 +256,17 @@ export function resolveUriToFile(
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
export async function addFingerprints(
sarif: SarifFile,
sarifLog: Partial<sarif.Log>,
sourceRoot: string,
logger: Logger,
): Promise<SarifFile> {
): Promise<Partial<sarif.Log>> {
logger.info(
`Adding fingerprints to SARIF file. See ${DocUrl.TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS} for more information.`,
);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile: { [filename: string]: hashCallback[] } = {};
for (const run of sarif.runs || []) {
for (const run of sarifLog.runs || []) {
// We may need the list of artifacts to resolve against
const artifacts = run.artifacts || [];
@@ -316,5 +316,5 @@ export async function addFingerprints(
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
+239 -191
View File
@@ -13,154 +13,187 @@ import { withTmpDir } from "./util";
setupTests(test);
test("getRef() throws on the empty string", async (t) => {
test.serial("getRef() throws on the empty string", async (t) => {
process.env["GITHUB_REF"] = "";
await t.throwsAsync(gitUtils.getRef);
});
test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
test.serial(
"getRef() returns merge PR ref if GITHUB_SHA still checked out",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("HEAD").resolves(currentSha);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("HEAD").resolves(currentSha);
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
});
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
},
);
test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = "b".repeat(40);
const sha = "a".repeat(40);
test.serial(
"getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = "b".repeat(40);
const sha = "a".repeat(40);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
callback.withArgs("HEAD").resolves(sha);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("refs/remotes/pull/1/merge").resolves(sha);
callback.withArgs("HEAD").resolves(sha);
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
});
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
callback.restore();
});
},
);
test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
test.serial(
"getRef() returns head PR ref if GITHUB_REF no longer checked out",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40));
callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40));
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
});
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, "refs/pull/1/head");
callback.restore();
});
},
);
test("getRef() returns ref provided as an input and ignores current HEAD", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
test.serial(
"getRef() returns ref provided as an input and ignores current HEAD",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(
actionsUtil,
"getOptionalInput",
);
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge");
getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40));
// These values are be ignored
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
// These values are be ignored
process.env["GITHUB_REF"] = "refs/pull/1/merge";
process.env["GITHUB_SHA"] = "a".repeat(40);
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const callback = sinon.stub(gitUtils, "getCommitOid");
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40));
callback.withArgs("HEAD").resolves("b".repeat(40));
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, "refs/pull/2/merge");
callback.restore();
getAdditionalInputStub.restore();
});
});
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, "refs/pull/2/merge");
callback.restore();
getAdditionalInputStub.restore();
});
},
);
test("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/HEAD";
const currentSha = "a".repeat(40);
process.env["CODE_SCANNING_REF"] = expectedRef;
process.env["GITHUB_REF"] = "";
process.env["GITHUB_SHA"] = currentSha;
test.serial(
"getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/HEAD";
const currentSha = "a".repeat(40);
process.env["CODE_SCANNING_REF"] = expectedRef;
process.env["GITHUB_REF"] = "";
process.env["GITHUB_SHA"] = currentSha;
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
});
});
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
});
},
);
test("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
test.serial(
"getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const expectedRef = "refs/pull/1/merge";
const currentSha = "a".repeat(40);
process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD";
process.env["GITHUB_REF"] = expectedRef;
process.env["GITHUB_SHA"] = currentSha;
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
});
});
const actualRef = await gitUtils.getRef();
t.deepEqual(actualRef, expectedRef);
});
},
);
test("getRef() throws an error if only `ref` is provided as an input", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
test.serial(
"getRef() throws an error if only `ref` is provided as an input",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
const getAdditionalInputStub = sinon.stub(
actionsUtil,
"getOptionalInput",
);
getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge");
await t.throwsAsync(
async () => {
await gitUtils.getRef();
},
{
instanceOf: Error,
message:
"Both 'ref' and 'sha' are required if one of them is provided.",
},
);
getAdditionalInputStub.restore();
});
});
await t.throwsAsync(
async () => {
await gitUtils.getRef();
},
{
instanceOf: Error,
message:
"Both 'ref' and 'sha' are required if one of them is provided.",
},
);
getAdditionalInputStub.restore();
});
},
);
test("getRef() throws an error if only `sha` is provided as an input", async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
process.env["GITHUB_WORKSPACE"] = "/tmp";
const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
test.serial(
"getRef() throws an error if only `sha` is provided as an input",
async (t) => {
await withTmpDir(async (tmpDir: string) => {
setupActionsVars(tmpDir, tmpDir);
process.env["GITHUB_WORKSPACE"] = "/tmp";
const getAdditionalInputStub = sinon.stub(
actionsUtil,
"getOptionalInput",
);
getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40));
await t.throwsAsync(
async () => {
await gitUtils.getRef();
},
{
instanceOf: Error,
message:
"Both 'ref' and 'sha' are required if one of them is provided.",
},
);
getAdditionalInputStub.restore();
});
});
await t.throwsAsync(
async () => {
await gitUtils.getRef();
},
{
instanceOf: Error,
message:
"Both 'ref' and 'sha' are required if one of them is provided.",
},
);
getAdditionalInputStub.restore();
});
},
);
test("isAnalyzingDefaultBranch()", async (t) => {
test.serial("isAnalyzingDefaultBranch()", async (t) => {
process.env["GITHUB_EVENT_NAME"] = "push";
process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true";
t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true);
@@ -213,7 +246,7 @@ test("isAnalyzingDefaultBranch()", async (t) => {
});
});
test("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
test.serial("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
const infoStub = sinon.stub(core, "info");
process.env["GITHUB_EVENT_NAME"] = "hucairz";
@@ -225,27 +258,30 @@ test("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => {
infoStub.restore();
});
test("determineBaseBranchHeadCommitOid not git repository", async (t) => {
const infoStub = sinon.stub(core, "info");
test.serial(
"determineBaseBranchHeadCommitOid not git repository",
async (t) => {
const infoStub = sinon.stub(core, "info");
process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
await withTmpDir(async (tmpDir) => {
await gitUtils.determineBaseBranchHeadCommitOid(tmpDir);
});
await withTmpDir(async (tmpDir) => {
await gitUtils.determineBaseBranchHeadCommitOid(tmpDir);
});
t.deepEqual(1, infoStub.callCount);
t.deepEqual(
infoStub.firstCall.args[0],
"git call failed. Will calculate the base branch SHA on the server. Error: " +
"The checkout path provided to the action does not appear to be a git repository.",
);
t.deepEqual(1, infoStub.callCount);
t.deepEqual(
infoStub.firstCall.args[0],
"git call failed. Will calculate the base branch SHA on the server. Error: " +
"The checkout path provided to the action does not appear to be a git repository.",
);
infoStub.restore();
});
infoStub.restore();
},
);
test("determineBaseBranchHeadCommitOid other error", async (t) => {
test.serial("determineBaseBranchHeadCommitOid other error", async (t) => {
const infoStub = sinon.stub(core, "info");
process.env["GITHUB_EVENT_NAME"] = "pull_request";
@@ -269,7 +305,7 @@ test("determineBaseBranchHeadCommitOid other error", async (t) => {
infoStub.restore();
});
test("decodeGitFilePath unquoted strings", async (t) => {
test.serial("decodeGitFilePath unquoted strings", async (t) => {
t.deepEqual(gitUtils.decodeGitFilePath("foo"), "foo");
t.deepEqual(gitUtils.decodeGitFilePath("foo bar"), "foo bar");
t.deepEqual(gitUtils.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
@@ -288,7 +324,7 @@ test("decodeGitFilePath unquoted strings", async (t) => {
);
});
test("decodeGitFilePath quoted strings", async (t) => {
test.serial("decodeGitFilePath quoted strings", async (t) => {
t.deepEqual(gitUtils.decodeGitFilePath('"foo"'), "foo");
t.deepEqual(gitUtils.decodeGitFilePath('"foo bar"'), "foo bar");
t.deepEqual(gitUtils.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
@@ -307,7 +343,7 @@ test("decodeGitFilePath quoted strings", async (t) => {
);
});
test("getFileOidsUnderPath returns correct file mapping", async (t) => {
test.serial("getFileOidsUnderPath returns correct file mapping", async (t) => {
const runGitCommandStub = sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(
@@ -331,7 +367,7 @@ test("getFileOidsUnderPath returns correct file mapping", async (t) => {
]);
});
test("getFileOidsUnderPath handles quoted paths", async (t) => {
test.serial("getFileOidsUnderPath handles quoted paths", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(
@@ -349,44 +385,50 @@ test("getFileOidsUnderPath handles quoted paths", async (t) => {
});
});
test("getFileOidsUnderPath handles empty output", async (t) => {
test.serial("getFileOidsUnderPath handles empty output", async (t) => {
sinon.stub(gitUtils as any, "runGitCommand").resolves("");
const result = await gitUtils.getFileOidsUnderPath("/fake/path");
t.deepEqual(result, {});
});
test("getFileOidsUnderPath throws on unexpected output format", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(
"30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
"invalid-line-format\n" +
"a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
test.serial(
"getFileOidsUnderPath throws on unexpected output format",
async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(
"30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" +
"invalid-line-format\n" +
"a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts",
);
await t.throwsAsync(
async () => {
await gitUtils.getFileOidsUnderPath("/fake/path");
},
{
instanceOf: Error,
message: 'Unexpected "git ls-files" output: invalid-line-format',
},
);
},
);
await t.throwsAsync(
async () => {
await gitUtils.getFileOidsUnderPath("/fake/path");
},
{
instanceOf: Error,
message: 'Unexpected "git ls-files" output: invalid-line-format',
},
);
});
test.serial(
"getGitVersionOrThrow returns version for valid git output",
async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(`git version 2.40.0${os.EOL}`);
test("getGitVersionOrThrow returns version for valid git output", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves(`git version 2.40.0${os.EOL}`);
const version = await gitUtils.getGitVersionOrThrow();
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0");
},
);
const version = await gitUtils.getGitVersionOrThrow();
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0");
});
test("getGitVersionOrThrow throws for invalid git output", async (t) => {
test.serial("getGitVersionOrThrow throws for invalid git output", async (t) => {
sinon.stub(gitUtils as any, "runGitCommand").resolves("invalid output");
await t.throwsAsync(
@@ -400,18 +442,21 @@ test("getGitVersionOrThrow throws for invalid git output", async (t) => {
);
});
test("getGitVersionOrThrow handles Windows-style git output", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves("git version 2.40.0.windows.1");
test.serial(
"getGitVersionOrThrow handles Windows-style git output",
async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.resolves("git version 2.40.0.windows.1");
const version = await gitUtils.getGitVersionOrThrow();
// The truncated version should contain just the major.minor.patch portion
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0.windows.1");
});
const version = await gitUtils.getGitVersionOrThrow();
// The truncated version should contain just the major.minor.patch portion
t.is(version.truncatedVersion, "2.40.0");
t.is(version.fullVersion, "2.40.0.windows.1");
},
);
test("getGitVersionOrThrow throws when git command fails", async (t) => {
test.serial("getGitVersionOrThrow throws when git command fails", async (t) => {
sinon
.stub(gitUtils as any, "runGitCommand")
.rejects(new Error("git not found"));
@@ -427,16 +472,19 @@ test("getGitVersionOrThrow throws when git command fails", async (t) => {
);
});
test("GitVersionInfo.isAtLeast correctly compares versions", async (t) => {
const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0");
test.serial(
"GitVersionInfo.isAtLeast correctly compares versions",
async (t) => {
const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0");
t.true(version.isAtLeast("2.38.0"));
t.true(version.isAtLeast("2.40.0"));
t.false(version.isAtLeast("2.41.0"));
t.false(version.isAtLeast("3.0.0"));
});
t.true(version.isAtLeast("2.38.0"));
t.true(version.isAtLeast("2.40.0"));
t.false(version.isAtLeast("2.41.0"));
t.false(version.isAtLeast("3.0.0"));
},
);
test("listFiles returns array of file paths", async (t) => {
test.serial("listFiles returns array of file paths", async (t) => {
sinon
.stub(gitUtils, "runGitCommand")
.resolves(["dir/file.txt", "README.txt", ""].join(os.EOL));
@@ -448,7 +496,7 @@ test("listFiles returns array of file paths", async (t) => {
});
});
test("getGeneratedFiles returns generated files only", async (t) => {
test.serial("getGeneratedFiles returns generated files only", async (t) => {
const runGitCommandStub = sinon.stub(gitUtils, "runGitCommand");
runGitCommandStub
+344 -308
View File
@@ -15,7 +15,9 @@ import { parseRepositoryNwo } from "./repository";
import {
createFeatures,
createTestConfig,
DEFAULT_ACTIONS_VARS,
makeVersionInfo,
setupActionsVars,
setupTests,
} from "./testing-utils";
import * as uploadLib from "./upload-lib";
@@ -26,10 +28,9 @@ const NUM_BYTES_PER_GIB = 1024 * 1024 * 1024;
setupTests(test);
test("init-post action with debug mode off", async (t) => {
test.serial("init-post action with debug mode off", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
setupActionsVars(tmpDir, tmpDir);
const gitHubVersion: util.GitHubVersion = {
type: util.GitHubVariant.DOTCOM,
@@ -60,10 +61,9 @@ test("init-post action with debug mode off", async (t) => {
});
});
test("init-post action with debug mode on", async (t) => {
test.serial("init-post action with debug mode on", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
setupActionsVars(tmpDir, tmpDir);
const uploadAllAvailableDebugArtifactsSpy = sinon.spy();
const printDebugLogsSpy = sinon.spy();
@@ -83,83 +83,94 @@ test("init-post action with debug mode on", async (t) => {
});
});
test("uploads failed SARIF run with `diagnostics export` if feature flag is off", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
test.serial(
"uploads failed SARIF run with `diagnostics export` if feature flag is off",
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
});
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
});
},
);
test("uploads failed SARIF run with `diagnostics export` if the database doesn't exist", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
test.serial(
"uploads failed SARIF run with `diagnostics export` if the database doesn't exist",
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
databaseExists: false,
});
});
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
databaseExists: false,
});
},
);
test("uploads failed SARIF run with database export-diagnostics if the database exists and feature flag is on", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
test.serial(
"uploads failed SARIF run with database export-diagnostics if the database exists and feature flag is on",
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
exportDiagnosticsEnabled: true,
});
});
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
exportDiagnosticsEnabled: true,
});
},
);
const UPLOAD_INPUT_TEST_CASES = [
{
@@ -189,9 +200,49 @@ const UPLOAD_INPUT_TEST_CASES = [
];
for (const { uploadInput, shouldUpload } of UPLOAD_INPUT_TEST_CASES) {
test(`does ${
shouldUpload ? "" : "not "
}upload failed SARIF run for workflow with upload: ${uploadInput}`, async (t) => {
test.serial(
`does ${
shouldUpload ? "" : "not "
}upload failed SARIF run for workflow with upload: ${uploadInput}`,
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
upload: uploadInput,
},
},
]);
const result = await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
expectUpload: shouldUpload,
});
if (!shouldUpload) {
t.is(
result.upload_failed_run_skipped_because,
"SARIF upload is disabled",
);
}
},
);
}
test.serial(
"uploading failed SARIF run succeeds when workflow uses an input with a matrix var",
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
@@ -208,215 +259,199 @@ for (const { uploadInput, shouldUpload } of UPLOAD_INPUT_TEST_CASES) {
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "my-category",
upload: uploadInput,
category: "/language:${{ matrix.language }}",
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "/language:csharp",
matrix: { language: "csharp" },
});
},
);
test.serial(
"uploading failed SARIF run fails when workflow uses a complex upload input",
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
upload: "${{ matrix.language != 'csharp' }}",
},
},
]);
const result = await testFailedSarifUpload(t, actionsWorkflow, {
category: "my-category",
expectUpload: shouldUpload,
expectUpload: false,
});
if (!shouldUpload) {
t.is(
result.upload_failed_run_skipped_because,
"SARIF upload is disabled",
);
}
});
}
test("uploading failed SARIF run succeeds when workflow uses an input with a matrix var", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
category: "/language:${{ matrix.language }}",
},
},
]);
await testFailedSarifUpload(t, actionsWorkflow, {
category: "/language:csharp",
matrix: { language: "csharp" },
});
});
test("uploading failed SARIF run fails when workflow uses a complex upload input", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
{
name: "Initialize CodeQL",
uses: "github/codeql-action/init@v4",
with: {
languages: "javascript",
},
},
{
name: "Perform CodeQL Analysis",
uses: "github/codeql-action/analyze@v4",
with: {
upload: "${{ matrix.language != 'csharp' }}",
},
},
]);
const result = await testFailedSarifUpload(t, actionsWorkflow, {
expectUpload: false,
});
t.is(
result.upload_failed_run_error,
"Could not get upload input to github/codeql-action/analyze since it contained an " +
"unrecognized dynamic value.",
);
});
test("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
const actionsWorkflow = createTestWorkflow([
{
name: "Checkout repository",
uses: "actions/checkout@v5",
},
]);
const result = await testFailedSarifUpload(t, actionsWorkflow, {
expectUpload: false,
});
t.is(
result.upload_failed_run_error,
"Could not get upload input to github/codeql-action/analyze since the analyze job does not " +
"call github/codeql-action/analyze.",
);
t.truthy(result.upload_failed_run_stack_trace);
});
test("not uploading failed SARIF when `code-scanning` is not an enabled analysis kind", async (t) => {
const result = await testFailedSarifUpload(t, createTestWorkflow([]), {
analysisKinds: [AnalysisKind.CodeQuality],
expectUpload: false,
});
t.is(
result.upload_failed_run_skipped_because,
"Code Scanning is not enabled.",
);
});
test("saves overlay status when overlay-base analysis did not complete successfully", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
// Ensure analyze did not complete successfully.
delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];
const diskUsage: util.DiskUsage = {
numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
numTotalBytes: 200 * NUM_BYTES_PER_GIB,
};
sinon.stub(util, "checkDiskUsage").resolves(diskUsage);
const saveOverlayStatusStub = sinon
.stub(overlayStatus, "saveOverlayStatus")
.resolves(true);
const stubCodeQL = codeql.createStubCodeQL({});
await initActionPostHelper.run(
sinon.spy(),
sinon.spy(),
stubCodeQL,
createTestConfig({
debugMode: false,
languages: ["javascript"],
overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
}),
parseRepositoryNwo("github/codeql-action"),
createFeatures([Feature.OverlayAnalysisStatusSave]),
getRunnerLogger(true),
t.is(
result.upload_failed_run_error,
"Could not get upload input to github/codeql-action/analyze since it contained an " +
"unrecognized dynamic value.",
);
},
);
t.true(
saveOverlayStatusStub.calledOnce,
"saveOverlayStatus should be called exactly once",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[0],
stubCodeQL,
"first arg should be the CodeQL instance",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[1],
["javascript"],
"second arg should be the languages",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[2],
diskUsage,
"third arg should be the disk usage",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[3],
test.serial(
"uploading failed SARIF run fails when workflow does not reference github/codeql-action",
async (t) => {
const actionsWorkflow = createTestWorkflow([
{
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: false,
name: "Checkout repository",
uses: "actions/checkout@v5",
},
"fourth arg should be the overlay status recording an unsuccessful build attempt",
);
});
});
test("does not save overlay status when OverlayAnalysisStatusSave feature flag is disabled", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
// Ensure analyze did not complete successfully.
delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];
sinon.stub(util, "checkDiskUsage").resolves({
numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
numTotalBytes: 200 * NUM_BYTES_PER_GIB,
]);
const result = await testFailedSarifUpload(t, actionsWorkflow, {
expectUpload: false,
});
const saveOverlayStatusStub = sinon
.stub(overlayStatus, "saveOverlayStatus")
.resolves(true);
await initActionPostHelper.run(
sinon.spy(),
sinon.spy(),
codeql.createStubCodeQL({}),
createTestConfig({
debugMode: false,
languages: ["javascript"],
overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
}),
parseRepositoryNwo("github/codeql-action"),
createFeatures([]),
getRunnerLogger(true),
t.is(
result.upload_failed_run_error,
"Could not get upload input to github/codeql-action/analyze since the analyze job does not " +
"call github/codeql-action/analyze.",
);
t.truthy(result.upload_failed_run_stack_trace);
},
);
t.true(
saveOverlayStatusStub.notCalled,
"saveOverlayStatus should not be called when OverlayAnalysisStatusSave feature flag is disabled",
test.serial(
"not uploading failed SARIF when `code-scanning` is not an enabled analysis kind",
async (t) => {
const result = await testFailedSarifUpload(t, createTestWorkflow([]), {
analysisKinds: [AnalysisKind.CodeQuality],
expectUpload: false,
});
t.is(
result.upload_failed_run_skipped_because,
"Code Scanning is not enabled.",
);
});
});
},
);
test("does not save overlay status when build successful", async (t) => {
test.serial(
"saves overlay status when overlay-base analysis did not complete successfully",
async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
// Ensure analyze did not complete successfully.
delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];
const diskUsage: util.DiskUsage = {
numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
numTotalBytes: 200 * NUM_BYTES_PER_GIB,
};
sinon.stub(util, "checkDiskUsage").resolves(diskUsage);
const saveOverlayStatusStub = sinon
.stub(overlayStatus, "saveOverlayStatus")
.resolves(true);
const stubCodeQL = codeql.createStubCodeQL({});
await initActionPostHelper.run(
sinon.spy(),
sinon.spy(),
stubCodeQL,
createTestConfig({
debugMode: false,
languages: ["javascript"],
overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
}),
parseRepositoryNwo("github/codeql-action"),
createFeatures([Feature.OverlayAnalysisStatusSave]),
getRunnerLogger(true),
);
t.true(
saveOverlayStatusStub.calledOnce,
"saveOverlayStatus should be called exactly once",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[0],
stubCodeQL,
"first arg should be the CodeQL instance",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[1],
["javascript"],
"second arg should be the languages",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[2],
diskUsage,
"third arg should be the disk usage",
);
t.deepEqual(
saveOverlayStatusStub.firstCall.args[3],
{
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: false,
job: {
checkRunId: undefined,
workflowRunId: Number(DEFAULT_ACTIONS_VARS.GITHUB_RUN_ID),
workflowRunAttempt: Number(DEFAULT_ACTIONS_VARS.GITHUB_RUN_ATTEMPT),
name: DEFAULT_ACTIONS_VARS.GITHUB_JOB,
},
},
"fourth arg should be the overlay status recording an unsuccessful build attempt with job details",
);
});
},
);
test.serial(
"does not save overlay status when OverlayAnalysisStatusSave feature flag is disabled",
async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
// Ensure analyze did not complete successfully.
delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];
sinon.stub(util, "checkDiskUsage").resolves({
numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
numTotalBytes: 200 * NUM_BYTES_PER_GIB,
});
const saveOverlayStatusStub = sinon
.stub(overlayStatus, "saveOverlayStatus")
.resolves(true);
await initActionPostHelper.run(
sinon.spy(),
sinon.spy(),
codeql.createStubCodeQL({}),
createTestConfig({
debugMode: false,
languages: ["javascript"],
overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
}),
parseRepositoryNwo("github/codeql-action"),
createFeatures([]),
getRunnerLogger(true),
);
t.true(
saveOverlayStatusStub.notCalled,
"saveOverlayStatus should not be called when OverlayAnalysisStatusSave feature flag is disabled",
);
});
},
);
test.serial("does not save overlay status when build successful", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
setupActionsVars(tmpDir, tmpDir);
// Mark analyze as having completed successfully.
process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] = "true";
@@ -450,41 +485,43 @@ test("does not save overlay status when build successful", async (t) => {
});
});
test("does not save overlay status when overlay not enabled", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["RUNNER_TEMP"] = tmpDir;
delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];
test.serial(
"does not save overlay status when overlay not enabled",
async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];
sinon.stub(util, "checkDiskUsage").resolves({
numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
numTotalBytes: 200 * NUM_BYTES_PER_GIB,
sinon.stub(util, "checkDiskUsage").resolves({
numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
numTotalBytes: 200 * NUM_BYTES_PER_GIB,
});
const saveOverlayStatusStub = sinon
.stub(overlayStatus, "saveOverlayStatus")
.resolves(true);
await initActionPostHelper.run(
sinon.spy(),
sinon.spy(),
codeql.createStubCodeQL({}),
createTestConfig({
debugMode: false,
languages: ["javascript"],
overlayDatabaseMode: OverlayDatabaseMode.None,
}),
parseRepositoryNwo("github/codeql-action"),
createFeatures([]),
getRunnerLogger(true),
);
t.true(
saveOverlayStatusStub.notCalled,
"saveOverlayStatus should not be called when overlay is not enabled",
);
});
const saveOverlayStatusStub = sinon
.stub(overlayStatus, "saveOverlayStatus")
.resolves(true);
await initActionPostHelper.run(
sinon.spy(),
sinon.spy(),
codeql.createStubCodeQL({}),
createTestConfig({
debugMode: false,
languages: ["javascript"],
overlayDatabaseMode: OverlayDatabaseMode.None,
}),
parseRepositoryNwo("github/codeql-action"),
createFeatures([]),
getRunnerLogger(true),
);
t.true(
saveOverlayStatusStub.notCalled,
"saveOverlayStatus should not be called when overlay is not enabled",
);
});
});
},
);
function createTestWorkflow(
steps: workflow.WorkflowJobStep[],
@@ -538,9 +575,8 @@ async function testFailedSarifUpload(
config.dbLocation = "path/to/database";
}
process.env["GITHUB_JOB"] = "analyze";
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_WORKSPACE"] =
"/home/runner/work/codeql-action/codeql-action";
process.env["GITHUB_REPOSITORY"] = DEFAULT_ACTIONS_VARS.GITHUB_REPOSITORY;
process.env["GITHUB_WORKSPACE"] = "/tmp";
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("matrix")
+16 -5
View File
@@ -12,7 +12,11 @@ import { EnvVar } from "./environment";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayStatus, saveOverlayStatus } from "./overlay/status";
import {
createOverlayStatus,
OverlayStatus,
saveOverlayStatus,
} from "./overlay/status";
import { RepositoryNwo, getRepositoryNwo } from "./repository";
import { JobStatus } from "./status-report";
import * as uploadLib from "./upload-lib";
@@ -270,10 +274,17 @@ async function recordOverlayStatus(
return;
}
const overlayStatus: OverlayStatus = {
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: false,
};
const checkRunIdInput = actionsUtil.getOptionalInput("check-run-id");
const checkRunId =
checkRunIdInput !== undefined ? parseInt(checkRunIdInput, 10) : undefined;
const overlayStatus: OverlayStatus = createOverlayStatus(
{
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: false,
},
checkRunId !== undefined && checkRunId >= 0 ? checkRunId : undefined,
);
const diskUsage = await checkDiskUsage(logger);
if (diskUsage === undefined) {
+90 -75
View File
@@ -77,46 +77,49 @@ for (const { runnerEnv, ErrorConstructor, message } of [
"otherwise we recommend rerunning the job.",
},
]) {
test(`cleanupDatabaseClusterDirectory throws a ${ErrorConstructor.name} when cleanup fails on ${runnerEnv} runner`, async (t) => {
await withTmpDir(async (tmpDir: string) => {
process.env["RUNNER_ENVIRONMENT"] = runnerEnv;
test.serial(
`cleanupDatabaseClusterDirectory throws a ${ErrorConstructor.name} when cleanup fails on ${runnerEnv} runner`,
async (t) => {
await withTmpDir(async (tmpDir: string) => {
process.env["RUNNER_ENVIRONMENT"] = runnerEnv;
const dbLocation = path.resolve(tmpDir, "dbs");
fs.mkdirSync(dbLocation, { recursive: true });
const dbLocation = path.resolve(tmpDir, "dbs");
fs.mkdirSync(dbLocation, { recursive: true });
const fileToCleanUp = path.resolve(
dbLocation,
"something-to-cleanup.txt",
);
fs.writeFileSync(fileToCleanUp, "");
const fileToCleanUp = path.resolve(
dbLocation,
"something-to-cleanup.txt",
);
fs.writeFileSync(fileToCleanUp, "");
const rmSyncError = `Failed to clean up file ${fileToCleanUp}`;
const rmSyncError = `Failed to clean up file ${fileToCleanUp}`;
const messages: LoggedMessage[] = [];
t.throws(
() =>
cleanupDatabaseClusterDirectory(
createTestConfig({ dbLocation }),
getRecordingLogger(messages),
{},
() => {
throw new Error(rmSyncError);
},
),
{
instanceOf: ErrorConstructor,
message: `${message(dbLocation)} Details: ${rmSyncError}`,
},
);
const messages: LoggedMessage[] = [];
t.throws(
() =>
cleanupDatabaseClusterDirectory(
createTestConfig({ dbLocation }),
getRecordingLogger(messages),
{},
() => {
throw new Error(rmSyncError);
},
),
{
instanceOf: ErrorConstructor,
message: `${message(dbLocation)} Details: ${rmSyncError}`,
},
);
t.is(messages.length, 1);
t.is(messages[0].type, "warning");
t.is(
messages[0].message,
`The database cluster directory ${dbLocation} must be empty. Attempting to clean it up.`,
);
});
});
t.is(messages.length, 1);
t.is(messages[0].type, "warning");
t.is(
messages[0].message,
`The database cluster directory ${dbLocation} must be empty. Attempting to clean it up.`,
);
});
},
);
}
test("cleanupDatabaseClusterDirectory can disable warning with options", async (t) => {
@@ -459,50 +462,62 @@ test("file coverage information enabled when debugMode is true", async (t) => {
);
});
test("file coverage information enabled when not analyzing a pull request", async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false);
test.serial(
"file coverage information enabled when not analyzing a pull request",
async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false);
t.true(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("github/codeql-action"),
createFeatures([Feature.SkipFileCoverageOnPrs]),
),
);
});
t.true(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("github/codeql-action"),
createFeatures([Feature.SkipFileCoverageOnPrs]),
),
);
},
);
test("file coverage information enabled when owner is not 'github'", async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);
test.serial(
"file coverage information enabled when owner is not 'github'",
async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);
t.true(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("other-org/some-repo"),
createFeatures([Feature.SkipFileCoverageOnPrs]),
),
);
});
t.true(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("other-org/some-repo"),
createFeatures([Feature.SkipFileCoverageOnPrs]),
),
);
},
);
test("file coverage information enabled when feature flag is not enabled", async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);
test.serial(
"file coverage information enabled when feature flag is not enabled",
async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);
t.true(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("github/codeql-action"),
createFeatures([]),
),
);
});
t.true(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("github/codeql-action"),
createFeatures([]),
),
);
},
);
test("file coverage information disabled when all conditions for skipping are met", async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);
test.serial(
"file coverage information disabled when all conditions for skipping are met",
async (t) => {
sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);
t.false(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("github/codeql-action"),
createFeatures([Feature.SkipFileCoverageOnPrs]),
),
);
});
t.false(
await getFileCoverageInformationEnabled(
false, // debugMode
parseRepositoryNwo("github/codeql-action"),
createFeatures([Feature.SkipFileCoverageOnPrs]),
),
);
},
);
+67 -64
View File
@@ -30,65 +30,68 @@ import {
setupTests(test);
test("writeOverlayChangesFile generates correct changes file", async (t) => {
await withTmpDir(async (tmpDir) => {
const dbLocation = path.join(tmpDir, "db");
await fs.promises.mkdir(dbLocation, { recursive: true });
const sourceRoot = path.join(tmpDir, "src");
await fs.promises.mkdir(sourceRoot, { recursive: true });
const tempDir = path.join(tmpDir, "temp");
await fs.promises.mkdir(tempDir, { recursive: true });
test.serial(
"writeOverlayChangesFile generates correct changes file",
async (t) => {
await withTmpDir(async (tmpDir) => {
const dbLocation = path.join(tmpDir, "db");
await fs.promises.mkdir(dbLocation, { recursive: true });
const sourceRoot = path.join(tmpDir, "src");
await fs.promises.mkdir(sourceRoot, { recursive: true });
const tempDir = path.join(tmpDir, "temp");
await fs.promises.mkdir(tempDir, { recursive: true });
const logger = getRunnerLogger(true);
const config = createTestConfig({ dbLocation });
const logger = getRunnerLogger(true);
const config = createTestConfig({ dbLocation });
// Mock the getFileOidsUnderPath function to return base OIDs
const baseOids = {
"unchanged.js": "aaa111",
"modified.js": "bbb222",
"deleted.js": "ccc333",
};
const getFileOidsStubForBase = sinon
.stub(gitUtils, "getFileOidsUnderPath")
.resolves(baseOids);
// Mock the getFileOidsUnderPath function to return base OIDs
const baseOids = {
"unchanged.js": "aaa111",
"modified.js": "bbb222",
"deleted.js": "ccc333",
};
const getFileOidsStubForBase = sinon
.stub(gitUtils, "getFileOidsUnderPath")
.resolves(baseOids);
// Write the base database OIDs file
await writeBaseDatabaseOidsFile(config, sourceRoot);
getFileOidsStubForBase.restore();
// Write the base database OIDs file
await writeBaseDatabaseOidsFile(config, sourceRoot);
getFileOidsStubForBase.restore();
// Mock the getFileOidsUnderPath function to return overlay OIDs
const currentOids = {
"unchanged.js": "aaa111",
"modified.js": "ddd444", // Changed OID
"added.js": "eee555", // New file
};
const getFileOidsStubForOverlay = sinon
.stub(gitUtils, "getFileOidsUnderPath")
.resolves(currentOids);
// Mock the getFileOidsUnderPath function to return overlay OIDs
const currentOids = {
"unchanged.js": "aaa111",
"modified.js": "ddd444", // Changed OID
"added.js": "eee555", // New file
};
const getFileOidsStubForOverlay = sinon
.stub(gitUtils, "getFileOidsUnderPath")
.resolves(currentOids);
// Write the overlay changes file, which uses the mocked overlay OIDs
// and the base database OIDs file
const getTempDirStub = sinon
.stub(actionsUtil, "getTemporaryDirectory")
.returns(tempDir);
const changesFilePath = await writeOverlayChangesFile(
config,
sourceRoot,
logger,
);
getFileOidsStubForOverlay.restore();
getTempDirStub.restore();
// Write the overlay changes file, which uses the mocked overlay OIDs
// and the base database OIDs file
const getTempDirStub = sinon
.stub(actionsUtil, "getTemporaryDirectory")
.returns(tempDir);
const changesFilePath = await writeOverlayChangesFile(
config,
sourceRoot,
logger,
);
getFileOidsStubForOverlay.restore();
getTempDirStub.restore();
const fileContent = await fs.promises.readFile(changesFilePath, "utf-8");
const parsedContent = JSON.parse(fileContent) as { changes: string[] };
const fileContent = await fs.promises.readFile(changesFilePath, "utf-8");
const parsedContent = JSON.parse(fileContent) as { changes: string[] };
t.deepEqual(
parsedContent.changes.sort(),
["added.js", "deleted.js", "modified.js"],
"Should identify added, deleted, and modified files",
);
});
});
t.deepEqual(
parsedContent.changes.sort(),
["added.js", "deleted.js", "modified.js"],
"Should identify added, deleted, and modified files",
);
});
},
);
interface DownloadOverlayBaseDatabaseTestCase {
overlayDatabaseMode: OverlayDatabaseMode;
@@ -206,14 +209,14 @@ const testDownloadOverlayBaseDatabaseFromCache = test.macro({
title: (_, title) => `downloadOverlayBaseDatabaseFromCache: ${title}`,
});
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns stats when successful",
{},
true,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when mode is OverlayDatabaseMode.OverlayBase",
{
@@ -222,7 +225,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when mode is OverlayDatabaseMode.None",
{
@@ -231,7 +234,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when caching is disabled",
{
@@ -240,7 +243,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined in test mode",
{
@@ -249,7 +252,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when cache miss",
{
@@ -258,7 +261,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when download fails",
{
@@ -267,7 +270,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when downloaded database is invalid",
{
@@ -276,7 +279,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when downloaded database doesn't have an overlayBaseSpecifier",
{
@@ -285,7 +288,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when resolving database metadata fails",
{
@@ -294,7 +297,7 @@ test(
false,
);
test(
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when filesystem error occurs",
{
@@ -303,7 +306,7 @@ test(
false,
);
test("overlay-base database cache keys remain stable", async (t) => {
test.serial("overlay-base database cache keys remain stable", async (t) => {
const logger = getRunnerLogger(true);
const config = createTestConfig({ languages: ["python", "javascript"] });
const codeQlVersion = "2.23.0";
+95 -86
View File
@@ -72,101 +72,110 @@ test("getCacheKey rounds disk space down to nearest 10 GiB", async (t) => {
);
});
test("shouldSkipOverlayAnalysis returns false when no cached status exists", async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = mockCodeQLVersion("2.20.0");
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
test.serial(
"shouldSkipOverlayAnalysis returns false when no cached status exists",
async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = mockCodeQLVersion("2.20.0");
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
sinon.stub(actionsCache, "restoreCache").resolves(undefined);
sinon.stub(actionsCache, "restoreCache").resolves(undefined);
const result = await shouldSkipOverlayAnalysis(
codeql,
["javascript"],
makeDiskUsage(50),
logger,
);
const result = await shouldSkipOverlayAnalysis(
codeql,
["javascript"],
makeDiskUsage(50),
logger,
);
t.false(result);
t.true(
messages.some(
(m) =>
m.type === "debug" &&
typeof m.message === "string" &&
m.message.includes("No overlay status found in Actions cache."),
),
);
});
});
test("shouldSkipOverlayAnalysis returns true when cached status indicates failed build", async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = mockCodeQLVersion("2.20.0");
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const status = {
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: false,
};
// Stub restoreCache to write the status file and return a key
sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => {
const statusFile = paths[0];
await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
await fs.promises.writeFile(statusFile, JSON.stringify(status));
return "found-key";
t.false(result);
t.true(
messages.some(
(m) =>
m.type === "debug" &&
typeof m.message === "string" &&
m.message.includes("No overlay status found in Actions cache."),
),
);
});
},
);
const result = await shouldSkipOverlayAnalysis(
codeql,
["javascript"],
makeDiskUsage(50),
logger,
);
test.serial(
"shouldSkipOverlayAnalysis returns true when cached status indicates failed build",
async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = mockCodeQLVersion("2.20.0");
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
t.true(result);
});
});
const status = {
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: false,
};
test("shouldSkipOverlayAnalysis returns false when cached status indicates successful build", async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = mockCodeQLVersion("2.20.0");
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
// Stub restoreCache to write the status file and return a key
sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => {
const statusFile = paths[0];
await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
await fs.promises.writeFile(statusFile, JSON.stringify(status));
return "found-key";
});
const status = {
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: true,
};
const result = await shouldSkipOverlayAnalysis(
codeql,
["javascript"],
makeDiskUsage(50),
logger,
);
sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => {
const statusFile = paths[0];
await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
await fs.promises.writeFile(statusFile, JSON.stringify(status));
return "found-key";
t.true(result);
});
},
);
const result = await shouldSkipOverlayAnalysis(
codeql,
["javascript"],
makeDiskUsage(50),
logger,
);
test.serial(
"shouldSkipOverlayAnalysis returns false when cached status indicates successful build",
async (t) => {
await withTmpDir(async (tmpDir) => {
process.env["RUNNER_TEMP"] = tmpDir;
const codeql = mockCodeQLVersion("2.20.0");
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
t.false(result);
t.true(
messages.some(
(m) =>
m.type === "debug" &&
typeof m.message === "string" &&
m.message.includes(
"Cached overlay status does not indicate a previous unsuccessful attempt",
),
),
);
});
});
const status = {
attemptedToBuildOverlayBaseDatabase: true,
builtOverlayBaseDatabase: true,
};
sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => {
const statusFile = paths[0];
await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
await fs.promises.writeFile(statusFile, JSON.stringify(status));
return "found-key";
});
const result = await shouldSkipOverlayAnalysis(
codeql,
["javascript"],
makeDiskUsage(50),
logger,
);
t.false(result);
t.true(
messages.some(
(m) =>
m.type === "debug" &&
typeof m.message === "string" &&
m.message.includes(
"Cached overlay status does not indicate a previous unsuccessful attempt",
),
),
);
});
},
);
+37 -1
View File
@@ -13,12 +13,17 @@ import * as path from "path";
import * as actionsCache from "@actions/cache";
import { getTemporaryDirectory } from "../actions-util";
import {
getTemporaryDirectory,
getWorkflowRunAttempt,
getWorkflowRunID,
} from "../actions-util";
import { type CodeQL } from "../codeql";
import { Logger } from "../logging";
import {
DiskUsage,
getErrorMessage,
getRequiredEnvParam,
waitForResultWithTimeLimit,
} from "../util";
@@ -38,12 +43,43 @@ function getStatusFilePath(languages: string[]): string {
);
}
/** Details of the job that recorded an overlay status. */
interface JobInfo {
/** The check run ID. This is optional since it is not always available. */
checkRunId?: number;
/** The workflow run ID. */
workflowRunId: number;
/** The workflow run attempt number. */
workflowRunAttempt: number;
/** The name of the job (from GITHUB_JOB). */
name: string;
}
/** Status of an overlay analysis for a group of languages. */
export interface OverlayStatus {
/** Whether the job attempted to build an overlay base database. */
attemptedToBuildOverlayBaseDatabase: boolean;
/** Whether the job successfully built an overlay base database. */
builtOverlayBaseDatabase: boolean;
/** Details of the job that recorded this status. */
job?: JobInfo;
}
/** Creates an `OverlayStatus` populated with the details of the current job. */
export function createOverlayStatus(
attributes: Omit<OverlayStatus, "job">,
checkRunId?: number,
): OverlayStatus {
const job: JobInfo = {
workflowRunId: getWorkflowRunID(),
workflowRunAttempt: getWorkflowRunAttempt(),
name: getRequiredEnvParam("GITHUB_JOB"),
checkRunId,
};
return {
...attributes,
job,
};
}
/**
+18
View File
@@ -0,0 +1,18 @@
import * as fs from "fs";
import test from "ava";
import { setupTests } from "../testing-utils";
import { getToolNames, type Log } from ".";
setupTests(test);
test("getToolNames", (t) => {
const input = fs.readFileSync(
`${__dirname}/../../src/testdata/tool-names.sarif`,
"utf8",
);
const toolNames = getToolNames(JSON.parse(input) as Log);
t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});

Some files were not shown because too many files have changed in this diff Show More