Merge pull request #3551 from github/update-v4.32.6-72d2d850d

Merge main into releases/v4
This commit is contained in:
Óscar San José
2026-03-05 20:29:07 +01:00
committed by GitHub
133 changed files with 8413 additions and 6878 deletions

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
all-platform-bundle:
strategy:
@@ -95,7 +94,7 @@ jobs:
- id: init
uses: ./../action/init
with:
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code

View File

@@ -87,24 +87,24 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
post-processed-sarif-path: ${{ runner.temp }}/post-processed
post-processed-sarif-path: '${{ runner.temp }}/post-processed'
- name: Upload SARIF files
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/results/*.sarif
path: '${{ runner.temp }}/results/*.sarif'
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/post-processed
path: '${{ runner.temp }}/post-processed'
retention-days: 7
if-no-files-found: error
@@ -112,7 +112,7 @@ jobs:
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
EXPECT_PRESENT: 'false'
with:
script: ${{ env.CHECK_SCRIPT }}
@@ -120,7 +120,7 @@ jobs:
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.quality.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.quality.sarif'
EXPECT_PRESENT: 'true'
with:
script: ${{ env.CHECK_SCRIPT }}

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
analyze-ref-input:
strategy:
@@ -107,13 +106,12 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -82,7 +82,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
env:
# Explicitly disable the CLR tracer.
# Explicitly disable the CLR tracer.
COR_ENABLE_PROFILING: ''
COR_PROFILER: ''
COR_PROFILER_PATH_64: ''

View File

@@ -42,8 +42,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}}
group: autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}}
jobs:
autobuild-direct-tracing-with-working-dir:
strategy:

View File

@@ -97,7 +97,7 @@ jobs:
id: init
with:
build-mode: autobuild
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
build-mode-manual:
strategy:
@@ -92,7 +91,7 @@ jobs:
id: init
with:
build-mode: manual
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -64,7 +64,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -77,7 +77,7 @@ jobs:
exit 1
fi
# The latest nightly supports omitting the autobuild Action when the build mode is specified.
# The latest nightly supports omitting the autobuild Action when the build mode is specified.
- uses: ./../action/autobuild
if: matrix.version != 'nightly-latest'

View File

@@ -68,7 +68,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -66,7 +66,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
if: ${{ !contains(steps.init.outputs.codeql-version, '+') }}
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -82,7 +82,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -67,7 +67,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -67,18 +67,18 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check config properties appear in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -78,18 +78,18 @@ jobs:
--ready-for-status-page
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check diagnostics appear in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
export-file-baseline-information:
strategy:
@@ -101,12 +100,12 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check results
run: |

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
go-custom-queries:
strategy:

View File

@@ -77,7 +77,7 @@ jobs:
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
# Deliberately change Go after the `init` step
# Deliberately change Go after the `init` step
- uses: actions/setup-go@v6
with:
go-version: '1.20'
@@ -85,12 +85,12 @@ jobs:
run: go build main.go
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check diagnostic appears in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/go.sarif
SARIF_PATH: '${{ runner.temp }}/results/go.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -42,8 +42,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}}
group: go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}}
jobs:
go-indirect-tracing-workaround-no-file-program:
strategy:
@@ -87,12 +86,12 @@ jobs:
run: go build main.go
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check diagnostic appears in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/go.sarif
SARIF_PATH: '${{ runner.temp }}/results/go.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -50,7 +50,6 @@ jobs:
permissions:
contents: read
packages: read
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -66,7 +65,7 @@ jobs:
- name: Init with registries
uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
tools: ${{ steps.prepare-test.outputs.tools-url }}
config-file: ./.github/codeql/codeql-config-registries.yml
languages: javascript

View File

@@ -65,12 +65,12 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check results
run: |

View File

@@ -63,7 +63,7 @@ jobs:
languages: C#,java-kotlin,swift,typescript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check languages
- name: 'Check languages'
run: |
expected_languages="csharp,java,swift,javascript"
actual_languages=$(jq -r '.languages | join(",")' "$RUNNER_TEMP"/config)

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
local-bundle:
strategy:
@@ -109,7 +108,7 @@ jobs:
- id: init
uses: ./../action/init
with:
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ./codeql-bundle-linux64.tar.zst
- name: Build code

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
multi-language-autodetect:
strategy:
@@ -144,9 +143,8 @@ jobs:
- uses: ./../action/init
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby'
|| '' }}
db-location: '${{ runner.temp }}/customDbLocation'
languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby' || '' }}
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
packaging-codescanning-config-inputs-js:
strategy:
@@ -116,7 +115,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -124,15 +123,14 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-config-inputs-js:
strategy:
@@ -101,7 +100,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -109,15 +108,14 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-config-js:
strategy:
@@ -101,22 +100,21 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging.yml
config-file: '.github/codeql/codeql-config-packaging.yml'
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-inputs-js:
strategy:
@@ -101,7 +100,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging2.yml
config-file: '.github/codeql/codeql-config-packaging2.yml'
languages: javascript
packs: codeql-testing/codeql-pack1@1.0.0, codeql-testing/codeql-pack2, codeql-testing/codeql-pack3:other-query.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -109,14 +108,13 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
remote-config:
strategy:
@@ -109,8 +108,7 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze

View File

@@ -84,8 +84,7 @@ jobs:
language: javascript-typescript
- name: Fail if JavaScript/TypeScript configuration present
if:
fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript
if: fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -99,7 +99,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -108,7 +108,7 @@ jobs:
- uses: ./../action/analyze
with:
skip-queries: true
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Assert No Results
@@ -119,7 +119,7 @@ jobs:
fi
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Assert Results
run: |

View File

@@ -71,8 +71,7 @@ jobs:
id: proxy
uses: ./../action/start-proxy
with:
registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json"
}]'
registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json" }]'
- name: Print proxy outputs
run: |
@@ -81,8 +80,7 @@ jobs:
echo "${{ steps.proxy.outputs.proxy_urls }}"
- name: Fail if proxy outputs are not set
if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port)
|| (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port) || (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -49,8 +49,7 @@ jobs:
if: github.triggering_actor != 'dependabot[bot]'
permissions:
contents: read
security-events: write # needed to upload the SARIF file
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -69,26 +68,20 @@ jobs:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Fail
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
continue-on-error: true
run: exit 1
- uses: ./analyze
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
if: false
with:
category: /test-codeql-version:${{ matrix.version }}
category: '/test-codeql-version:${{ matrix.version }}'
env:
# Internal-only environment variable used to indicate that the post-init Action
# should expect to upload a SARIF file for the failed run.
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
# Make sure the uploading SARIF files feature is enabled.
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
# Upload the failed SARIF file as an integration test of the API endpoint.
CODEQL_ACTION_TEST_MODE: false
# Mark telemetry for this workflow so it can be treated separately.
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
swift-custom-build:
strategy:

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
unset-environment:
strategy:
@@ -109,7 +108,7 @@ jobs:
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
upload-ref-sha-input:
strategy:
@@ -107,19 +106,18 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
env:
CODEQL_ACTION_TEST_MODE: true

41
.github/workflows/__upload-sarif.yml generated vendored
View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
upload-sarif:
strategy:
@@ -117,11 +116,11 @@ jobs:
analysis-kinds: ${{ matrix.analysis-kinds }}
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
output: ${{ runner.temp }}/results
@@ -130,15 +129,15 @@ jobs:
uses: ./../action/upload-sarif
id: upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
- name: Fail for missing output from `upload-sarif` step for `code-scanning`
- name: 'Fail for missing output from `upload-sarif` step for `code-scanning`'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Fail for missing output from `upload-sarif` step for `code-quality`
- name: 'Fail for missing output from `upload-sarif` step for `code-quality`'
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
run: exit 1
@@ -147,28 +146,26 @@ jobs:
id: upload-single-sarif-code-scanning
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.sarif
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
- name: Fail for missing output from `upload-single-sarif-code-scanning` step
if: contains(matrix.analysis-kinds, 'code-scanning') &&
!(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
- name: 'Fail for missing output from `upload-single-sarif-code-scanning` step'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Upload single SARIF file for Code Quality
uses: ./../action/upload-sarif
id: upload-single-sarif-code-quality
if: contains(matrix.analysis-kinds, 'code-quality')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
- name: Fail for missing output from `upload-single-sarif-code-quality` step
if: contains(matrix.analysis-kinds, 'code-quality') &&
!(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
- name: 'Fail for missing output from `upload-single-sarif-code-quality` step'
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
run: exit 1
- name: Change SARIF file extension
@@ -179,12 +176,12 @@ jobs:
id: upload-single-non-sarif
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
- name: Fail for missing output from `upload-single-non-sarif` step
- name: 'Fail for missing output from `upload-single-non-sarif` step'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
env:

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
with-checkout-path:
strategy:
@@ -80,6 +79,7 @@ jobs:
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
# This ensures we don't accidentally use the original checkout for any part of the test.
- name: Check out repository
uses: actions/checkout@v6
- name: Prepare test
@@ -109,8 +109,8 @@ jobs:
# Actions does not support deleting the current working directory, so we
# delete the contents of the directory instead.
rm -rf ./* .github .git
# Check out the actions repo again, but at a different location.
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
# Check out the actions repo again, but at a different location.
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
- uses: actions/checkout@v6
with:
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
@@ -119,7 +119,7 @@ jobs:
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
# it's enough to test one compiled language and one interpreted language
# it's enough to test one compiled language and one interpreted language
languages: csharp,javascript
source-root: x/y/z/some-path/tests/multi-language-repo

View File

@@ -31,34 +31,29 @@ jobs:
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
steps:
- uses: actions/checkout@v6
- name: Init with default CodeQL bundle from the VM image
id: init-default
uses: ./init
with:
languages: javascript
- name: Remove empty database
# allows us to run init a second time
run: |
rm -rf "$RUNNER_TEMP/codeql_databases"
- name: Init with latest CodeQL bundle
id: init-latest
uses: ./init
- name: Set up default CodeQL bundle
id: setup-default
uses: ./setup-codeql
- name: Set up linked CodeQL bundle
id: setup-linked
uses: ./setup-codeql
with:
tools: linked
languages: javascript
- name: Compare default and latest CodeQL bundle versions
- name: Compare default and linked CodeQL bundle versions
id: compare
env:
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
CODEQL_DEFAULT: ${{ steps.setup-default.outputs.codeql-path }}
CODEQL_LINKED: ${{ steps.setup-linked.outputs.codeql-path }}
run: |
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
CODEQL_VERSION_LINKED="$("$CODEQL_LINKED" version --format terse)"
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
echo "Linked CodeQL bundle version is $CODEQL_VERSION_LINKED"
# If we're running on a pull request, run with both bundles, even if `tools: linked` would
# be the same as `tools: null`. This allows us to make the job for each of the bundles a
@@ -66,7 +61,7 @@ jobs:
#
# If we're running on push or schedule, then we can skip running with `tools: linked` when it would be
# the same as running with `tools: null`.
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "merge_group" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "merge_group" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LINKED" ]]; then
VERSIONS_JSON='[null]'
else
VERSIONS_JSON='[null, "linked"]'

View File

@@ -11,6 +11,8 @@ env:
CODEQL_ACTION_OVERLAY_ANALYSIS: true
CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false
CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true
CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_CHECK: false
CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS: true
on:
push:

View File

@@ -41,6 +41,8 @@ jobs:
CODEQL_ACTION_TEST_MODE: true
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
@@ -87,7 +89,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v8
- name: Check expected artifacts exist
run: |
LANGUAGES="cpp csharp go java javascript python"

View File

@@ -40,6 +40,8 @@ jobs:
timeout-minutes: 45
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
runs-on: ubuntu-latest
steps:
- name: Check out repository
@@ -81,7 +83,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v8
- name: Check expected artifacts exist
run: |
VERSIONS="stable-v2.20.3 default linked nightly-latest"

View File

@@ -42,11 +42,6 @@ jobs:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Install dependencies
run: |
# Use the system Bash shell to ensure we can run commands like `npm ci`
@@ -68,7 +63,7 @@ jobs:
- name: Run pr-checks tests
if: always()
working-directory: pr-checks
run: python -m unittest discover
run: npm ci && npx tsx --test
- name: Lint
if: always() && matrix.os != 'windows-latest'

View File

@@ -26,6 +26,8 @@ jobs:
timeout-minutes: 45
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
runs-on: windows-latest
steps:

View File

@@ -73,24 +73,17 @@ jobs:
npm run lint -- --fix
npm run build
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Sync back version updates to generated workflows
# Only sync back versions on Dependabot update PRs
if: startsWith(env.HEAD_REF, 'dependabot/')
working-directory: pr-checks
run: |
python3 sync_back.py -v
npm ci
npx tsx sync_back.ts --verbose
- name: Generate workflows
working-directory: pr-checks
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml==0.17.31
python3 sync.py
run: ./sync.sh
- name: "Merge in progress: Finish merge and push"
if: steps.merge.outputs.merge-in-progress == 'true'

View File

@@ -19,7 +19,7 @@ if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
git diff
git status
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update"
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && ./sync.sh' to update"
echo "### Generated workflows diff" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY

View File

@@ -2,6 +2,10 @@
See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
## 4.32.6 - 05 Mar 2026
- Update default CodeQL bundle version to [2.24.3](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.3). [#3548](https://github.com/github/codeql-action/pull/3548)
## 4.32.5 - 02 Mar 2026
- Repositories owned by an organization can now set up the `github-codeql-disable-overlay` custom repository property to disable [improved incremental analysis for CodeQL](https://github.com/github/roadmap/issues/1158). First, create a custom repository property with the name `github-codeql-disable-overlay` and the type "True/false" in the organization's settings. Then in the repository's settings, set this property to `true` to disable improved incremental analysis. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). This feature is not yet available on GitHub Enterprise Server. [#3507](https://github.com/github/codeql-action/pull/3507)

View File

@@ -92,7 +92,7 @@ We typically deprecate a version of CodeQL when the GitHub Enterprise Server (GH
1. Remove support for the old version of CodeQL.
- Bump `CODEQL_MINIMUM_VERSION` in `src/codeql.ts` to the new minimum version of CodeQL.
- Remove any code that is only needed to support the old version of CodeQL. This is often behind a version guard, so look for instances of version numbers between the old minimum version and the new minimum version in the codebase. A good place to start is the list of version numbers in `src/codeql.ts`.
- Update the default set of CodeQL test versions in `pr-checks/sync.py`.
- Update the default set of CodeQL test versions in `pr-checks/sync.ts`.
- Remove the old minimum version of CodeQL.
- Add the latest patch release for any new CodeQL minor version series that have shipped in GHES.
- Run the script to update the generated PR checks.

View File

@@ -21,6 +21,7 @@ export default [
"build.mjs",
"eslint.config.mjs",
".github/**/*",
"pr-checks/**/*",
],
},
// eslint recommended config

View File

@@ -159,6 +159,11 @@ inputs:
description: >-
Explicitly enable or disable caching of project build dependencies.
required: false
check-run-id:
description: >-
[Internal] The ID of the check run, as provided by the Actions runtime environment. Do not set this value manually.
default: ${{ job.check_run_id }}
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -161099,7 +161100,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -162226,6 +162227,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",

304
lib/analyze-action.js generated
View File

@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var os5 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs17.existsSync(filePath)) {
if (!fs18.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs17.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
_a = fs17.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs17.promises.readlink(fsPath);
const result = yield fs18.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs17.constants.O_RDONLY;
exports2.READONLY = fs18.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -50403,7 +50404,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core15 = __importStar2(require_core());
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path16 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50457,7 +50458,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core15.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs17.promises.lstat(searchPath));
yield __await2(fs18.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50491,7 +50492,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs17.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50526,7 +50527,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs17.promises.stat(item.path);
stats = yield fs18.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50538,10 +50539,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs17.promises.lstat(item.path);
stats = yield fs18.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs17.promises.realpath(item.path);
const realPath = yield fs18.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50650,7 +50651,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core15 = __importStar2(require_core());
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path16 = __importStar2(require("path"));
@@ -50673,13 +50674,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs17.statSync(file).isDirectory()) {
if (fs18.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs17.createReadStream(file), hash2);
yield pipeline(fs18.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -52054,7 +52055,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52083,7 +52084,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs17.statSync(filePath).size;
return fs18.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52121,7 +52122,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs17.unlink)(filePath);
return util.promisify(fs18.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52163,7 +52164,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs17.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92320,7 +92321,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92431,7 +92432,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs17.createWriteStream(archivePath);
const writeStream = fs18.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92456,7 +92457,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs17.promises.open(archivePath, "w");
const archiveDescriptor = yield fs18.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92572,7 +92573,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs17.openSync(archivePath, "w");
const fd = fs18.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92590,12 +92591,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs17.writeFileSync(fd, result);
fs18.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs17.closeSync(fd);
fs18.closeSync(fd);
}
}
});
@@ -92917,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93052,7 +93053,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs17.openSync(archivePath, "r");
const fd = fs18.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93066,7 +93067,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs17.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, {
fd,
start,
end,
@@ -93077,7 +93078,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs17.closeSync(fd);
fs18.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os5 = require("os");
var cp = require("child_process");
var fs17 = require("fs");
var fs18 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os5.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs17.existsSync(lsbReleaseFile)) {
contents = fs17.readFileSync(lsbReleaseFile).toString();
} else if (fs17.existsSync(osReleaseFile)) {
contents = fs17.readFileSync(osReleaseFile).toString();
if (fs18.existsSync(lsbReleaseFile)) {
contents = fs18.readFileSync(lsbReleaseFile).toString();
} else if (fs18.existsSync(osReleaseFile)) {
contents = fs18.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core15 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os5 = __importStar2(require("os"));
var path16 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs17.existsSync(dest)) {
if (fs18.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs17.createWriteStream(dest));
yield pipeline(readStream, fs18.createWriteStream(dest));
core15.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source dir: ${sourceDir}`);
if (!fs17.statSync(sourceDir).isDirectory()) {
if (!fs18.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs17.readdirSync(sourceDir)) {
for (const itemName of fs18.readdirSync(sourceDir)) {
const s = path16.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source file: ${sourceFile}`);
if (!fs17.statSync(sourceFile).isFile()) {
if (!fs18.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core15.debug(`checking cache: ${cachePath}`);
if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) {
if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) {
core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os5.arch();
const toolPath = path16.join(_getCacheDirectory(), toolName);
if (fs17.existsSync(toolPath)) {
const children = fs17.readdirSync(toolPath);
if (fs18.existsSync(toolPath)) {
const children = fs18.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path16.join(toolPath, child, arch2 || "");
if (fs17.existsSync(fullPath) && fs17.existsSync(`${fullPath}.complete`)) {
if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs17.writeFileSync(markerPath, "");
fs18.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103232,7 +103233,7 @@ __export(analyze_action_exports, {
runPromise: () => runPromise
});
module.exports = __toCommonJS(analyze_action_exports);
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var import_path4 = __toESM(require("path"));
var import_perf_hooks3 = require("perf_hooks");
var core14 = __toESM(require_core());
@@ -103261,21 +103262,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs17 = options.fs || await import("node:fs/promises");
const fs18 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105916,17 +105917,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run2 of sarif.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
@@ -107360,8 +107350,8 @@ var path5 = __toESM(require("path"));
var semver5 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
@@ -107986,6 +107976,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -111235,7 +111230,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs15 = __toESM(require("fs"));
var fs16 = __toESM(require("fs"));
var path14 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -112316,12 +112311,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run2 of sarif.runs || []) {
for (const run2 of sarifLog.runs || []) {
const artifacts = run2.artifacts || [];
for (const result of run2.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -112361,7 +112356,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -112396,36 +112391,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
};
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// src/sarif/index.ts
var fs15 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run2 of sarifFile.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs15.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
runs.push(...sarifLog?.runs || []);
}
return combinedSarif;
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run2) => run2.tool?.driver?.name === "CodeQL"
);
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run2) {
@@ -112438,10 +112445,13 @@ function createRunKey(run2) {
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run2 of sarifObject.runs) {
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run2 of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
@@ -112451,6 +112461,10 @@ function areAllRunsUnique(sarifObjects) {
}
return true;
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -112479,9 +112493,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs15.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -112534,27 +112546,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path14.resolve(tempDir, "combined-sarif");
fs15.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs15.mkdtempSync(path14.resolve(baseTempDir, "output-"));
fs16.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs16.mkdtempSync(path14.resolve(baseTempDir, "output-"));
const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs15.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarif.runs || []) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
run2.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -112577,7 +112589,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs15.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs16.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -112611,7 +112623,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs15.readdirSync(dir, { withFileTypes: true });
const entries = fs16.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path14.resolve(dir, entry.name));
@@ -112624,7 +112636,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs15.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs16.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -112671,9 +112683,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -112687,26 +112699,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -112733,6 +112745,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -112758,7 +112771,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs15.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs16.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -112769,14 +112782,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -112784,21 +112797,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -112815,12 +112828,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -112862,9 +112875,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs15.existsSync(outputDir)) {
fs15.mkdirSync(outputDir, { recursive: true });
} else if (!fs15.lstatSync(outputDir).isDirectory()) {
if (!fs16.existsSync(outputDir)) {
fs16.mkdirSync(outputDir, { recursive: true });
} else if (!fs16.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -112874,7 +112887,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs15.writeFileSync(outputFile, sarifPayload);
fs16.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -112972,9 +112985,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs || []) {
const id = run2?.automationDetails?.id;
const tool = run2.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -112993,15 +113006,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs) {
if (run2.results) {
run2.results = run2.results.filter((result) => {
const locations = [
@@ -113022,7 +113036,7 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// src/upload-sarif.ts
@@ -113107,7 +113121,7 @@ function doesGoExtractionOutputExist(config) {
"go" /* go */
);
const trapDirectory = import_path4.default.join(golangDbDirectory, "trap", "go" /* go */);
return fs16.existsSync(trapDirectory) && fs16.readdirSync(trapDirectory).some(
return fs17.existsSync(trapDirectory) && fs17.readdirSync(trapDirectory).some(
(fileName) => [
".trap",
".trap.gz",

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -102991,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -103819,8 +103820,8 @@ var path3 = __toESM(require("path"));
var semver5 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs2 = __toESM(require("fs"));
@@ -104275,6 +104276,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",

View File

@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.24.2",
"cliVersion": "2.24.2",
"priorBundleVersion": "codeql-bundle-v2.24.1",
"priorCliVersion": "2.24.1"
"bundleVersion": "codeql-bundle-v2.24.3",
"cliVersion": "2.24.3",
"priorBundleVersion": "codeql-bundle-v2.24.2",
"priorCliVersion": "2.24.2"
}

733
lib/init-action-post.js generated

File diff suppressed because it is too large Load Diff

231
lib/init-action.js generated
View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -103210,7 +103211,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -104856,8 +104857,8 @@ var path6 = __toESM(require("path"));
var semver5 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
@@ -105521,6 +105522,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -106387,9 +106393,9 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */,
swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */
};
async function isOverlayAnalysisFeatureEnabled(features, codeql, languages, codeScanningConfig) {
async function checkOverlayAnalysisFeatureEnabled(features, codeql, languages, codeScanningConfig) {
if (!await features.getValue("overlay_analysis" /* OverlayAnalysis */, codeql)) {
return false;
return new Failure("overall-feature-not-enabled" /* OverallFeatureNotEnabled */);
}
let enableForCodeScanningOnly = false;
for (const language of languages) {
@@ -106402,17 +106408,20 @@ async function isOverlayAnalysisFeatureEnabled(features, codeql, languages, code
enableForCodeScanningOnly = true;
continue;
}
return false;
return new Failure("language-not-enabled" /* LanguageNotEnabled */);
}
if (enableForCodeScanningOnly) {
return codeScanningConfig["disable-default-queries"] !== true && codeScanningConfig.packs === void 0 && codeScanningConfig.queries === void 0 && codeScanningConfig["query-filters"] === void 0;
const usesDefaultQueriesOnly = codeScanningConfig["disable-default-queries"] !== true && codeScanningConfig.packs === void 0 && codeScanningConfig.queries === void 0 && codeScanningConfig["query-filters"] === void 0;
if (!usesDefaultQueriesOnly) {
return new Failure("non-default-queries" /* NonDefaultQueries */);
}
}
return true;
return new Success(void 0);
}
function runnerHasSufficientDiskSpace(diskUsage, logger, useV2ResourceChecks) {
const minimumDiskSpaceBytes = useV2ResourceChecks ? OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES : OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES;
if (diskUsage === void 0 || diskUsage.numAvailableBytes < minimumDiskSpaceBytes) {
const diskSpaceMb = diskUsage === void 0 ? 0 : Math.round(diskUsage.numAvailableBytes / 1e6);
if (diskUsage.numAvailableBytes < minimumDiskSpaceBytes) {
const diskSpaceMb = Math.round(diskUsage.numAvailableBytes / 1e6);
const minimumDiskSpaceMb = Math.round(minimumDiskSpaceBytes / 1e6);
logger.info(
`Setting overlay database mode to ${"none" /* None */} due to insufficient disk space (${diskSpaceMb} MB, needed ${minimumDiskSpaceMb} MB).`
@@ -106443,93 +106452,110 @@ async function runnerHasSufficientMemory(codeql, ramInput, logger) {
);
return true;
}
async function runnerSupportsOverlayAnalysis(codeql, diskUsage, ramInput, logger, useV2ResourceChecks) {
async function checkRunnerResources(codeql, diskUsage, ramInput, logger, useV2ResourceChecks) {
if (!runnerHasSufficientDiskSpace(diskUsage, logger, useV2ResourceChecks)) {
return false;
return new Failure("insufficient-disk-space" /* InsufficientDiskSpace */);
}
if (!await runnerHasSufficientMemory(codeql, ramInput, logger)) {
return false;
return new Failure("insufficient-memory" /* InsufficientMemory */);
}
return true;
return new Success(void 0);
}
async function getOverlayDatabaseMode(codeql, features, languages, sourceRoot, buildMode, ramInput, codeScanningConfig, repositoryProperties, gitVersion, logger) {
let overlayDatabaseMode = "none" /* None */;
let useOverlayDatabaseCaching = false;
let disabledReason;
async function checkOverlayEnablement(codeql, features, languages, sourceRoot, buildMode, ramInput, codeScanningConfig, repositoryProperties, gitVersion, logger) {
const modeEnv = process.env.CODEQL_OVERLAY_DATABASE_MODE;
if (modeEnv === "overlay" /* Overlay */ || modeEnv === "overlay-base" /* OverlayBase */ || modeEnv === "none" /* None */) {
overlayDatabaseMode = modeEnv;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} from the CODEQL_OVERLAY_DATABASE_MODE environment variable.`
`Setting overlay database mode to ${modeEnv} from the CODEQL_OVERLAY_DATABASE_MODE environment variable.`
);
} else if (repositoryProperties["github-codeql-disable-overlay" /* DISABLE_OVERLAY */] === true) {
if (modeEnv === "none" /* None */) {
return new Failure("disabled-by-environment-variable" /* DisabledByEnvironmentVariable */);
}
return validateOverlayDatabaseMode(
modeEnv,
false,
codeql,
languages,
sourceRoot,
buildMode,
gitVersion,
logger
);
}
if (repositoryProperties["github-codeql-disable-overlay" /* DISABLE_OVERLAY */] === true) {
logger.info(
`Setting overlay database mode to ${"none" /* None */} because the ${"github-codeql-disable-overlay" /* DISABLE_OVERLAY */} repository property is set to true.`
);
overlayDatabaseMode = "none" /* None */;
disabledReason = "disabled-by-repository-property" /* DisabledByRepositoryProperty */;
} else if (await isOverlayAnalysisFeatureEnabled(
return new Failure("disabled-by-repository-property" /* DisabledByRepositoryProperty */);
}
const featureResult = await checkOverlayAnalysisFeatureEnabled(
features,
codeql,
languages,
codeScanningConfig
)) {
const performResourceChecks = !await features.getValue(
"overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */,
codeql
);
if (featureResult.isFailure()) {
return featureResult;
}
const performResourceChecks = !await features.getValue(
"overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */,
codeql
);
const useV2ResourceChecks = await features.getValue(
"overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */
);
const checkOverlayStatus = await features.getValue(
"overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */
);
const needDiskUsage = performResourceChecks || checkOverlayStatus;
const diskUsage = needDiskUsage ? await checkDiskUsage(logger) : void 0;
if (needDiskUsage && diskUsage === void 0) {
logger.warning(
`Unable to determine disk usage, therefore setting overlay database mode to ${"none" /* None */}.`
);
const useV2ResourceChecks = await features.getValue(
"overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */
return new Failure("unable-to-determine-disk-usage" /* UnableToDetermineDiskUsage */);
}
const resourceResult = performResourceChecks && diskUsage !== void 0 ? await checkRunnerResources(
codeql,
diskUsage,
ramInput,
logger,
useV2ResourceChecks
) : new Success(void 0);
if (resourceResult.isFailure()) {
return resourceResult;
}
if (checkOverlayStatus && diskUsage !== void 0 && await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger)) {
logger.info(
`Setting overlay database mode to ${"none" /* None */} because overlay analysis previously failed with this combination of languages, disk space, and CodeQL version.`
);
const checkOverlayStatus = await features.getValue(
"overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */
return new Failure("skipped-due-to-cached-status" /* SkippedDueToCachedStatus */);
}
let overlayDatabaseMode;
if (isAnalyzingPullRequest()) {
overlayDatabaseMode = "overlay" /* Overlay */;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing a pull request.`
);
} else if (await isAnalyzingDefaultBranch()) {
overlayDatabaseMode = "overlay-base" /* OverlayBase */;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing the default branch.`
);
const diskUsage = performResourceChecks || checkOverlayStatus ? await checkDiskUsage(logger) : void 0;
if (performResourceChecks && !await runnerSupportsOverlayAnalysis(
codeql,
diskUsage,
ramInput,
logger,
useV2ResourceChecks
)) {
overlayDatabaseMode = "none" /* None */;
disabledReason = "insufficient-resources" /* InsufficientResources */;
} else if (checkOverlayStatus && diskUsage === void 0) {
logger.warning(
`Unable to determine disk usage, therefore setting overlay database mode to ${"none" /* None */}.`
);
overlayDatabaseMode = "none" /* None */;
disabledReason = "unable-to-determine-disk-usage" /* UnableToDetermineDiskUsage */;
} else if (checkOverlayStatus && diskUsage && await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger)) {
logger.info(
`Setting overlay database mode to ${"none" /* None */} because overlay analysis previously failed with this combination of languages, disk space, and CodeQL version.`
);
overlayDatabaseMode = "none" /* None */;
disabledReason = "skipped-due-to-cached-status" /* SkippedDueToCachedStatus */;
} else if (isAnalyzingPullRequest()) {
overlayDatabaseMode = "overlay" /* Overlay */;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing a pull request.`
);
} else if (await isAnalyzingDefaultBranch()) {
overlayDatabaseMode = "overlay-base" /* OverlayBase */;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing the default branch.`
);
}
} else {
disabledReason = "feature-not-enabled" /* FeatureNotEnabled */;
}
const disabledResult = (reason) => ({
overlayDatabaseMode: "none" /* None */,
useOverlayDatabaseCaching: false,
disabledReason: reason
});
if (overlayDatabaseMode === "none" /* None */) {
return disabledResult(disabledReason);
return new Failure("not-pull-request-or-default-branch" /* NotPullRequestOrDefaultBranch */);
}
return validateOverlayDatabaseMode(
overlayDatabaseMode,
true,
codeql,
languages,
sourceRoot,
buildMode,
gitVersion,
logger
);
}
async function validateOverlayDatabaseMode(overlayDatabaseMode, useOverlayDatabaseCaching, codeql, languages, sourceRoot, buildMode, gitVersion, logger) {
if (buildMode !== "none" /* None */ && (await Promise.all(
languages.map(
async (l) => l !== "go" /* go */ && // Workaround to allow overlay analysis for Go with any build
@@ -106542,37 +106568,36 @@ async function getOverlayDatabaseMode(codeql, features, languages, sourceRoot, b
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because build-mode is set to "${buildMode}" instead of "none". Falling back to creating a normal full database instead.`
);
return disabledResult("incompatible-build-mode" /* IncompatibleBuildMode */);
return new Failure("incompatible-build-mode" /* IncompatibleBuildMode */);
}
if (!await codeQlVersionAtLeast(codeql, CODEQL_OVERLAY_MINIMUM_VERSION)) {
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because the CodeQL CLI is older than ${CODEQL_OVERLAY_MINIMUM_VERSION}. Falling back to creating a normal full database instead.`
);
return disabledResult("incompatible-codeql" /* IncompatibleCodeQl */);
return new Failure("incompatible-codeql" /* IncompatibleCodeQl */);
}
if (await getGitRoot(sourceRoot) === void 0) {
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because the source root "${sourceRoot}" is not inside a git repository. Falling back to creating a normal full database instead.`
);
return disabledResult("no-git-root" /* NoGitRoot */);
return new Failure("no-git-root" /* NoGitRoot */);
}
if (gitVersion === void 0) {
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because the Git version could not be determined. Falling back to creating a normal full database instead.`
);
return disabledResult("incompatible-git" /* IncompatibleGit */);
return new Failure("incompatible-git" /* IncompatibleGit */);
}
if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) {
logger.warning(
`Cannot build an ${overlayDatabaseMode} database because the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. Falling back to creating a normal full database instead.`
);
return disabledResult("incompatible-git" /* IncompatibleGit */);
return new Failure("incompatible-git" /* IncompatibleGit */);
}
return {
return new Success({
overlayDatabaseMode,
useOverlayDatabaseCaching,
disabledReason
};
useOverlayDatabaseCaching
});
}
function dbLocationOrDefault(dbLocation, tempDir) {
return dbLocation || path9.resolve(tempDir, "codeql_databases");
@@ -106660,11 +106685,7 @@ async function initConfig(features, inputs) {
} else {
logger.debug(`Skipping check for generated files.`);
}
const {
overlayDatabaseMode,
useOverlayDatabaseCaching,
disabledReason: overlayDisabledReason
} = await getOverlayDatabaseMode(
const overlayDatabaseModeResult = await checkOverlayEnablement(
inputs.codeql,
inputs.features,
config.languages,
@@ -106676,19 +106697,27 @@ async function initConfig(features, inputs) {
gitVersion,
logger
);
logger.info(
`Using overlay database mode: ${overlayDatabaseMode} ${useOverlayDatabaseCaching ? "with" : "without"} caching.`
);
config.overlayDatabaseMode = overlayDatabaseMode;
config.useOverlayDatabaseCaching = useOverlayDatabaseCaching;
if (overlayDisabledReason !== void 0) {
if (overlayDatabaseModeResult.isSuccess()) {
const { overlayDatabaseMode, useOverlayDatabaseCaching } = overlayDatabaseModeResult.value;
logger.info(
`Using overlay database mode: ${overlayDatabaseMode} ${useOverlayDatabaseCaching ? "with" : "without"} caching.`
);
config.overlayDatabaseMode = overlayDatabaseMode;
config.useOverlayDatabaseCaching = useOverlayDatabaseCaching;
} else {
const overlayDisabledReason = overlayDatabaseModeResult.value;
logger.info(
`Using overlay database mode: ${"none" /* None */} without caching.`
);
config.overlayDatabaseMode = "none" /* None */;
config.useOverlayDatabaseCaching = false;
await addOverlayDisablementDiagnostics(
config,
inputs.codeql,
overlayDisabledReason
);
}
if (overlayDatabaseMode === "overlay" /* Overlay */ || await shouldPerformDiffInformedAnalysis(
if (config.overlayDatabaseMode === "overlay" /* Overlay */ || await shouldPerformDiffInformedAnalysis(
inputs.codeql,
inputs.features,
logger

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -102991,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -104266,6 +104267,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -103047,7 +103048,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -103678,8 +103679,8 @@ var path4 = __toESM(require("path"));
var semver4 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
@@ -104163,6 +104164,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -161096,7 +161097,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -161632,6 +161633,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -120611,8 +120612,8 @@ var path = __toESM(require("path"));
var semver4 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var actionsCache = __toESM(require_cache5());
@@ -120955,6 +120956,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -121747,6 +121753,18 @@ var LANGUAGE_TO_REGISTRY_TYPE = {
rust: ["cargo_registry"],
go: ["goproxy_server", "git_source"]
};
var NEW_LANGUAGE_TO_REGISTRY_TYPE = {
actions: [],
cpp: [],
java: ["maven_repository"],
csharp: ["nuget_feed"],
javascript: [],
python: [],
ruby: [],
rust: [],
swift: [],
go: ["goproxy_server", "git_source"]
};
function getRegistryAddress(registry) {
if (isDefined2(registry.url)) {
return {
@@ -121764,8 +121782,9 @@ function getRegistryAddress(registry) {
);
}
}
function getCredentials(logger, registrySecrets, registriesCredentials, language) {
const registryTypeForLanguage = language ? LANGUAGE_TO_REGISTRY_TYPE[language] : void 0;
function getCredentials(logger, registrySecrets, registriesCredentials, language, skipUnusedRegistries = false) {
const registryMapping = skipUnusedRegistries ? NEW_LANGUAGE_TO_REGISTRY_TYPE : LANGUAGE_TO_REGISTRY_TYPE;
const registryTypeForLanguage = language ? registryMapping[language] : void 0;
let credentialsStr;
if (registriesCredentials !== void 0) {
logger.info(`Using registries_credentials input.`);
@@ -122262,11 +122281,15 @@ async function run(startedAt) {
);
const languageInput = getOptionalInput("language");
language = languageInput ? parseLanguage(languageInput) : void 0;
const skipUnusedRegistries = await features.getValue(
"start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */
);
const credentials = getCredentials(
logger,
getOptionalInput("registry_secrets"),
getOptionalInput("registries_credentials"),
language
language,
skipUnusedRegistries
);
if (credentials.length === 0) {
logger.info("No credentials found, skipping proxy setup.");

310
lib/upload-lib.js generated
View File

@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var os2 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs12.existsSync(filePath)) {
if (!fs13.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs12.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, {
fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
_a = fs12.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs12.promises.readlink(fsPath);
const result = yield fs13.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs12.constants.O_RDONLY;
exports2.READONLY = fs13.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -47283,7 +47283,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -47292,7 +47292,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -47341,6 +47341,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -47349,14 +47350,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -50403,7 +50404,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core12 = __importStar2(require_core());
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path12 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50457,7 +50458,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core12.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs12.promises.lstat(searchPath));
yield __await2(fs13.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50491,7 +50492,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs12.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel));
const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50526,7 +50527,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs12.promises.stat(item.path);
stats = yield fs13.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50538,10 +50539,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs12.promises.lstat(item.path);
stats = yield fs13.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs12.promises.realpath(item.path);
const realPath = yield fs13.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50650,7 +50651,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var core12 = __importStar2(require_core());
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path12 = __importStar2(require("path"));
@@ -50673,13 +50674,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs12.statSync(file).isDirectory()) {
if (fs13.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs12.createReadStream(file), hash2);
yield pipeline(fs13.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -52054,7 +52055,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52083,7 +52084,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs12.statSync(filePath).size;
return fs13.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52121,7 +52122,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs12.unlink)(filePath);
return util.promisify(fs13.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52163,7 +52164,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs12.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92320,7 +92321,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92431,7 +92432,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs12.createWriteStream(archivePath);
const writeStream = fs13.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92456,7 +92457,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs12.promises.open(archivePath, "w");
const archiveDescriptor = yield fs13.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92572,7 +92573,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs12.openSync(archivePath, "w");
const fd = fs13.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92590,12 +92591,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs12.writeFileSync(fd, result);
fs13.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs12.closeSync(fd);
fs13.closeSync(fd);
}
}
});
@@ -92917,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({
var core12 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93052,7 +93053,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs12.openSync(archivePath, "r");
const fd = fs13.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93066,7 +93067,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs12.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, {
fd,
start,
end,
@@ -93077,7 +93078,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs12.closeSync(fd);
fs13.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
var fs12 = require("fs");
var fs13 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os2.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs12.existsSync(lsbReleaseFile)) {
contents = fs12.readFileSync(lsbReleaseFile).toString();
} else if (fs12.existsSync(osReleaseFile)) {
contents = fs12.readFileSync(osReleaseFile).toString();
if (fs13.existsSync(lsbReleaseFile)) {
contents = fs13.readFileSync(lsbReleaseFile).toString();
} else if (fs13.existsSync(osReleaseFile)) {
contents = fs13.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core12 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os2 = __importStar2(require("os"));
var path12 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs12.existsSync(dest)) {
if (fs13.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs12.createWriteStream(dest));
yield pipeline(readStream, fs13.createWriteStream(dest));
core12.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os2.arch();
core12.debug(`Caching tool ${tool} ${version} ${arch2}`);
core12.debug(`source dir: ${sourceDir}`);
if (!fs12.statSync(sourceDir).isDirectory()) {
if (!fs13.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs12.readdirSync(sourceDir)) {
for (const itemName of fs13.readdirSync(sourceDir)) {
const s = path12.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os2.arch();
core12.debug(`Caching tool ${tool} ${version} ${arch2}`);
core12.debug(`source file: ${sourceFile}`);
if (!fs12.statSync(sourceFile).isFile()) {
if (!fs13.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core12.debug(`checking cache: ${cachePath}`);
if (fs12.existsSync(cachePath) && fs12.existsSync(`${cachePath}.complete`)) {
if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) {
core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os2.arch();
const toolPath = path12.join(_getCacheDirectory(), toolName);
if (fs12.existsSync(toolPath)) {
const children = fs12.readdirSync(toolPath);
if (fs13.existsSync(toolPath)) {
const children = fs13.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path12.join(toolPath, child, arch2 || "");
if (fs12.existsSync(fullPath) && fs12.existsSync(`${fullPath}.complete`)) {
if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs12.writeFileSync(markerPath, "");
fs13.writeFileSync(markerPath, "");
core12.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103229,13 +103230,12 @@ var require_sarif_schema_2_1_0 = __commonJS({
// src/upload-lib.ts
var upload_lib_exports = {};
__export(upload_lib_exports, {
InvalidSarifUploadError: () => InvalidSarifUploadError,
buildPayload: () => buildPayload,
findSarifFilesInDir: () => findSarifFilesInDir,
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
populateRunAutomationDetails: () => populateRunAutomationDetails,
postProcessSarifFiles: () => postProcessSarifFiles,
readSarifFile: () => readSarifFile,
readSarifFileOrThrow: () => readSarifFileOrThrow,
shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest,
shouldShowCombineSarifFilesDeprecationWarning: () => shouldShowCombineSarifFilesDeprecationWarning,
@@ -103249,7 +103249,7 @@ __export(upload_lib_exports, {
writePostProcessedFiles: () => writePostProcessedFiles
});
module.exports = __toCommonJS(upload_lib_exports);
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var path11 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -103278,21 +103278,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs12 = options.fs || await import("node:fs/promises");
const fs13 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs12.lstat(itemPath, { bigint: true }) : await fs12.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs12.readdir(itemPath) : await fs12.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105932,17 +105932,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getCodeQLDatabasePath(config, language) {
return path.resolve(config.dbLocation, language);
}
@@ -106945,8 +106934,8 @@ var path5 = __toESM(require("path"));
var semver5 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
@@ -107434,6 +107423,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -110204,12 +110198,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run of sarif.runs || []) {
for (const run of sarifLog.runs || []) {
const artifacts = run.artifacts || [];
for (const result of run.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -110249,7 +110243,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -110284,36 +110278,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
};
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// src/sarif/index.ts
var fs11 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run of sarifFile.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs11.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
runs.push(...sarifLog?.runs || []);
}
return combinedSarif;
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run) => run.tool?.driver?.name === "CodeQL"
);
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run) => run.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run) {
@@ -110326,10 +110332,13 @@ function createRunKey(run) {
automationId: run.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run of sarifObject.runs) {
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run));
if (keys.has(key)) {
return false;
@@ -110339,6 +110348,10 @@ function areAllRunsUnique(sarifObjects) {
}
return true;
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -110367,9 +110380,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs11.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -110422,27 +110433,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path11.resolve(tempDir, "combined-sarif");
fs11.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs11.mkdtempSync(path11.resolve(baseTempDir, "output-"));
fs12.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs12.mkdtempSync(path11.resolve(baseTempDir, "output-"));
const outputFile = path11.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs11.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run of sarif.runs || []) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === void 0) {
run.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -110465,7 +110476,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs11.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -110499,7 +110510,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs11.readdirSync(dir, { withFileTypes: true });
const entries = fs12.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path11.resolve(dir, entry.name));
@@ -110512,11 +110523,11 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
function getSarifFilePaths(sarifPath, isSarif) {
if (!fs11.existsSync(sarifPath)) {
if (!fs12.existsSync(sarifPath)) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
let sarifFiles;
if (fs11.lstatSync(sarifPath).isDirectory()) {
if (fs12.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath, isSarif);
if (sarifFiles.length === 0) {
throw new ConfigurationError(
@@ -110529,7 +110540,7 @@ function getSarifFilePaths(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs11.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -110576,9 +110587,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -110592,26 +110603,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -110638,6 +110649,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -110663,7 +110675,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs11.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -110674,14 +110686,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -110689,21 +110701,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -110750,12 +110762,12 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -110797,9 +110809,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs11.existsSync(outputDir)) {
fs11.mkdirSync(outputDir, { recursive: true });
} else if (!fs11.lstatSync(outputDir).isDirectory()) {
if (!fs12.existsSync(outputDir)) {
fs12.mkdirSync(outputDir, { recursive: true });
} else if (!fs12.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -110809,7 +110821,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs11.writeFileSync(outputFile, sarifPayload);
fs12.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -110907,9 +110919,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run of sarif.runs) {
for (const run of sarifLog.runs || []) {
const id = run?.automationDetails?.id;
const tool = run.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -110928,15 +110940,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run of sarif.runs) {
for (const run of sarifLog.runs) {
if (run.results) {
run.results = run.results.filter((result) => {
const locations = [
@@ -110957,17 +110970,16 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
InvalidSarifUploadError,
buildPayload,
findSarifFilesInDir,
getGroupedSarifFilePaths,
populateRunAutomationDetails,
postProcessSarifFiles,
readSarifFile,
readSarifFileOrThrow,
shouldConsiderConfigurationError,
shouldConsiderInvalidRequest,
shouldShowCombineSarifFilesDeprecationWarning,

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -161096,7 +161097,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -161794,6 +161795,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",

View File

@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var os3 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs13.existsSync(filePath)) {
if (!fs14.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
fs14.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
_a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs14.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs13.promises.readlink(fsPath);
const result = yield fs14.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs13.constants.O_RDONLY;
exports2.READONLY = fs14.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.6",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -49106,7 +49107,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core14 = __importStar2(require_core());
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path13 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -49160,7 +49161,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core14.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs13.promises.lstat(searchPath));
yield __await2(fs14.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -49194,7 +49195,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel));
const childItems = (yield __await2(fs14.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -49229,7 +49230,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs13.promises.stat(item.path);
stats = yield fs14.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -49241,10 +49242,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs13.promises.lstat(item.path);
stats = yield fs14.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs13.promises.realpath(item.path);
const realPath = yield fs14.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -49353,7 +49354,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var core14 = __importStar2(require_core());
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path13 = __importStar2(require("path"));
@@ -49376,13 +49377,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs13.statSync(file).isDirectory()) {
if (fs14.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs13.createReadStream(file), hash2);
yield pipeline(fs14.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -50757,7 +50758,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -50786,7 +50787,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs13.statSync(filePath).size;
return fs14.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -50824,7 +50825,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs13.unlink)(filePath);
return util.promisify(fs14.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -50866,7 +50867,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs14.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -91023,7 +91024,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -91134,7 +91135,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs13.createWriteStream(archivePath);
const writeStream = fs14.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -91159,7 +91160,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs13.promises.open(archivePath, "w");
const archiveDescriptor = yield fs14.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -91275,7 +91276,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs13.openSync(archivePath, "w");
const fd = fs14.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -91293,12 +91294,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs13.writeFileSync(fd, result);
fs14.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs13.closeSync(fd);
fs14.closeSync(fd);
}
}
});
@@ -91620,7 +91621,7 @@ var require_cacheHttpClient = __commonJS({
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -91755,7 +91756,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs13.openSync(archivePath, "r");
const fd = fs14.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -91769,7 +91770,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs14.createReadStream(archivePath, {
fd,
start,
end,
@@ -91780,7 +91781,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs13.closeSync(fd);
fs14.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os3 = require("os");
var cp = require("child_process");
var fs13 = require("fs");
var fs14 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os3.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs13.existsSync(lsbReleaseFile)) {
contents = fs13.readFileSync(lsbReleaseFile).toString();
} else if (fs13.existsSync(osReleaseFile)) {
contents = fs13.readFileSync(osReleaseFile).toString();
if (fs14.existsSync(lsbReleaseFile)) {
contents = fs14.readFileSync(lsbReleaseFile).toString();
} else if (fs14.existsSync(osReleaseFile)) {
contents = fs14.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core14 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os3 = __importStar2(require("os"));
var path13 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs13.existsSync(dest)) {
if (fs14.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs13.createWriteStream(dest));
yield pipeline(readStream, fs14.createWriteStream(dest));
core14.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source dir: ${sourceDir}`);
if (!fs13.statSync(sourceDir).isDirectory()) {
if (!fs14.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs13.readdirSync(sourceDir)) {
for (const itemName of fs14.readdirSync(sourceDir)) {
const s = path13.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source file: ${sourceFile}`);
if (!fs13.statSync(sourceFile).isFile()) {
if (!fs14.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core14.debug(`checking cache: ${cachePath}`);
if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) {
if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) {
core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os3.arch();
const toolPath = path13.join(_getCacheDirectory(), toolName);
if (fs13.existsSync(toolPath)) {
const children = fs13.readdirSync(toolPath);
if (fs14.existsSync(toolPath)) {
const children = fs14.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path13.join(toolPath, child, arch2 || "");
if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) {
if (fs14.existsSync(fullPath) && fs14.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs13.writeFileSync(markerPath, "");
fs14.writeFileSync(markerPath, "");
core14.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103252,21 +103253,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs13 = options.fs || await import("node:fs/promises");
const fs14 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs14.lstat(itemPath, { bigint: true }) : await fs14.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs14.readdir(itemPath) : await fs14.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105906,17 +105907,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run2 of sarif.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getCodeQLDatabasePath(config, language) {
return path.resolve(config.dbLocation, language);
}
@@ -106628,8 +106618,8 @@ var path4 = __toESM(require("path"));
var semver4 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
@@ -107147,6 +107137,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -107493,12 +107488,83 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) {
}
}
// src/sarif/index.ts
var fs5 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run2 of sarifFile.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs5.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
runs.push(...sarifLog?.runs || []);
}
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run2) {
return {
name: run2.tool?.driver?.name,
fullName: run2.tool?.driver?.fullName,
version: run2.tool?.driver?.version,
semanticVersion: run2.tool?.driver?.semanticVersion,
guid: run2.tool?.driver?.guid,
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run2 of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
}
keys.add(key);
}
}
return true;
}
// src/status-report.ts
var os = __toESM(require("os"));
var core9 = __toESM(require_core());
// src/config-utils.ts
var fs6 = __toESM(require("fs"));
var fs7 = __toESM(require("fs"));
var path7 = __toESM(require("path"));
// src/config/db-config.ts
@@ -107583,18 +107649,18 @@ function writeDiagnostic(config, language, diagnostic) {
}
// src/diff-informed-analysis-utils.ts
var fs5 = __toESM(require("fs"));
var fs6 = __toESM(require("fs"));
var path6 = __toESM(require("path"));
function getDiffRangesJsonFilePath() {
return path6.join(getTemporaryDirectory(), "pr-diff-range.json");
}
function readDiffRangesJsonFile(logger) {
const jsonFilePath = getDiffRangesJsonFilePath();
if (!fs5.existsSync(jsonFilePath)) {
if (!fs6.existsSync(jsonFilePath)) {
logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`);
return void 0;
}
const jsonContents = fs5.readFileSync(jsonFilePath, "utf8");
const jsonContents = fs6.readFileSync(jsonFilePath, "utf8");
logger.debug(
`Read pr-diff-range JSON file from ${jsonFilePath}:
${jsonContents}`
@@ -107643,10 +107709,10 @@ function getPathToParsedConfigFile(tempDir) {
}
async function getConfig(tempDir, logger) {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs6.existsSync(configFile)) {
if (!fs7.existsSync(configFile)) {
return void 0;
}
const configString = fs6.readFileSync(configFile, "utf8");
const configString = fs7.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
const config = JSON.parse(configString);
@@ -107890,7 +107956,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs12 = __toESM(require("fs"));
var fs13 = __toESM(require("fs"));
var path12 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -107898,7 +107964,7 @@ var core12 = __toESM(require_core());
var jsonschema2 = __toESM(require_lib2());
// src/codeql.ts
var fs10 = __toESM(require("fs"));
var fs11 = __toESM(require("fs"));
var path10 = __toESM(require("path"));
var core11 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
@@ -108146,7 +108212,7 @@ function wrapCliConfigurationError(cliError) {
}
// src/setup-codeql.ts
var fs9 = __toESM(require("fs"));
var fs10 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
@@ -108208,7 +108274,7 @@ var v4_default = v4;
// src/tar.ts
var import_child_process = require("child_process");
var fs7 = __toESM(require("fs"));
var fs8 = __toESM(require("fs"));
var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
@@ -108281,7 +108347,7 @@ async function isZstdAvailable(logger) {
}
}
async function extract(tarPath, dest, compressionMethod, tarVersion, logger) {
fs7.mkdirSync(dest, { recursive: true });
fs8.mkdirSync(dest, { recursive: true });
switch (compressionMethod) {
case "gzip":
return await toolcache.extractTar(tarPath, dest);
@@ -108365,7 +108431,7 @@ function inferCompressionMethod(tarPath) {
}
// src/tools-download.ts
var fs8 = __toESM(require("fs"));
var fs9 = __toESM(require("fs"));
var os2 = __toESM(require("os"));
var path8 = __toESM(require("path"));
var import_perf_hooks = require("perf_hooks");
@@ -108472,7 +108538,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat
};
}
async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) {
fs8.mkdirSync(dest, { recursive: true });
fs9.mkdirSync(dest, { recursive: true });
const agent = new import_http_client.HttpClient().getAgent(codeqlURL);
headers = Object.assign(
{ "User-Agent": "CodeQL Action" },
@@ -108509,7 +108575,7 @@ function getToolcacheDirectory(version) {
}
function writeToolcacheMarkerFile(extractedPath, logger) {
const markerFilePath = `${extractedPath}.complete`;
fs8.writeFileSync(markerFilePath, "");
fs9.writeFileSync(markerFilePath, "");
logger.info(`Created toolcache marker file ${markerFilePath}`);
}
function sanitizeUrlForStatusReport(url2) {
@@ -108644,7 +108710,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({
folder: toolcache3.find("CodeQL", version),
version
})).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version")));
})).filter(({ folder }) => fs10.existsSync(path9.join(folder, "pinned-version")));
if (candidates.length === 1) {
const candidate = candidates[0];
logger.debug(
@@ -109198,7 +109264,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"tools",
"tracing-config.lua"
);
return fs10.existsSync(tracingConfigPath);
return fs11.existsSync(tracingConfigPath);
},
async isScannedLanguage(language) {
return !await this.isTracedLanguage(language);
@@ -109678,7 +109744,7 @@ async function writeCodeScanningConfigFile(config, logger) {
logger.startGroup("Augmented user configuration file contents");
logger.info(dump(augmentedConfig));
logger.endGroup();
fs10.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
fs11.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
return codeScanningConfigFile;
}
var TRAP_CACHE_SIZE_MB = 1024;
@@ -109722,7 +109788,7 @@ async function getJobRunUuidSarifOptions(codeql) {
}
// src/fingerprints.ts
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var import_path2 = __toESM(require("path"));
// node_modules/long/index.js
@@ -110710,7 +110776,7 @@ async function hash(callback, filepath) {
}
updateHash(current);
};
const readStream = fs11.createReadStream(filepath, "utf8");
const readStream = fs12.createReadStream(filepath, "utf8");
for await (const data of readStream) {
for (let i = 0; i < data.length; ++i) {
processCharacter(data.charCodeAt(i));
@@ -110785,22 +110851,22 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
if (!import_path2.default.isAbsolute(uri)) {
uri = srcRootPrefix + uri;
}
if (!fs11.existsSync(uri)) {
if (!fs12.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return void 0;
}
if (fs11.statSync(uri).isDirectory()) {
if (fs12.statSync(uri).isDirectory()) {
logger.debug(`Unable to compute fingerprint for directory: ${uri}`);
return void 0;
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run2 of sarif.runs || []) {
for (const run2 of sarifLog.runs || []) {
const artifacts = run2.artifacts || [];
for (const result of run2.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -110840,7 +110906,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -110878,58 +110944,6 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs12.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
}
return combinedSarif;
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run2) => run2.tool?.driver?.name === "CodeQL"
);
});
}
function createRunKey(run2) {
return {
name: run2.tool?.driver?.name,
fullName: run2.tool?.driver?.fullName,
version: run2.tool?.driver?.version,
semanticVersion: run2.tool?.driver?.semanticVersion,
guid: run2.tool?.driver?.guid,
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run2 of sarifObject.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
}
keys.add(key);
}
}
return true;
}
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -110958,9 +110972,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs12.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -111013,27 +111025,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path12.resolve(tempDir, "combined-sarif");
fs12.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs12.mkdtempSync(path12.resolve(baseTempDir, "output-"));
fs13.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs13.mkdtempSync(path12.resolve(baseTempDir, "output-"));
const outputFile = path12.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs12.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarif.runs || []) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
run2.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -111056,7 +111068,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs13.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -111090,7 +111102,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs12.readdirSync(dir, { withFileTypes: true });
const entries = fs13.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path12.resolve(dir, entry.name));
@@ -111103,7 +111115,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -111150,9 +111162,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -111166,26 +111178,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs12.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -111212,6 +111224,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -111237,7 +111250,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs13.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -111248,14 +111261,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -111263,21 +111276,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -111294,12 +111307,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -111341,9 +111354,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs12.existsSync(outputDir)) {
fs12.mkdirSync(outputDir, { recursive: true });
} else if (!fs12.lstatSync(outputDir).isDirectory()) {
if (!fs13.existsSync(outputDir)) {
fs13.mkdirSync(outputDir, { recursive: true });
} else if (!fs13.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -111353,7 +111366,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs12.writeFileSync(outputFile, sarifPayload);
fs13.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -111451,9 +111464,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs || []) {
const id = run2?.automationDetails?.id;
const tool = run2.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -111472,15 +111485,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs) {
if (run2.results) {
run2.results = run2.results.filter((result) => {
const locations = [
@@ -111501,7 +111515,7 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// src/upload-sarif.ts

195
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "4.32.5",
"version": "4.32.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "4.32.5",
"version": "4.32.6",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^5.0.3",
@@ -43,6 +43,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
@@ -51,14 +52,14 @@
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
"glob": "^11.1.0",
"globals": "^17.3.0",
"nock": "^14.0.11",
"sinon": "^21.0.1",
"typescript": "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
}
},
"node_modules/@aashutoshrathi/word-wrap": {
@@ -2522,6 +2523,13 @@
"@types/node": "*"
}
},
"node_modules/@types/sarif": {
"version": "2.1.7",
"resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz",
"integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/semver": {
"version": "7.7.1",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.1.tgz",
@@ -2545,17 +2553,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz",
"integrity": "sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz",
"integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.12.2",
"@typescript-eslint/scope-manager": "8.56.0",
"@typescript-eslint/type-utils": "8.56.0",
"@typescript-eslint/utils": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0",
"@typescript-eslint/scope-manager": "8.56.1",
"@typescript-eslint/type-utils": "8.56.1",
"@typescript-eslint/utils": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1",
"ignore": "^7.0.5",
"natural-compare": "^1.4.0",
"ts-api-utils": "^2.4.0"
@@ -2568,7 +2576,7 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^8.56.0",
"@typescript-eslint/parser": "^8.56.1",
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
@@ -2584,16 +2592,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.0.tgz",
"integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz",
"integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/scope-manager": "8.56.0",
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0",
"@typescript-eslint/scope-manager": "8.56.1",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1",
"debug": "^4.4.3"
},
"engines": {
@@ -2627,14 +2635,14 @@
}
},
"node_modules/@typescript-eslint/project-service": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.0.tgz",
"integrity": "sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz",
"integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/tsconfig-utils": "^8.56.0",
"@typescript-eslint/types": "^8.56.0",
"@typescript-eslint/tsconfig-utils": "^8.56.1",
"@typescript-eslint/types": "^8.56.1",
"debug": "^4.4.3"
},
"engines": {
@@ -2667,14 +2675,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.0.tgz",
"integrity": "sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz",
"integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0"
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2685,9 +2693,9 @@
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.0.tgz",
"integrity": "sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz",
"integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2702,15 +2710,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.0.tgz",
"integrity": "sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz",
"integrity": "sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0",
"@typescript-eslint/utils": "8.56.0",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1",
"@typescript-eslint/utils": "8.56.1",
"debug": "^4.4.3",
"ts-api-utils": "^2.4.0"
},
@@ -2745,9 +2753,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz",
"integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz",
"integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2759,18 +2767,18 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.0.tgz",
"integrity": "sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz",
"integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/project-service": "8.56.0",
"@typescript-eslint/tsconfig-utils": "8.56.0",
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0",
"@typescript-eslint/project-service": "8.56.1",
"@typescript-eslint/tsconfig-utils": "8.56.1",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1",
"debug": "^4.4.3",
"minimatch": "^9.0.5",
"minimatch": "^10.2.2",
"semver": "^7.7.3",
"tinyglobby": "^0.2.15",
"ts-api-utils": "^2.4.0"
@@ -2786,14 +2794,27 @@
"typescript": ">=4.8.4 <6.0.0"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/balanced-match": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
"integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
"dev": true,
"license": "MIT",
"engines": {
"node": "18 || 20 || >=22"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
"integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz",
"integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0"
"balanced-match": "^4.0.2"
},
"engines": {
"node": "18 || 20 || >=22"
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/debug": {
@@ -2815,32 +2836,32 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
"version": "9.0.9",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz",
"integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==",
"version": "10.2.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"dev": true,
"license": "ISC",
"license": "BlueOak-1.0.0",
"dependencies": {
"brace-expansion": "^2.0.2"
"brace-expansion": "^5.0.2"
},
"engines": {
"node": ">=16 || 14 >=14.17"
"node": "18 || 20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/@typescript-eslint/utils": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.0.tgz",
"integrity": "sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz",
"integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.9.1",
"@typescript-eslint/scope-manager": "8.56.0",
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0"
"@typescript-eslint/scope-manager": "8.56.1",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2855,13 +2876,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.0.tgz",
"integrity": "sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz",
"integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/types": "8.56.1",
"eslint-visitor-keys": "^5.0.0"
},
"engines": {
@@ -2873,9 +2894,9 @@
}
},
"node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.0.tgz",
"integrity": "sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==",
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
"integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
"dev": true,
"license": "Apache-2.0",
"engines": {
@@ -5138,9 +5159,9 @@
}
},
"node_modules/eslint-plugin-jsdoc": {
"version": "62.6.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.6.0.tgz",
"integrity": "sha512-Z18zZD1Q2m9usqFbAzb30z+lF8bzE4WiUy+dfOXljJlZ1Jm5uhkuAWfGV97FYyh+WlKfrvpDYs+s1z45eZWMfA==",
"version": "62.7.1",
"resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.7.1.tgz",
"integrity": "sha512-4Zvx99Q7d1uggYBUX/AIjvoyqXhluGbbKrRmG8SQTLprPFg6fa293tVJH1o1GQwNe3lUydd8ZHzn37OaSncgSQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
@@ -5155,7 +5176,7 @@
"html-entities": "^2.6.0",
"object-deep-merge": "^2.0.0",
"parse-imports-exports": "^0.2.4",
"semver": "^7.7.3",
"semver": "^7.7.4",
"spdx-expression-parse": "^4.0.0",
"to-valid-identifier": "^1.0.0"
},
@@ -5163,7 +5184,7 @@
"node": "^20.19.0 || ^22.13.0 || >=24"
},
"peerDependencies": {
"eslint": "^7.0.0 || ^8.0.0 || ^9.0.0"
"eslint": "^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0"
}
},
"node_modules/eslint-plugin-jsdoc/node_modules/debug": {
@@ -8849,9 +8870,9 @@
}
},
"node_modules/tar": {
"version": "7.5.7",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz",
"integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==",
"version": "7.5.10",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.10.tgz",
"integrity": "sha512-8mOPs1//5q/rlkNSPcCegA6hiHJYDmSLEI8aMH/CdSQJNWztHC9WHNam5zdQlfpTwB9Xp7IBEsHfV5LKMJGVAw==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
@@ -9181,16 +9202,16 @@
}
},
"node_modules/typescript-eslint": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.0.tgz",
"integrity": "sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.1.tgz",
"integrity": "sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/eslint-plugin": "8.56.0",
"@typescript-eslint/parser": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0",
"@typescript-eslint/utils": "8.56.0"
"@typescript-eslint/eslint-plugin": "8.56.1",
"@typescript-eslint/parser": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1",
"@typescript-eslint/utils": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"

View File

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "4.32.5",
"version": "4.32.6",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -9,7 +9,7 @@
"lint": "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
"ava": "npm run transpile && ava --serial --verbose",
"ava": "npm run transpile && ava --verbose",
"test": "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
"transpile": "tsc --build --verbose"
@@ -58,6 +58,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
@@ -66,14 +67,14 @@
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.6.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
"glob": "^11.1.0",
"globals": "^17.3.0",
"nock": "^14.0.11",
"sinon": "^21.0.1",
"typescript": "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
"overrides": {
"@actions/tool-cache": {

View File

@@ -1,3 +1 @@
env
__pycache__/
*.pyc
node_modules/

View File

View File

@@ -40,7 +40,7 @@ steps:
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
- name: Upload SARIF files
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
@@ -48,7 +48,7 @@ steps:
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}

View File

@@ -5,7 +5,7 @@ description: >
autobuild Action.
operatingSystems: ["ubuntu", "windows"]
versions: ["linked", "nightly-latest"]
installJava: "true"
installJava: true
env:
CODEQL_ACTION_AUTOBUILD_BUILD_MODE_DIRECT_TRACING: true
steps:

View File

@@ -2,8 +2,8 @@ name: "Build mode autobuild"
description: "An end-to-end integration test of a Java repository built using 'build-mode: autobuild'"
operatingSystems: ["ubuntu", "windows"]
versions: ["linked", "nightly-latest"]
installJava: "true"
installYq: "true"
installJava: true
installYq: true
steps:
- name: Set up Java test repo configuration
run: |

View File

@@ -11,5 +11,5 @@ steps:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
if: ${{ !contains(steps.init.outputs.codeql-version, '+') }}
run: exit 1

View File

@@ -27,7 +27,7 @@ steps:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -12,7 +12,7 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -25,7 +25,7 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -19,7 +19,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -11,7 +11,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -32,16 +32,16 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
- name: "Fail for missing output from `upload-sarif` step for `code-scanning`"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
- name: "Fail for missing output from `upload-sarif` step for `code-quality`"
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)"
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
run: exit 1
- name: Upload single SARIF file for Code Scanning
uses: ./../action/upload-sarif
id: upload-single-sarif-code-scanning
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -49,12 +49,12 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
- name: "Fail for missing output from `upload-single-sarif-code-scanning` step"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Upload single SARIF file for Code Quality
uses: ./../action/upload-sarif
id: upload-single-sarif-code-quality
if: "contains(matrix.analysis-kinds, 'code-quality')"
if: contains(matrix.analysis-kinds, 'code-quality')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -62,16 +62,16 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
- name: "Fail for missing output from `upload-single-sarif-code-quality` step"
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)"
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
run: exit 1
- name: Change SARIF file extension
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
- name: Upload single non-`.sarif` file
uses: ./../action/upload-sarif
id: upload-single-non-sarif
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -79,5 +79,5 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
- name: "Fail for missing output from `upload-single-non-sarif` step"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
run: exit 1

605
pr-checks/package-lock.json generated Normal file
View File

@@ -0,0 +1,605 @@
{
"name": "pr-checks",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"dependencies": {
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^20.19.9",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz",
"integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz",
"integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz",
"integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz",
"integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz",
"integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz",
"integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz",
"integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz",
"integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz",
"integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz",
"integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz",
"integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz",
"integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz",
"integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz",
"integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz",
"integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz",
"integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz",
"integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz",
"integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz",
"integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz",
"integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz",
"integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openharmony-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz",
"integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz",
"integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz",
"integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz",
"integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz",
"integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@types/node": {
"version": "20.19.35",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.35.tgz",
"integrity": "sha512-Uarfe6J91b9HAUXxjvSOdiO2UPOKLm07Q1oh0JHxoZ1y8HoqxDAu3gVrsrOHeiio0kSsoVBt4wFrKOm0dKxVPQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/esbuild": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
"integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.27.3",
"@esbuild/android-arm": "0.27.3",
"@esbuild/android-arm64": "0.27.3",
"@esbuild/android-x64": "0.27.3",
"@esbuild/darwin-arm64": "0.27.3",
"@esbuild/darwin-x64": "0.27.3",
"@esbuild/freebsd-arm64": "0.27.3",
"@esbuild/freebsd-x64": "0.27.3",
"@esbuild/linux-arm": "0.27.3",
"@esbuild/linux-arm64": "0.27.3",
"@esbuild/linux-ia32": "0.27.3",
"@esbuild/linux-loong64": "0.27.3",
"@esbuild/linux-mips64el": "0.27.3",
"@esbuild/linux-ppc64": "0.27.3",
"@esbuild/linux-riscv64": "0.27.3",
"@esbuild/linux-s390x": "0.27.3",
"@esbuild/linux-x64": "0.27.3",
"@esbuild/netbsd-arm64": "0.27.3",
"@esbuild/netbsd-x64": "0.27.3",
"@esbuild/openbsd-arm64": "0.27.3",
"@esbuild/openbsd-x64": "0.27.3",
"@esbuild/openharmony-arm64": "0.27.3",
"@esbuild/sunos-x64": "0.27.3",
"@esbuild/win32-arm64": "0.27.3",
"@esbuild/win32-ia32": "0.27.3",
"@esbuild/win32-x64": "0.27.3"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/get-tsconfig": {
"version": "4.13.6",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz",
"integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==",
"dev": true,
"license": "MIT",
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
},
"funding": {
"url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
}
},
"node_modules/resolve-pkg-maps": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
"node_modules/tsx": {
"version": "4.21.0",
"resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz",
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "~0.27.0",
"get-tsconfig": "^4.7.5"
},
"bin": {
"tsx": "dist/cli.mjs"
},
"engines": {
"node": ">=18.0.0"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
},
"node_modules/yaml": {
"version": "2.8.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
"license": "ISC",
"bin": {
"yaml": "bin.mjs"
},
"engines": {
"node": ">= 14.6"
},
"funding": {
"url": "https://github.com/sponsors/eemeli"
}
}
}
}

12
pr-checks/package.json Normal file
View File

@@ -0,0 +1,12 @@
{
"private": true,
"description": "Dependencies for the sync.ts",
"dependencies": {
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^20.19.9",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
}

View File

@@ -6,9 +6,9 @@ to one of the files in this directory.
## Updating workflows
Run `./sync.sh` to invoke the workflow generator and re-generate the workflow files in `.github/workflows/` based on the templates in `pr-checks/checks/`.
Alternatively, you can use `just`:
1. Install https://github.com/casey/just by whichever way you prefer.
2. Run `just update-pr-checks` in your terminal.
### If you don't want to install `just`
Manually run each step in the `justfile`.

View File

@@ -1,402 +0,0 @@
#!/usr/bin/env python
import ruamel.yaml
from ruamel.yaml.scalarstring import SingleQuotedScalarString, LiteralScalarString
import pathlib
import os
# The default set of CodeQL Bundle versions to use for the PR checks.
defaultTestVersions = [
    # The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts`
    "stable-v2.17.6",
    # The last CodeQL release in the 2.18 series.
    "stable-v2.18.4",
    # The last CodeQL release in the 2.19 series.
    "stable-v2.19.4",
    # The last CodeQL release in the 2.20 series.
    "stable-v2.20.7",
    # The last CodeQL release in the 2.21 series.
    "stable-v2.21.4",
    # The last CodeQL release in the 2.22 series.
    "stable-v2.22.4",
    # The default version of CodeQL for Dotcom, as determined by feature flags.
    "default",
    # The version of CodeQL shipped with the Action in `defaults.json`. During the release process
    # for a new CodeQL release, there will be a period of time during which this will be newer than
    # the default version on Dotcom.
    "linked",
    # A nightly build directly from our private repo, built in the last 24 hours.
    "nightly-latest"
]
# When updating the ruamel.yaml version here, update the PR check in
# `.github/workflows/pr-checks.yml` too.
# Do-not-edit banner written at the top of every generated workflow file
# (see `writeHeader`).
header = """# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
"""
def is_truthy(value):
    """Interpret a check-specification flag as a boolean.

    A string counts as true only when it spells 'true' in any casing;
    every non-string value is coerced with `bool()`.
    """
    if not isinstance(value, str):
        return bool(value)
    return value.lower() == 'true'
class NonAliasingRTRepresenter(ruamel.yaml.representer.RoundTripRepresenter):
    """Round-trip representer that never emits YAML anchors/aliases."""

    def ignore_aliases(self, data):
        # Treat every value as unshared so repeated objects are written out
        # in full rather than as `*alias` references.
        return True
def writeHeader(checkStream):
    """Write the module-level `header` do-not-edit banner to `checkStream`."""
    checkStream.write(header)
# Configure ruamel.yaml for round-trip output: 2-space mappings, sequences
# indented by 4 with the dash offset by 2, and no anchors/aliases.
yaml = ruamel.yaml.YAML()
yaml.Representer = NonAliasingRTRepresenter
yaml.indent(mapping=2, sequence=4, offset=2)

this_dir = pathlib.Path(__file__).resolve().parent

# NOTE(review): `allJobs` is never used below — candidate for removal.
allJobs = {}
# Maps a collection name to the checks that belong to it.
collections = {}

# Generate one workflow file per check specification in `checks/`.
for file in sorted((this_dir / 'checks').glob('*.yml')):
    with open(file, 'r') as checkStream:
        checkSpecification = yaml.load(checkStream)

    matrix = []
    workflowInputs = {}
    if 'inputs' in checkSpecification:
        workflowInputs = checkSpecification['inputs']

    # Build the runner-image / CodeQL-version test matrix for this check.
    for version in checkSpecification.get('versions', defaultTestVersions):
        if version == "latest":
            raise ValueError('Did not recognize "version: latest". Did you mean "version: linked"?')
        runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]
        operatingSystems = checkSpecification.get('operatingSystems', ["ubuntu"])
        for operatingSystem in operatingSystems:
            runnerImagesForOs = [image for image in runnerImages if image.startswith(operatingSystem)]
            for runnerImage in runnerImagesForOs:
                matrix.append({
                    'os': runnerImage,
                    'version': version
                })

    useAllPlatformBundle = "false" # Default to false
    if checkSpecification.get('useAllPlatformBundle'):
        useAllPlatformBundle = checkSpecification['useAllPlatformBundle']

    # If the check declares analysis kinds, cross-multiply them into the matrix.
    if 'analysisKinds' in checkSpecification:
        newMatrix = []
        for matrixInclude in matrix:
            for analysisKind in checkSpecification.get('analysisKinds'):
                newMatrix.append(
                    matrixInclude |
                    { 'analysis-kinds': analysisKind }
                )
        matrix = newMatrix

    # Construct the workflow steps needed for this check.
    steps = [
        {
            'name': 'Check out repository',
            'uses': 'actions/checkout@v6'
        },
    ]
    installNode = is_truthy(checkSpecification.get('installNode', ''))
    if installNode:
        steps.extend([
            {
                'name': 'Install Node.js',
                'uses': 'actions/setup-node@v6',
                'with': {
                    'node-version': '20.x',
                    'cache': 'npm',
                },
            },
            {
                'name': 'Install dependencies',
                'run': 'npm ci',
            },
        ])
    steps.append({
        'name': 'Prepare test',
        'id': 'prepare-test',
        'uses': './.github/actions/prepare-test',
        'with': {
            'version': '${{ matrix.version }}',
            'use-all-platform-bundle': useAllPlatformBundle,
            # If the action is being run from a container, then do not setup kotlin.
            # This is because the kotlin binaries cannot be downloaded from the container.
            'setup-kotlin': str(not 'container' in checkSpecification).lower(),
        }
    })
    installGo = is_truthy(checkSpecification.get('installGo', ''))
    if installGo:
        baseGoVersionExpr = '>=1.21.0'
        # Expose the Go version as a workflow input so callers can override it.
        workflowInputs['go-version'] = {
            'type': 'string',
            'description': 'The version of Go to install',
            'required': False,
            'default': baseGoVersionExpr,
        }
        steps.append({
            'name': 'Install Go',
            'uses': 'actions/setup-go@v6',
            'with': {
                'go-version': '${{ inputs.go-version || \'' + baseGoVersionExpr + '\' }}',
                # to avoid potentially misleading autobuilder results where we expect it to download
                # dependencies successfully, but they actually come from a warm cache
                'cache': False
            }
        })
    installJava = is_truthy(checkSpecification.get('installJava', ''))
    if installJava:
        baseJavaVersionExpr = '17'
        workflowInputs['java-version'] = {
            'type': 'string',
            'description': 'The version of Java to install',
            'required': False,
            'default': baseJavaVersionExpr,
        }
        steps.append({
            'name': 'Install Java',
            'uses': 'actions/setup-java@v5',
            'with': {
                'java-version': '${{ inputs.java-version || \'' + baseJavaVersionExpr + '\' }}',
                'distribution': 'temurin'
            }
        })
    installPython = is_truthy(checkSpecification.get('installPython', ''))
    if installPython:
        basePythonVersionExpr = '3.13'
        workflowInputs['python-version'] = {
            'type': 'string',
            'description': 'The version of Python to install',
            'required': False,
            'default': basePythonVersionExpr,
        }
        steps.append({
            'name': 'Install Python',
            'if': 'matrix.version != \'nightly-latest\'',
            'uses': 'actions/setup-python@v6',
            'with': {
                'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
            }
        })
    installDotNet = is_truthy(checkSpecification.get('installDotNet', ''))
    if installDotNet:
        baseDotNetVersionExpr = '9.x'
        workflowInputs['dotnet-version'] = {
            'type': 'string',
            'description': 'The version of .NET to install',
            'required': False,
            'default': baseDotNetVersionExpr,
        }
        steps.append({
            'name': 'Install .NET',
            'uses': 'actions/setup-dotnet@v5',
            'with': {
                'dotnet-version': '${{ inputs.dotnet-version || \'' + baseDotNetVersionExpr + '\' }}'
            }
        })
    installYq = is_truthy(checkSpecification.get('installYq', ''))
    if installYq:
        steps.append({
            'name': 'Install yq',
            'if': "runner.os == 'Windows'",
            'env': {
                'YQ_PATH': '${{ runner.temp }}/yq',
                # This is essentially an arbitrary version of `yq`, which happened to be the one that
                # `choco` fetched when we moved away from using that here.
                # See https://github.com/github/codeql-action/pull/3423
                'YQ_VERSION': 'v4.50.1'
            },
            'run': LiteralScalarString(
                'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n'
                'echo "$YQ_PATH" >> "$GITHUB_PATH"'
            ),
        })

    # If container initialisation steps are present in the check specification,
    # make sure to execute them first.
    if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
        steps.insert(0, checkSpecification['container-init-steps'])

    # The check's own steps run after all of the generated setup steps.
    steps.extend(checkSpecification['steps'])

    checkJob = {
        'strategy': {
            'fail-fast': False,
            'matrix': {
                'include': matrix
            }
        },
        'name': checkSpecification['name'],
        'if': 'github.triggering_actor != \'dependabot[bot]\'',
        'permissions': {
            'contents': 'read',
            'security-events': 'read'
        },
        'timeout-minutes': 45,
        'runs-on': '${{ matrix.os }}',
        'steps': steps,
    }
    # A specification-level `permissions` overrides the default above.
    if 'permissions' in checkSpecification:
        checkJob['permissions'] = checkSpecification['permissions']
    for key in ["env", "container", "services"]:
        if key in checkSpecification:
            checkJob[key] = checkSpecification[key]
    checkJob['env'] = checkJob.get('env', {})
    if 'CODEQL_ACTION_TEST_MODE' not in checkJob['env']:
        checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True

    checkName = file.stem

    # If this check belongs to a named collection, record it.
    if 'collection' in checkSpecification:
        collection_name = checkSpecification['collection']
        collections.setdefault(collection_name, []).append({
            'specification': checkSpecification,
            'checkName': checkName,
            'inputs': workflowInputs
        })

    # Dump to a `.raw` file first; the final file is the raw output with
    # trailing whitespace stripped from every line.
    raw_file = this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml.raw"
    with open(raw_file, 'w', newline='\n') as output_stream:
        extraGroupName = ""
        for inputName in workflowInputs.keys():
            extraGroupName += "-${{inputs." + inputName + "}}"
        writeHeader(output_stream)
        yaml.dump({
            'name': f"PR Check - {checkSpecification['name']}",
            'env': {
                'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
                'GO111MODULE': 'auto'
            },
            'on': {
                'push': {
                    'branches': ['main', 'releases/v*']
                },
                'pull_request': {
                    'types': ["opened", "synchronize", "reopened", "ready_for_review"]
                },
                'merge_group': {
                    'types': ['checks_requested']
                },
                'schedule': [{'cron': SingleQuotedScalarString('0 5 * * *')}],
                'workflow_dispatch': {
                    'inputs': workflowInputs
                },
                'workflow_call': {
                    'inputs': workflowInputs
                }
            },
            'defaults': {
                'run': {
                    'shell': 'bash',
                },
            },
            'concurrency': {
                # Cancel in-progress workflows in the same 'group' for pull_request events,
                # but not other event types. This should have the effect that workflows on PRs
                # get cancelled if there is a newer workflow in the same concurrency group.
                # For other events, the new workflows should wait until earlier ones have finished.
                # This should help reduce the number of concurrent workflows on the repo, and
                # consequently the number of concurrent API requests.
                # Note, the `|| false` is intentional to rule out that this somehow ends up being
                # `true` since we observed workflows for non-`pull_request` events getting cancelled.
                'cancel-in-progress': "${{ github.event_name == 'pull_request' || false }}",
                # The group is determined by the workflow name, the ref, and the input values.
                # The base name is hard-coded to avoid issues when the workflow is triggered by
                # a `workflow_call` event (where `github.workflow` would be the name of the caller).
                # The input values are added, since they may result in different behaviour for a
                # given workflow on the same ref.
                'group': checkName + "-${{github.ref}}" + extraGroupName
            },
            'jobs': {
                checkName: checkJob
            }
        }, output_stream)
    with open(raw_file, 'r') as input_stream:
        with open(this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml", 'w', newline='\n') as output_stream:
            content = input_stream.read()
            output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+['']))
    os.remove(raw_file)

# write workflow files for collections
for collection_name in collections:
    jobs = {}
    combinedInputs = {}
    for check in collections[collection_name]:
        checkName = check['checkName']
        checkSpecification = check['specification']
        checkInputs = check['inputs']
        checkWith = {}
        # Later checks' inputs override earlier ones on name clashes.
        combinedInputs |= checkInputs
        for inputName in checkInputs.keys():
            checkWith[inputName] = "${{ inputs." + inputName + " }}"
        jobs[checkName] = {
            'name': checkSpecification['name'],
            'permissions': {
                'contents': 'read',
                'security-events': 'read'
            },
            'uses': "./.github/workflows/" + f"__{checkName}.yml",
            'with': checkWith
        }
    raw_file = this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml.raw"
    with open(raw_file, 'w') as output_stream:
        writeHeader(output_stream)
        yaml.dump({
            'name': f"Manual Check - {collection_name}",
            'env': {
                'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
                'GO111MODULE': 'auto'
            },
            'on': {
                'workflow_dispatch': {
                    'inputs': combinedInputs
                },
            },
            'jobs': jobs
        }, output_stream)
    with open(raw_file, 'r') as input_stream:
        with open(this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml", 'w', newline='\n') as output_stream:
            content = input_stream.read()
            output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+['']))
    os.remove(raw_file)

View File

@@ -2,8 +2,14 @@
set -e
cd "$(dirname "$0")"
python3 -m venv env
source env/*/activate
pip3 install ruamel.yaml==0.17.31
python3 sync.py
# Run `npm ci` in CI or `npm install` otherwise.
if [ "$GITHUB_ACTIONS" = "true" ]; then
echo "In Actions, running 'npm ci' for 'sync.ts'..."
npm ci
else
echo "Running 'npm install' for 'sync.ts'..."
npm install --no-audit --no-fund
fi
npx tsx sync.ts

525
pr-checks/sync.ts Executable file
View File

@@ -0,0 +1,525 @@
#!/usr/bin/env npx tsx
import * as fs from "fs";
import * as path from "path";
import * as yaml from "yaml";
/** Known workflow input names that check specifications can declare. */
enum KnownInputName {
  // Each value is the kebab-case input name used in the generated workflow.
  GoVersion = "go-version",
  JavaVersion = "java-version",
  PythonVersion = "python-version",
  DotnetVersion = "dotnet-version",
}
/**
 * Represents workflow input definitions.
 */
interface WorkflowInput {
  /** Input type as declared in the workflow (e.g. "string"). */
  type: string;
  /** Human-readable description of the input. */
  description: string;
  /** Whether callers must supply the input. */
  required: boolean;
  /** Value used when the input is not supplied. */
  default: string;
}

/** A partial mapping from known input names to input definitions. */
type WorkflowInputs = Partial<Record<KnownInputName, WorkflowInput>>;
/**
 * Represents PR check specifications.
 */
interface Specification {
  /** The display name for the check. */
  name: string;
  /** The workflow steps specific to this check. */
  steps: any[];
  /** Workflow-level input definitions forwarded to `workflow_dispatch`/`workflow_call`. */
  inputs?: Record<string, WorkflowInput>;
  /** CodeQL bundle versions to test against. Defaults to `defaultTestVersions`. */
  versions?: string[];
  /** Operating system prefixes used to select runner images (e.g. `["ubuntu", "macos"]`). */
  operatingSystems?: string[];
  /** Whether to use the all-platform CodeQL bundle. */
  useAllPlatformBundle?: string;
  /** Values for the `analysis-kinds` matrix dimension. */
  analysisKinds?: string[];
  /** Whether to add steps that install Node.js and run `npm ci`. */
  installNode?: boolean;
  /** Whether to add a step that installs Go (adds a `go-version` input). */
  installGo?: boolean;
  /** Whether to add a step that installs Java (adds a `java-version` input). */
  installJava?: boolean;
  /** Whether to add a step that installs Python (adds a `python-version` input). */
  installPython?: boolean;
  /** Whether to add a step that installs .NET (adds a `dotnet-version` input). */
  installDotNet?: boolean;
  /** Whether to add a step that installs `yq` on Windows runners. */
  installYq?: boolean;
  /** Container image configuration for the job. */
  container?: any;
  /** Service containers for the job. */
  services?: any;
  /** Custom permissions override for the job. */
  permissions?: Record<string, string>;
  /** Extra environment variables for the job. */
  env?: Record<string, any>;
  /** If set, this check is part of a named collection that gets its own caller workflow. */
  collection?: string;
}
// The default set of CodeQL Bundle versions to use for the PR checks.
const defaultTestVersions = [
  // The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts`
  "stable-v2.17.6",
  // The last CodeQL release in the 2.18 series.
  "stable-v2.18.4",
  // The last CodeQL release in the 2.19 series.
  "stable-v2.19.4",
  // The last CodeQL release in the 2.20 series.
  "stable-v2.20.7",
  // The last CodeQL release in the 2.21 series.
  "stable-v2.21.4",
  // The last CodeQL release in the 2.22 series.
  "stable-v2.22.4",
  // The default version of CodeQL for Dotcom, as determined by feature flags.
  "default",
  // The version of CodeQL shipped with the Action in `defaults.json`. During the release process
  // for a new CodeQL release, there will be a period of time during which this will be newer than
  // the default version on Dotcom.
  "linked",
  // A nightly build directly from our private repo, built in the last 24 hours.
  "nightly-latest",
];
// Directory containing this script.
const THIS_DIR = __dirname;
// Directory holding the per-check specification templates.
const CHECKS_DIR = path.join(THIS_DIR, "checks");
// Destination directory for the generated workflow files.
const OUTPUT_DIR = path.join(THIS_DIR, "..", ".github", "workflows");
/**
 * Read the file at `filePath` and parse its contents into a YAML document.
 */
function loadYaml(filePath: string): yaml.Document {
  return yaml.parseDocument(fs.readFileSync(filePath, "utf8"));
}
/**
 * Serialize a value to YAML and write it to a file, prepended with the
 * standard header comment.
 */
function writeYaml(filePath: string, workflow: any): void {
  // Do-not-edit banner; kept byte-identical so generated files are stable.
  const header = `# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
`;
  // `aliasDuplicateObjects: false` prevents anchors/aliases for repeated objects.
  const workflowDoc = new yaml.Document(workflow, {
    aliasDuplicateObjects: false,
  });
  // `lineWidth: 0` disables line wrapping; `singleQuote` prefers 'single quotes'.
  const yamlStr = yaml.stringify(workflowDoc, {
    aliasDuplicateObjects: false,
    singleQuote: true,
    lineWidth: 0,
  });
  fs.writeFileSync(filePath, stripTrailingWhitespace(header + yamlStr), "utf8");
}
/**
 * Remove trailing whitespace from every line of `content`.
 */
function stripTrailingWhitespace(content: string): string {
  const cleaned: string[] = [];
  for (const line of content.split("\n")) {
    cleaned.push(line.trimEnd());
  }
  return cleaned.join("\n");
}
/**
 * Main entry point for the sync script: regenerates one workflow file per
 * check specification in `CHECKS_DIR`, plus one caller workflow per named
 * collection, writing everything into `OUTPUT_DIR`.
 */
function main(): void {
  // Ensure the output directory exists.
  fs.mkdirSync(OUTPUT_DIR, { recursive: true });

  // Discover and sort all check specification files.
  const checkFiles = fs
    .readdirSync(CHECKS_DIR)
    .filter((f) => f.endsWith(".yml"))
    .sort()
    .map((f) => path.join(CHECKS_DIR, f));

  console.log(`Found ${checkFiles.length} check specification(s).`);

  // Maps a collection name to the checks that belong to it.
  const collections: Record<
    string,
    Array<{
      specification: Specification;
      checkName: string;
      inputs: Record<string, WorkflowInput>;
    }>
  > = {};

  for (const file of checkFiles) {
    const checkName = path.basename(file, ".yml");
    const specDocument = loadYaml(file);
    const checkSpecification = specDocument.toJS() as Specification;
    console.log(`Processing: ${checkName} — "${checkSpecification.name}"`);

    const workflowInputs: WorkflowInputs = {};
    // Build the runner-image / CodeQL-version test matrix for this check.
    let matrix: Array<Record<string, any>> = [];
    for (const version of checkSpecification.versions ?? defaultTestVersions) {
      if (version === "latest") {
        throw new Error(
          'Did not recognise "version: latest". Did you mean "version: linked"?',
        );
      }
      const runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"];
      const operatingSystems = checkSpecification.operatingSystems ?? [
        "ubuntu",
      ];
      for (const operatingSystem of operatingSystems) {
        const runnerImagesForOs = runnerImages.filter((image) =>
          image.startsWith(operatingSystem),
        );
        for (const runnerImage of runnerImagesForOs) {
          matrix.push({
            os: runnerImage,
            version,
          });
        }
      }
    }

    const useAllPlatformBundle = checkSpecification.useAllPlatformBundle
      ? checkSpecification.useAllPlatformBundle
      : "false";

    // If the check declares analysis kinds, cross-multiply them into the matrix.
    if (checkSpecification.analysisKinds) {
      const newMatrix: Array<Record<string, any>> = [];
      for (const matrixInclude of matrix) {
        for (const analysisKind of checkSpecification.analysisKinds) {
          newMatrix.push({
            ...matrixInclude,
            "analysis-kinds": analysisKind,
          });
        }
      }
      matrix = newMatrix;
    }

    // Construct the workflow steps needed for this check.
    const steps: any[] = [
      {
        name: "Check out repository",
        uses: "actions/checkout@v6",
      },
    ];
    const installNode = checkSpecification.installNode;
    if (installNode) {
      steps.push(
        {
          name: "Install Node.js",
          uses: "actions/setup-node@v6",
          with: {
            "node-version": "20.x",
            cache: "npm",
          },
        },
        {
          name: "Install dependencies",
          run: "npm ci",
        },
      );
    }
    steps.push({
      name: "Prepare test",
      id: "prepare-test",
      uses: "./.github/actions/prepare-test",
      with: {
        version: "${{ matrix.version }}",
        "use-all-platform-bundle": useAllPlatformBundle,
        // If the action is being run from a container, then do not setup kotlin.
        // This is because the kotlin binaries cannot be downloaded from the container.
        "setup-kotlin": "container" in checkSpecification ? "false" : "true",
      },
    });
    const installGo = checkSpecification.installGo;
    if (installGo) {
      const baseGoVersionExpr = ">=1.21.0";
      // Expose the Go version as a workflow input so callers can override it.
      workflowInputs[KnownInputName.GoVersion] = {
        type: "string",
        description: "The version of Go to install",
        required: false,
        default: baseGoVersionExpr,
      };
      steps.push({
        name: "Install Go",
        uses: "actions/setup-go@v6",
        with: {
          "go-version":
            "${{ inputs.go-version || '" + baseGoVersionExpr + "' }}",
          // to avoid potentially misleading autobuilder results where we expect it to download
          // dependencies successfully, but they actually come from a warm cache
          cache: false,
        },
      });
    }
    const installJava = checkSpecification.installJava;
    if (installJava) {
      const baseJavaVersionExpr = "17";
      workflowInputs[KnownInputName.JavaVersion] = {
        type: "string",
        description: "The version of Java to install",
        required: false,
        default: baseJavaVersionExpr,
      };
      steps.push({
        name: "Install Java",
        uses: "actions/setup-java@v5",
        with: {
          "java-version":
            "${{ inputs.java-version || '" + baseJavaVersionExpr + "' }}",
          distribution: "temurin",
        },
      });
    }
    const installPython = checkSpecification.installPython;
    if (installPython) {
      const basePythonVersionExpr = "3.13";
      workflowInputs[KnownInputName.PythonVersion] = {
        type: "string",
        description: "The version of Python to install",
        required: false,
        default: basePythonVersionExpr,
      };
      steps.push({
        name: "Install Python",
        if: "matrix.version != 'nightly-latest'",
        uses: "actions/setup-python@v6",
        with: {
          "python-version":
            "${{ inputs.python-version || '" + basePythonVersionExpr + "' }}",
        },
      });
    }
    const installDotNet = checkSpecification.installDotNet;
    if (installDotNet) {
      const baseDotNetVersionExpr = "9.x";
      workflowInputs[KnownInputName.DotnetVersion] = {
        type: "string",
        description: "The version of .NET to install",
        required: false,
        default: baseDotNetVersionExpr,
      };
      steps.push({
        name: "Install .NET",
        uses: "actions/setup-dotnet@v5",
        with: {
          "dotnet-version":
            "${{ inputs.dotnet-version || '" + baseDotNetVersionExpr + "' }}",
        },
      });
    }
    const installYq = checkSpecification.installYq;
    if (installYq) {
      steps.push({
        name: "Install yq",
        if: "runner.os == 'Windows'",
        env: {
          YQ_PATH: "${{ runner.temp }}/yq",
          // This is essentially an arbitrary version of `yq`, which happened to be the one that
          // `choco` fetched when we moved away from using that here.
          // See https://github.com/github/codeql-action/pull/3423
          YQ_VERSION: "v4.50.1",
        },
        run:
          'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' +
          'echo "$YQ_PATH" >> "$GITHUB_PATH"',
      });
    }

    // Extract the sequence of steps from the YAML document to persist as much formatting as possible.
    const specSteps = specDocument.get("steps") as yaml.YAMLSeq;
    // A handful of workflow specifications use double quotes for values, while we generally use single quotes.
    // This replaces double quotes with single quotes for consistency.
    yaml.visit(specSteps, {
      Scalar(_key, node) {
        if (node.type === "QUOTE_DOUBLE") {
          node.type = "QUOTE_SINGLE";
        }
      },
    });
    // Add the generated steps in front of the ones from the specification.
    specSteps.items.unshift(...steps);

    const checkJob: Record<string, any> = {
      strategy: {
        "fail-fast": false,
        matrix: {
          include: matrix,
        },
      },
      name: checkSpecification.name,
      if: "github.triggering_actor != 'dependabot[bot]'",
      permissions: {
        contents: "read",
        "security-events": "read",
      },
      "timeout-minutes": 45,
      "runs-on": "${{ matrix.os }}",
      steps: specSteps,
    };
    // A specification-level `permissions` overrides the default above.
    if (checkSpecification.permissions) {
      checkJob.permissions = checkSpecification.permissions;
    }
    for (const key of ["env", "container", "services"] as const) {
      if (checkSpecification[key] !== undefined) {
        checkJob[key] = checkSpecification[key];
      }
    }
    checkJob.env = checkJob.env ?? {};
    if (!("CODEQL_ACTION_TEST_MODE" in checkJob.env)) {
      checkJob.env.CODEQL_ACTION_TEST_MODE = true;
    }

    // If this check belongs to a named collection, record it.
    if (checkSpecification.collection) {
      const collectionName = checkSpecification.collection;
      if (!collections[collectionName]) {
        collections[collectionName] = [];
      }
      collections[collectionName].push({
        specification: checkSpecification,
        checkName,
        inputs: workflowInputs,
      });
    }

    // Inputs participate in the concurrency group, since they may result in
    // different behaviour for a given workflow on the same ref.
    let extraGroupName = "";
    for (const inputName of Object.keys(workflowInputs)) {
      extraGroupName += "-${{inputs." + inputName + "}}";
    }

    const cron = new yaml.Scalar("0 5 * * *");
    cron.type = yaml.Scalar.QUOTE_SINGLE;

    const workflow = {
      name: `PR Check - ${checkSpecification.name}`,
      env: {
        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}",
        GO111MODULE: "auto",
      },
      on: {
        push: {
          branches: ["main", "releases/v*"],
        },
        pull_request: {
          types: ["opened", "synchronize", "reopened", "ready_for_review"],
        },
        merge_group: {
          types: ["checks_requested"],
        },
        schedule: [{ cron }],
        workflow_dispatch: {
          inputs: workflowInputs,
        },
        workflow_call: {
          inputs: workflowInputs,
        },
      },
      defaults: {
        run: {
          shell: "bash",
        },
      },
      concurrency: {
        // Cancel in-progress runs only for pull_request events; `|| false`
        // makes the non-PR value an explicit boolean.
        "cancel-in-progress":
          "${{ github.event_name == 'pull_request' || false }}",
        group: checkName + "-${{github.ref}}" + extraGroupName,
      },
      jobs: {
        [checkName]: checkJob,
      },
    };

    const outputPath = path.join(OUTPUT_DIR, `__${checkName}.yml`);
    writeYaml(outputPath, workflow);
  }

  // Write workflow files for collections.
  for (const collectionName of Object.keys(collections)) {
    const jobs: Record<string, any> = {};
    let combinedInputs: Record<string, WorkflowInput> = {};
    for (const check of collections[collectionName]) {
      const { checkName, specification, inputs: checkInputs } = check;
      const checkWith: Record<string, string> = {};
      // Later checks' inputs override earlier ones on name clashes.
      combinedInputs = { ...combinedInputs, ...checkInputs };
      for (const inputName of Object.keys(checkInputs)) {
        checkWith[inputName] = "${{ inputs." + inputName + " }}";
      }
      jobs[checkName] = {
        name: specification.name,
        permissions: {
          contents: "read",
          "security-events": "read",
        },
        uses: `./.github/workflows/__${checkName}.yml`,
        with: checkWith,
      };
    }
    const collectionWorkflow = {
      name: `Manual Check - ${collectionName}`,
      env: {
        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}",
        GO111MODULE: "auto",
      },
      on: {
        workflow_dispatch: {
          inputs: combinedInputs,
        },
      },
      jobs,
    };
    const outputPath = path.join(OUTPUT_DIR, `__${collectionName}.yml`);
    writeYaml(outputPath, collectionWorkflow);
  }

  console.log(
    `\nDone. Wrote ${checkFiles.length} workflow file(s) to ${OUTPUT_DIR}`,
  );
}

main();

View File

@@ -1,185 +0,0 @@
#!/usr/bin/env python3
"""
Sync-back script to automatically update action versions in source templates
from the generated workflow files after Dependabot updates.
This script scans the generated workflow files (.github/workflows/__*.yml) to find
all external action versions used, then updates:
1. Hardcoded action versions in pr-checks/sync.py
2. Action version references in template files in pr-checks/checks/
The script automatically detects all actions used in generated workflows and
preserves version comments (e.g., # v1.2.3) when syncing versions.
This ensures that when Dependabot updates action versions in generated workflows,
those changes are properly synced back to the source templates. Regular workflow
files are updated directly by Dependabot and don't need sync-back.
"""
import os
import re
import glob
import argparse
import sys
from pathlib import Path
from typing import Dict, List
def scan_generated_workflows(workflow_dir: str) -> Dict[str, str]:
    """Collect external action versions used by generated workflow files.

    Scans every ``__*.yml`` file in ``workflow_dir`` for ``uses:`` lines and
    returns a mapping from action name to its version text, which may include
    a trailing ``# vX.Y.Z`` comment.
    """
    # Captures `owner/repo...@version-with-possible-comment` on a `uses:` line.
    uses_pattern = re.compile(r'uses:\s+([^/\s]+/[^@\s]+)@([^@\n]+)')
    action_versions: Dict[str, str] = {}
    for file_path in glob.glob(os.path.join(workflow_dir, "__*.yml")):
        with open(file_path, 'r') as f:
            content = f.read()
        for action_name, version_with_comment in uses_pattern.findall(content):
            # Only track non-local actions; local ones start with './'.
            if action_name.startswith('./'):
                continue
            # Assume that version numbers are consistent across files (this
            # should be the case on a Dependabot update PR), so last-write-wins.
            action_versions[action_name] = version_with_comment.rstrip()
    return action_versions
def update_sync_py(sync_py_path: str, action_versions: Dict[str, str]) -> bool:
    """Rewrite hardcoded action pins in pr-checks/sync.py.

    Args:
        sync_py_path: Path to the sync.py file.
        action_versions: Mapping of action names to version refs; values may
            carry a trailing ``# comment``, which is stripped here because
            sync.py stores bare refs.

    Returns:
        True if the file contents changed, False otherwise.

    Raises:
        FileNotFoundError: If ``sync_py_path`` does not exist.
    """
    if not os.path.exists(sync_py_path):
        raise FileNotFoundError(f"Could not find {sync_py_path}")

    with open(sync_py_path, 'r') as handle:
        original = handle.read()

    updated = original
    for action, ref in action_versions.items():
        # sync.py stores plain refs, so drop any trailing comment.
        bare_ref = ref.split('#')[0].strip() if '#' in ref else ref.strip()
        # Rewrites entries shaped like 'uses': 'actions/setup-node@v4'.
        # This breaks if a uses reference is ever stored in a variable; in
        # that case the PR checks will simply fail - an accepted risk.
        updated = re.sub(
            rf"('uses':\s*'){re.escape(action)}@(?:[^']+)(')",
            rf"\1{action}@{bare_ref}\2",
            updated,
        )

    if updated == original:
        print(f"No changes needed in {sync_py_path}")
        return False
    with open(sync_py_path, 'w') as handle:
        handle.write(updated)
    print(f"Updated {sync_py_path}")
    return True
def update_template_files(checks_dir: str, action_versions: Dict[str, str]) -> List[str]:
    """Rewrite action pins in the template files under pr-checks/checks/.

    Args:
        checks_dir: Path to the pr-checks/checks directory.
        action_versions: Mapping of action names to version refs; values may
            include a trailing ``# comment``, which is written through as-is.

    Returns:
        List of template file paths that were modified.
    """
    changed: List[str] = []
    for template_path in glob.glob(os.path.join(checks_dir, "*.yml")):
        with open(template_path, 'r') as handle:
            original = handle.read()

        updated = original
        for action, ref in action_versions.items():
            # Templates keep the full ref, including any `# vX.Y.Z` comment,
            # so the replacement carries the comment through unchanged.
            updated = re.sub(
                rf"(uses:\s+{re.escape(action)})@(?:[^@\n]+)",
                rf"\1@{ref}",
                updated,
            )

        if updated != original:
            with open(template_path, 'w') as handle:
                handle.write(updated)
            changed.append(template_path)
            print(f"Updated {template_path}")
    return changed
def main():
    """Entry point: scan generated workflows and sync action versions back.

    Returns:
        Process exit code: 0 on success (including a no-op run), 1 when no
        action versions could be found in the generated workflows.
    """
    parser = argparse.ArgumentParser(description="Sync action versions from generated workflows back to templates")
    parser.add_argument("--verbose", "-v", action="store_true", help="Enable verbose output")
    args = parser.parse_args()

    # Resolve repository layout relative to this script, which is assumed to
    # live in pr-checks/ at the top of the repository.
    script_dir = Path(__file__).parent
    workflow_dir = script_dir.parent / ".github" / "workflows"
    checks_dir = script_dir / "checks"
    sync_py_path = script_dir / "sync.py"

    print("Scanning generated workflows for latest action versions...")
    action_versions = scan_generated_workflows(str(workflow_dir))
    if args.verbose:
        print("Found action versions:")
        for action, version in action_versions.items():
            print(f"  {action}@{version}")
    if not action_versions:
        print("No action versions found in generated workflows")
        return 1

    # Update files
    print("\nUpdating source files...")
    modified_files = []
    if update_sync_py(str(sync_py_path), action_versions):
        modified_files.append(str(sync_py_path))
    modified_files.extend(update_template_files(str(checks_dir), action_versions))

    if modified_files:
        print(f"\nSync completed. Modified {len(modified_files)} files:")
        for file_path in modified_files:
            print(f"  {file_path}")
    else:
        print("\nNo files needed updating - all action versions are already in sync")
    return 0
if __name__ == "__main__":
sys.exit(main())

250
pr-checks/sync_back.test.ts Executable file
View File

@@ -0,0 +1,250 @@
#!/usr/bin/env npx tsx
/*
Tests for the sync_back.ts script
*/
import * as assert from "node:assert/strict";
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { afterEach, beforeEach, describe, it } from "node:test";
import {
scanGeneratedWorkflows,
updateSyncTs,
updateTemplateFiles,
} from "./sync_back";
// Shared fixture state; recreated for every test case by beforeEach below.
let testDir: string;
let workflowDir: string;
let checksDir: string;
let syncTsPath: string;

beforeEach(() => {
  /** Set up temporary directories and files for testing */
  testDir = fs.mkdtempSync(path.join(os.tmpdir(), "sync-back-test-"));
  workflowDir = path.join(testDir, ".github", "workflows");
  checksDir = path.join(testDir, "pr-checks", "checks");
  fs.mkdirSync(workflowDir, { recursive: true });
  fs.mkdirSync(checksDir, { recursive: true });
  // Create sync.ts file path
  // NOTE(review): only the path is prepared here; individual tests write the
  // file themselves when they need it to exist.
  syncTsPath = path.join(testDir, "pr-checks", "sync.ts");
});

afterEach(() => {
  /** Clean up temporary directories */
  fs.rmSync(testDir, { recursive: true, force: true });
});
// Tests for scanGeneratedWorkflows: extraction of `uses:` pins from generated
// workflow files, including comment preservation and local-action filtering.
describe("scanGeneratedWorkflows", () => {
  it("basic workflow scanning", () => {
    /** Test basic workflow scanning functionality */
    const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v5
- uses: actions/setup-go@v6
`;
    fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);
    const result = scanGeneratedWorkflows(workflowDir);
    assert.equal(result["actions/checkout"], "v4");
    assert.equal(result["actions/setup-node"], "v5");
    assert.equal(result["actions/setup-go"], "v6");
  });

  it("scanning workflows with version comments", () => {
    /** Test scanning workflows with version comments */
    const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0
- uses: actions/setup-python@v6 # Latest Python
`;
    fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);
    const result = scanGeneratedWorkflows(workflowDir);
    assert.equal(result["actions/checkout"], "v4");
    // The trailing `# vX.Y.Z` comment must be kept as part of the ref.
    assert.equal(
      result["ruby/setup-ruby"],
      "44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
    );
    assert.equal(result["actions/setup-python"], "v6 # Latest Python");
  });

  it("ignores local actions", () => {
    /** Test that local actions (starting with ./) are ignored */
    const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/local-action
- uses: ./another-local-action@v1
`;
    fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);
    const result = scanGeneratedWorkflows(workflowDir);
    assert.equal(result["actions/checkout"], "v4");
    assert.equal("./.github/actions/local-action" in result, false);
    assert.equal("./another-local-action" in result, false);
  });
});
// Tests for updateSyncTs: rewriting `uses: "owner/name@ref"` entries in
// sync.ts, stripping any trailing comment from the new ref.
describe("updateSyncTs", () => {
  it("updates sync.ts file", () => {
    /** Test updating sync.ts file */
    const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v4",
with: { "node-version": "16" },
},
{
uses: "actions/setup-go@v5",
with: { "go-version": "1.19" },
},
];
`;
    fs.writeFileSync(syncTsPath, syncTsContent);
    const actionVersions = {
      "actions/setup-node": "v5",
      "actions/setup-go": "v6",
    };
    const result = updateSyncTs(syncTsPath, actionVersions);
    assert.equal(result, true);
    const updatedContent = fs.readFileSync(syncTsPath, "utf8");
    assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"'));
    assert.ok(updatedContent.includes('uses: "actions/setup-go@v6"'));
  });

  it("strips comments from versions", () => {
    /** Test updating sync.ts file when versions have comments */
    const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v4",
with: { "node-version": "16" },
},
];
`;
    fs.writeFileSync(syncTsPath, syncTsContent);
    const actionVersions = {
      "actions/setup-node": "v5 # Latest version",
    };
    const result = updateSyncTs(syncTsPath, actionVersions);
    assert.equal(result, true);
    const updatedContent = fs.readFileSync(syncTsPath, "utf8");
    // sync.ts should get the version without comment
    assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"'));
    assert.ok(!updatedContent.includes("# Latest version"));
  });

  it("returns false when no changes are needed", () => {
    /** Test that updateSyncTs returns false when no changes are needed */
    const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v5",
with: { "node-version": "16" },
},
];
`;
    fs.writeFileSync(syncTsPath, syncTsContent);
    const actionVersions = {
      "actions/setup-node": "v5",
    };
    const result = updateSyncTs(syncTsPath, actionVersions);
    assert.equal(result, false);
  });
});
// Tests for updateTemplateFiles: rewriting `uses:` refs in template YAML
// files while carrying through any trailing `# vX.Y.Z` comment.
describe("updateTemplateFiles", () => {
  it("updates template files", () => {
    /** Test updating template files */
    const templateContent = `
name: Test Template
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v4
with:
node-version: 16
`;
    const templatePath = path.join(checksDir, "test.yml");
    fs.writeFileSync(templatePath, templateContent);
    const actionVersions = {
      "actions/checkout": "v4",
      "actions/setup-node": "v5 # Latest",
    };
    const result = updateTemplateFiles(checksDir, actionVersions);
    assert.equal(result.length, 1);
    assert.ok(result.includes(templatePath));
    const updatedContent = fs.readFileSync(templatePath, "utf8");
    assert.ok(updatedContent.includes("uses: actions/checkout@v4"));
    // Unlike sync.ts, templates keep the comment attached to the ref.
    assert.ok(updatedContent.includes("uses: actions/setup-node@v5 # Latest"));
  });

  it("preserves version comments", () => {
    /** Test that updating template files preserves version comments */
    const templateContent = `
name: Test Template
steps:
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0
`;
    const templatePath = path.join(checksDir, "test.yml");
    fs.writeFileSync(templatePath, templateContent);
    const actionVersions = {
      "ruby/setup-ruby":
        "55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
    };
    const result = updateTemplateFiles(checksDir, actionVersions);
    assert.equal(result.length, 1);
    const updatedContent = fs.readFileSync(templatePath, "utf8");
    assert.ok(
      updatedContent.includes(
        "uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
      ),
    );
  });
});

220
pr-checks/sync_back.ts Executable file
View File

@@ -0,0 +1,220 @@
#!/usr/bin/env npx tsx
/*
Sync-back script to automatically update action versions in source templates
from the generated workflow files after Dependabot updates.
This script scans the generated workflow files (.github/workflows/__*.yml) to find
all external action versions used, then updates:
1. Hardcoded action versions in pr-checks/sync.ts
2. Action version references in template files in pr-checks/checks/
The script automatically detects all actions used in generated workflows and
preserves version comments (e.g., # v1.2.3) when syncing versions.
This ensures that when Dependabot updates action versions in generated workflows,
those changes are properly synced back to the source templates. Regular workflow
files are updated directly by Dependabot and don't need sync-back.
*/
import { parseArgs } from "node:util";
import * as fs from "fs";
import * as path from "path";
const THIS_DIR = __dirname;
const CHECKS_DIR = path.join(THIS_DIR, "checks");
const WORKFLOW_DIR = path.join(THIS_DIR, "..", ".github", "workflows");
const SYNC_TS_PATH = path.join(THIS_DIR, "sync.ts");
/**
 * Scan generated workflow files for the action versions they pin.
 *
 * @param workflowDir - Path to the .github/workflows directory
 * @returns Map from external action names to the ref they are pinned at,
 *   including any trailing `# vX.Y.Z` comment found on the `uses:` line
 */
export function scanGeneratedWorkflows(workflowDir: string): Record<string, string> {
  const versions: Record<string, string> = {};
  // Matches `uses: owner/name@ref`, capturing the ref together with any
  // trailing comment on the same line.
  const usesPattern = /uses:\s+([^/\s]+\/[^@\s]+)@([^@\n]+)/g;
  const generatedNames = fs
    .readdirSync(workflowDir)
    .filter((name) => name.startsWith("__") && name.endsWith(".yml"));
  for (const name of generatedNames) {
    const text = fs.readFileSync(path.join(workflowDir, name), "utf8");
    for (const found of text.matchAll(usesPattern)) {
      const action = found[1];
      // Local actions (./path) are repo-relative and carry no syncable version.
      if (!action.startsWith("./")) {
        // Versions are assumed consistent across generated files on a
        // Dependabot update PR, so last-one-wins is acceptable.
        versions[action] = found[2].trimEnd();
      }
    }
  }
  return versions;
}
/**
 * Rewrite hardcoded action pins in pr-checks/sync.ts.
 *
 * @param syncTsPath - Path to the sync.ts file
 * @param actionVersions - Map of action names to version refs; values may
 *   carry a trailing `# comment`, which is stripped here because sync.ts
 *   stores bare refs
 * @returns True if the file contents changed, false otherwise
 * @throws If `syncTsPath` does not exist
 */
export function updateSyncTs(
  syncTsPath: string,
  actionVersions: Record<string, string>,
): boolean {
  if (!fs.existsSync(syncTsPath)) {
    throw new Error(`Could not find ${syncTsPath}`);
  }
  const original = fs.readFileSync(syncTsPath, "utf8");
  let updated = original;
  for (const [action, ref] of Object.entries(actionVersions)) {
    // sync.ts stores plain refs, so drop any trailing comment.
    const hashIndex = ref.indexOf("#");
    const bareRef = (hashIndex >= 0 ? ref.slice(0, hashIndex) : ref).trim();
    // Rewrites entries shaped like uses: "actions/setup-node@v4". This breaks
    // if a uses reference is ever stored in a variable; in that case the PR
    // checks will simply fail - an accepted risk.
    const escaped = action.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    updated = updated.replace(
      new RegExp(`(uses:\\s*")${escaped}@(?:[^"]+)(")`, "g"),
      `$1${action}@${bareRef}$2`,
    );
  }
  if (updated === original) {
    console.info(`No changes needed in ${syncTsPath}`);
    return false;
  }
  fs.writeFileSync(syncTsPath, updated, "utf8");
  console.info(`Updated ${syncTsPath}`);
  return true;
}
/**
 * Rewrite action pins in the template files under pr-checks/checks/.
 *
 * @param checksDir - Path to the pr-checks/checks directory
 * @param actionVersions - Map of action names to version refs; values may
 *   include a trailing `# comment`, which is written through as-is
 * @returns List of template file paths that were modified
 */
export function updateTemplateFiles(
  checksDir: string,
  actionVersions: Record<string, string>,
): string[] {
  const changed: string[] = [];
  const templatePaths = fs
    .readdirSync(checksDir)
    .filter((name) => name.endsWith(".yml"))
    .map((name) => path.join(checksDir, name));
  for (const templatePath of templatePaths) {
    const original = fs.readFileSync(templatePath, "utf8");
    let updated = original;
    for (const [action, ref] of Object.entries(actionVersions)) {
      // Templates keep the full ref, including any `# vX.Y.Z` comment, so
      // the replacement carries the comment through unchanged.
      const escaped = action.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      updated = updated.replace(
        new RegExp(`(uses:\\s+${escaped})@(?:[^@\\n]+)`, "g"),
        `$1@${ref}`,
      );
    }
    if (updated !== original) {
      fs.writeFileSync(templatePath, updated, "utf8");
      changed.push(templatePath);
      console.info(`Updated ${templatePath}`);
    }
  }
  return changed;
}
/**
 * Entry point: scan generated workflows and sync action versions back into
 * sync.ts and the check templates.
 *
 * @returns Process exit code: 0 on success (including a no-op run), 1 when
 *   no action versions could be found in the generated workflows
 */
function main(): number {
  const { values } = parseArgs({
    options: {
      verbose: { type: "boolean", short: "v", default: false },
    },
    strict: true,
  });

  console.info("Scanning generated workflows for latest action versions...");
  const actionVersions = scanGeneratedWorkflows(WORKFLOW_DIR);
  if (values.verbose ?? false) {
    console.info("Found action versions:");
    for (const [action, version] of Object.entries(actionVersions)) {
      console.info(`  ${action}@${version}`);
    }
  }
  if (Object.keys(actionVersions).length === 0) {
    console.error("No action versions found in generated workflows");
    return 1;
  }

  // Update files
  console.info("\nUpdating source files...");
  const modified: string[] = [];
  if (updateSyncTs(SYNC_TS_PATH, actionVersions)) {
    modified.push(SYNC_TS_PATH);
  }
  modified.push(...updateTemplateFiles(CHECKS_DIR, actionVersions));

  if (modified.length > 0) {
    console.info(`\nSync completed. Modified ${modified.length} files:`);
    for (const filePath of modified) {
      console.info(`  ${filePath}`);
    }
  } else {
    console.info(
      "\nNo files needed updating - all action versions are already in sync",
    );
  }
  return 0;
}
// Only call `main` if this script was run directly.
if (require.main === module) {
process.exit(main());
}

View File

@@ -1,237 +0,0 @@
#!/usr/bin/env python3
"""
Tests for the sync_back.py script
"""
import os
import shutil
import tempfile
import unittest
import sync_back
class TestSyncBack(unittest.TestCase):
    """Unit tests for sync_back: workflow scanning and version write-back."""

    def setUp(self):
        """Set up temporary directories and files for testing"""
        self.test_dir = tempfile.mkdtemp()
        self.workflow_dir = os.path.join(self.test_dir, ".github", "workflows")
        self.checks_dir = os.path.join(self.test_dir, "pr-checks", "checks")
        os.makedirs(self.workflow_dir)
        os.makedirs(self.checks_dir)
        # Create sync.py file
        # NOTE(review): only the path is prepared here; individual tests write
        # the file themselves when they need it to exist.
        self.sync_py_path = os.path.join(self.test_dir, "pr-checks", "sync.py")

    def tearDown(self):
        """Clean up temporary directories"""
        shutil.rmtree(self.test_dir)

    def test_scan_generated_workflows_basic(self):
        """Test basic workflow scanning functionality"""
        # Create a test generated workflow file
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v5
- uses: actions/setup-go@v6
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        self.assertEqual(result['actions/setup-node'], 'v5')
        self.assertEqual(result['actions/setup-go'], 'v6')

    def test_scan_generated_workflows_with_comments(self):
        """Test scanning workflows with version comments"""
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0
- uses: actions/setup-python@v6 # Latest Python
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        # The trailing `# vX.Y.Z` comment must be kept as part of the ref.
        self.assertEqual(result['ruby/setup-ruby'], '44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0')
        self.assertEqual(result['actions/setup-python'], 'v6 # Latest Python')

    def test_scan_generated_workflows_ignores_local_actions(self):
        """Test that local actions (starting with ./) are ignored"""
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/local-action
- uses: ./another-local-action@v1
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        self.assertNotIn('./.github/actions/local-action', result)
        self.assertNotIn('./another-local-action', result)

    def test_update_sync_py(self):
        """Test updating sync.py file"""
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v4',
'with': {'node-version': '16'}
},
{
'uses': 'actions/setup-go@v5',
'with': {'go-version': '1.19'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5',
            'actions/setup-go': 'v6'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertTrue(result)
        with open(self.sync_py_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("'uses': 'actions/setup-node@v5'", updated_content)
        self.assertIn("'uses': 'actions/setup-go@v6'", updated_content)

    def test_update_sync_py_with_comments(self):
        """Test updating sync.py file when versions have comments"""
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v4',
'with': {'node-version': '16'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5 # Latest version'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertTrue(result)
        with open(self.sync_py_path, 'r') as f:
            updated_content = f.read()
        # sync.py should get the version without comment
        self.assertIn("'uses': 'actions/setup-node@v5'", updated_content)
        self.assertNotIn("# Latest version", updated_content)

    def test_update_template_files(self):
        """Test updating template files"""
        template_content = """
name: Test Template
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v4
with:
node-version: 16
"""
        template_path = os.path.join(self.checks_dir, "test.yml")
        with open(template_path, 'w') as f:
            f.write(template_content)
        action_versions = {
            'actions/checkout': 'v4',
            'actions/setup-node': 'v5 # Latest'
        }
        result = sync_back.update_template_files(self.checks_dir, action_versions)
        self.assertEqual(len(result), 1)
        self.assertIn(template_path, result)
        with open(template_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("uses: actions/checkout@v4", updated_content)
        # Unlike sync.py, templates keep the comment attached to the ref.
        self.assertIn("uses: actions/setup-node@v5 # Latest", updated_content)

    def test_update_template_files_preserves_comments(self):
        """Test that updating template files preserves version comments"""
        template_content = """
name: Test Template
steps:
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0
"""
        template_path = os.path.join(self.checks_dir, "test.yml")
        with open(template_path, 'w') as f:
            f.write(template_content)
        action_versions = {
            'ruby/setup-ruby': '55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0'
        }
        result = sync_back.update_template_files(self.checks_dir, action_versions)
        self.assertEqual(len(result), 1)
        with open(template_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", updated_content)

    def test_no_changes_needed(self):
        """Test that functions return False/empty when no changes are needed"""
        # Test sync.py with no changes needed
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v5',
'with': {'node-version': '16'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertFalse(result)
if __name__ == '__main__':
unittest.main()

View File

@@ -100,7 +100,7 @@ test("computeAutomationID()", async (t) => {
);
});
test("getPullRequestBranches() with pull request context", (t) => {
test.serial("getPullRequestBranches() with pull request context", (t) => {
withMockedContext(
{
pull_request: {
@@ -119,89 +119,104 @@ test("getPullRequestBranches() with pull request context", (t) => {
);
});
test("getPullRequestBranches() returns undefined with push context", (t) => {
withMockedContext(
{
push: {
ref: "refs/heads/main",
},
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
test("getPullRequestBranches() with Default Setup environment variables", (t) => {
withMockedContext({}, () => {
withMockedEnv(
test.serial(
"getPullRequestBranches() returns undefined with push context",
(t) => {
withMockedContext(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.deepEqual(getPullRequestBranches(), {
base: "main",
head: "refs/heads/feature-branch",
});
t.is(isAnalyzingPullRequest(), true);
},
);
});
});
test("getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: undefined,
push: {
ref: "refs/heads/main",
},
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
},
);
test("getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
test.serial(
"getPullRequestBranches() with Default Setup environment variables",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.deepEqual(getPullRequestBranches(), {
base: "main",
head: "refs/heads/feature-branch",
});
t.is(isAnalyzingPullRequest(), true);
},
);
});
},
);
test("getPullRequestBranches() returns undefined when no PR context", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
test.serial(
"getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test("initializeEnvironment", (t) => {
test.serial(
"getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test.serial(
"getPullRequestBranches() returns undefined when no PR context",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test.serial("initializeEnvironment", (t) => {
initializeEnvironment("1.2.3");
t.deepEqual(process.env[EnvVar.VERSION], "1.2.3");
});
test("fixCodeQualityCategory", (t) => {
test.serial("fixCodeQualityCategory", (t) => {
withMockedEnv(
{
GITHUB_EVENT_NAME: "dynamic",
@@ -249,14 +264,17 @@ test("fixCodeQualityCategory", (t) => {
);
});
test("isDynamicWorkflow() returns true if event name is `dynamic`", (t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
t.assert(isDynamicWorkflow());
process.env.GITHUB_EVENT_NAME = "push";
t.false(isDynamicWorkflow());
});
test.serial(
"isDynamicWorkflow() returns true if event name is `dynamic`",
(t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
t.assert(isDynamicWorkflow());
process.env.GITHUB_EVENT_NAME = "push";
t.false(isDynamicWorkflow());
},
);
test("isDefaultSetup() returns true when expected", (t) => {
test.serial("isDefaultSetup() returns true when expected", (t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
process.env[EnvVar.ANALYSIS_KEY] = "dynamic/github-code-scanning";
t.assert(isDefaultSetup());

View File

@@ -50,31 +50,40 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
});
});
test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns("code-scanning,code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
});
test.serial(
"getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns("code-scanning,code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
},
);
test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("quality-queries").returns("code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
});
test.serial(
"getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("quality-queries").returns("code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
},
);
test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
});
test.serial(
"getAnalysisKinds - throws if `analysis-kinds` input is invalid",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
},
);
// Test the compatibility matrix by looping through all analysis kinds.
const analysisKinds = Object.values(AnalysisKind);
@@ -86,25 +95,31 @@ for (let i = 0; i < analysisKinds.length; i++) {
if (analysisKind === otherAnalysis) continue;
if (compatibilityMatrix[analysisKind].has(otherAnalysis)) {
test(`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
});
test.serial(
`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`,
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
},
);
} else {
test(`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
});
test.serial(
`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`,
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
},
);
}
}
}
@@ -122,44 +137,50 @@ test("Code Scanning configuration does not accept other SARIF extensions", (t) =
}
});
test("Risk Assessment configuration transforms SARIF upload payload", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
test.serial(
"Risk Assessment configuration transforms SARIF upload payload",
(t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
});
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
},
);
test("Risk Assessment configuration throws for negative assessment IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
});
test.serial(
"Risk Assessment configuration throws for negative assessment IDs",
(t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
},
);
test("Risk Assessment configuration throws for invalid IDs", (t) => {
test.serial("Risk Assessment configuration throws for invalid IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "foo";
t.throws(
() =>

View File

@@ -28,9 +28,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
// it a bit to 20s.
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(statusReport, "createStatusReportBase")
.resolves({} as statusReport.StatusReportBase);
@@ -54,7 +52,6 @@ test("analyze action with RAM & threads from environment variables", async (t) =
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
setupActionsVars(tmpDir, tmpDir);
mockFeatureFlagApiEndpoint(200, {});
// When there are no action inputs for RAM and threads, the action uses

View File

@@ -26,9 +26,7 @@ setupTests(test);
test("analyze action with RAM & threads from action inputs", async (t) => {
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(statusReport, "createStatusReportBase")
.resolves({} as statusReport.StatusReportBase);
@@ -51,7 +49,6 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
setupActionsVars(tmpDir, tmpDir);
mockFeatureFlagApiEndpoint(200, {});
process.env["CODEQL_THREADS"] = "1";

View File

@@ -32,7 +32,7 @@ setupTests(test);
* - Checks that the duration fields are populated for the correct language.
* - Checks that the QA telemetry status report fields are populated when the QA feature flag is enabled.
*/
test("status report fields", async (t) => {
test.serial("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);

View File

@@ -25,6 +25,7 @@ import { FeatureEnablement, Feature } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import type * as sarif from "./sarif";
import { DatabaseCreationTimings, EventReport } from "./status-report";
import { endTracingForCluster } from "./tracer-config";
import * as util from "./util";
@@ -594,7 +595,7 @@ export async function runQueries(
function getPerQueryAlertCounts(sarifPath: string): Record<string, number> {
const sarifObject = JSON.parse(
fs.readFileSync(sarifPath, "utf8"),
) as util.SarifFile;
) as sarif.Log;
// We do not need to compute fingerprints because we are not sending data based off of locations.
// Generate the query: alert count object

View File

@@ -14,7 +14,7 @@ test.beforeEach(() => {
util.initializeEnvironment(actionsUtil.getActionVersion());
});
test("getApiClient", async (t) => {
test.serial("getApiClient", async (t) => {
const pluginStub: sinon.SinonStub = sinon.stub(githubUtils.GitHub, "plugin");
const githubStub: sinon.SinonStub = sinon.stub();
pluginStub.returns(githubStub);
@@ -61,7 +61,7 @@ function mockGetMetaVersionHeader(
return spyGetContents;
}
test("getGitHubVersion for Dotcom", async (t) => {
test.serial("getGitHubVersion for Dotcom", async (t) => {
const apiDetails = {
auth: "",
url: "https://github.com",
@@ -75,7 +75,7 @@ test("getGitHubVersion for Dotcom", async (t) => {
t.deepEqual(util.GitHubVariant.DOTCOM, v.type);
});
test("getGitHubVersion for GHES", async (t) => {
test.serial("getGitHubVersion for GHES", async (t) => {
mockGetMetaVersionHeader("2.0");
const v2 = await api.getGitHubVersionFromApi(api.getApiClient(), {
auth: "",
@@ -88,7 +88,7 @@ test("getGitHubVersion for GHES", async (t) => {
);
});
test("getGitHubVersion for different domain", async (t) => {
test.serial("getGitHubVersion for different domain", async (t) => {
mockGetMetaVersionHeader(undefined);
const v3 = await api.getGitHubVersionFromApi(api.getApiClient(), {
auth: "",
@@ -98,7 +98,7 @@ test("getGitHubVersion for different domain", async (t) => {
t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3);
});
test("getGitHubVersion for GHEC-DR", async (t) => {
test.serial("getGitHubVersion for GHEC-DR", async (t) => {
mockGetMetaVersionHeader("ghe.com");
const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), {
auth: "",
@@ -108,96 +108,99 @@ test("getGitHubVersion for GHEC-DR", async (t) => {
t.deepEqual({ type: util.GitHubVariant.GHEC_DR }, gheDotcom);
});
test("wrapApiConfigurationError correctly wraps specific configuration errors", (t) => {
// We don't reclassify arbitrary errors
const arbitraryError = new Error("arbitrary error");
let res = api.wrapApiConfigurationError(arbitraryError);
t.is(res, arbitraryError);
test.serial(
"wrapApiConfigurationError correctly wraps specific configuration errors",
(t) => {
// We don't reclassify arbitrary errors
const arbitraryError = new Error("arbitrary error");
let res = api.wrapApiConfigurationError(arbitraryError);
t.is(res, arbitraryError);
// Same goes for arbitrary errors
const configError = new util.ConfigurationError("arbitrary error");
res = api.wrapApiConfigurationError(configError);
t.is(res, configError);
// Same goes for arbitrary errors
const configError = new util.ConfigurationError("arbitrary error");
res = api.wrapApiConfigurationError(configError);
t.is(res, configError);
// If an HTTP error doesn't contain a specific error message, we don't
// wrap is an an API error.
const httpError = new util.HTTPError("arbitrary HTTP error", 456);
res = api.wrapApiConfigurationError(httpError);
t.is(res, httpError);
// If an HTTP error doesn't contain a specific error message, we don't
// wrap is an an API error.
const httpError = new util.HTTPError("arbitrary HTTP error", 456);
res = api.wrapApiConfigurationError(httpError);
t.is(res, httpError);
// For other HTTP errors, we wrap them as Configuration errors if they contain
// specific error messages.
const httpNotFoundError = new util.HTTPError("commit not found", 404);
res = api.wrapApiConfigurationError(httpNotFoundError);
t.deepEqual(res, new util.ConfigurationError("commit not found"));
// For other HTTP errors, we wrap them as Configuration errors if they contain
// specific error messages.
const httpNotFoundError = new util.HTTPError("commit not found", 404);
res = api.wrapApiConfigurationError(httpNotFoundError);
t.deepEqual(res, new util.ConfigurationError("commit not found"));
const refNotFoundError = new util.HTTPError(
"ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest",
404,
);
res = api.wrapApiConfigurationError(refNotFoundError);
t.deepEqual(
res,
new util.ConfigurationError(
const refNotFoundError = new util.HTTPError(
"ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest",
),
);
404,
);
res = api.wrapApiConfigurationError(refNotFoundError);
t.deepEqual(
res,
new util.ConfigurationError(
"ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest",
),
);
const apiRateLimitError = new util.HTTPError(
"API rate limit exceeded for installation",
403,
);
res = api.wrapApiConfigurationError(apiRateLimitError);
t.deepEqual(
res,
new util.ConfigurationError("API rate limit exceeded for installation"),
);
const apiRateLimitError = new util.HTTPError(
"API rate limit exceeded for installation",
403,
);
res = api.wrapApiConfigurationError(apiRateLimitError);
t.deepEqual(
res,
new util.ConfigurationError("API rate limit exceeded for installation"),
);
const tokenSuggestionMessage =
"Please check that your token is valid and has the required permissions: contents: read, security-events: write";
const badCredentialsError = new util.HTTPError("Bad credentials", 401);
res = api.wrapApiConfigurationError(badCredentialsError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const tokenSuggestionMessage =
"Please check that your token is valid and has the required permissions: contents: read, security-events: write";
const badCredentialsError = new util.HTTPError("Bad credentials", 401);
res = api.wrapApiConfigurationError(badCredentialsError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const notFoundError = new util.HTTPError("Not Found", 404);
res = api.wrapApiConfigurationError(notFoundError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const notFoundError = new util.HTTPError("Not Found", 404);
res = api.wrapApiConfigurationError(notFoundError);
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage));
const resourceNotAccessibleError = new util.HTTPError(
"Resource not accessible by integration",
403,
);
res = api.wrapApiConfigurationError(resourceNotAccessibleError);
t.deepEqual(
res,
new util.ConfigurationError("Resource not accessible by integration"),
);
const resourceNotAccessibleError = new util.HTTPError(
"Resource not accessible by integration",
403,
);
res = api.wrapApiConfigurationError(resourceNotAccessibleError);
t.deepEqual(
res,
new util.ConfigurationError("Resource not accessible by integration"),
);
// Enablement errors.
const enablementErrorMessages = [
"Code Security must be enabled for this repository to use code scanning",
"Advanced Security must be enabled for this repository to use code scanning",
"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
];
const transforms = [
(msg: string) => msg,
(msg: string) => msg.toLowerCase(),
(msg: string) => msg.toLocaleUpperCase(),
];
// Enablement errors.
const enablementErrorMessages = [
"Code Security must be enabled for this repository to use code scanning",
"Advanced Security must be enabled for this repository to use code scanning",
"Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.",
];
const transforms = [
(msg: string) => msg,
(msg: string) => msg.toLowerCase(),
(msg: string) => msg.toLocaleUpperCase(),
];
for (const enablementErrorMessage of enablementErrorMessages) {
for (const transform of transforms) {
const enablementError = new util.HTTPError(
transform(enablementErrorMessage),
403,
);
res = api.wrapApiConfigurationError(enablementError);
t.deepEqual(
res,
new util.ConfigurationError(
api.getFeatureEnablementError(enablementError.message),
),
);
for (const enablementErrorMessage of enablementErrorMessages) {
for (const transform of transforms) {
const enablementError = new util.HTTPError(
transform(enablementErrorMessage),
403,
);
res = api.wrapApiConfigurationError(enablementError);
t.deepEqual(
res,
new util.ConfigurationError(
api.getFeatureEnablementError(enablementError.message),
),
);
}
}
}
});
},
);

View File

@@ -1 +1 @@
{"maximumVersion": "3.20", "minimumVersion": "3.14"}
{"maximumVersion": "3.21", "minimumVersion": "3.14"}

View File

@@ -131,27 +131,30 @@ for (const [platform, arch] of [
["linux", "arm64"],
["win32", "arm64"],
]) {
test(`wrapCliConfigurationError - ${platform}/${arch} unsupported`, (t) => {
sinon.stub(process, "platform").value(platform);
sinon.stub(process, "arch").value(arch);
const commandError = new CommandInvocationError(
"codeql",
["version"],
1,
"Some error",
);
const cliError = new CliError(commandError);
test.serial(
`wrapCliConfigurationError - ${platform}/${arch} unsupported`,
(t) => {
sinon.stub(process, "platform").value(platform);
sinon.stub(process, "arch").value(arch);
const commandError = new CommandInvocationError(
"codeql",
["version"],
1,
"Some error",
);
const cliError = new CliError(commandError);
const wrappedError = wrapCliConfigurationError(cliError);
const wrappedError = wrapCliConfigurationError(cliError);
t.true(wrappedError instanceof ConfigurationError);
t.true(
wrappedError.message.includes(
"CodeQL CLI does not support the platform/architecture combination",
),
);
t.true(wrappedError.message.includes(`${platform}/${arch}`));
});
t.true(wrappedError instanceof ConfigurationError);
t.true(
wrappedError.message.includes(
"CodeQL CLI does not support the platform/architecture combination",
),
);
t.true(wrappedError.message.includes(`${platform}/${arch}`));
},
);
}
test("wrapCliConfigurationError - supported platform", (t) => {

View File

@@ -120,19 +120,53 @@ async function stubCodeql(): Promise<codeql.CodeQL> {
return codeqlObject;
}
test("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => {
const features = createFeatures([]);
test.serial(
"downloads and caches explicitly requested bundles that aren't in the toolcache",
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const versions = ["20200601", "20200610"];
const versions = ["20200601", "20200610"];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
for (let i = 0; i < versions.length; i++) {
const version = versions[i];
const url = mockBundleDownloadApi({
tagName: `codeql-bundle-${version}`,
isPinned: false,
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(result.toolsVersion, `0.0.0-${version}`);
t.is(result.toolsSource, ToolsSource.Download);
}
t.is(toolcache.findAllVersions("CodeQL").length, 2);
});
},
);
test.serial(
"caches semantically versioned bundles using their semantic version number",
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const url = mockBundleDownloadApi({
tagName: `codeql-bundle-${version}`,
tagName: `codeql-bundle-v2.15.0`,
isPinned: false,
});
const result = await codeql.setupCodeQL(
@@ -146,78 +180,53 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc
false,
);
t.assert(toolcache.find("CodeQL", `0.0.0-${version}`));
t.is(result.toolsVersion, `0.0.0-${version}`);
t.is(toolcache.findAllVersions("CodeQL").length, 1);
t.assert(toolcache.find("CodeQL", `2.15.0`));
t.is(result.toolsVersion, `2.15.0`);
t.is(result.toolsSource, ToolsSource.Download);
}
t.is(toolcache.findAllVersions("CodeQL").length, 2);
});
});
test("caches semantically versioned bundles using their semantic version number", async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const url = mockBundleDownloadApi({
tagName: `codeql-bundle-v2.15.0`,
isPinned: false,
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
},
);
t.is(toolcache.findAllVersions("CodeQL").length, 1);
t.assert(toolcache.find("CodeQL", `2.15.0`));
t.is(result.toolsVersion, `2.15.0`);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
});
test.serial(
"downloads an explicitly requested bundle even if a different version is cached",
async (t) => {
const features = createFeatures([]);
test("downloads an explicitly requested bundle even if a different version is cached", async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
});
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
const url = mockBundleDownloadApi({
tagName: "codeql-bundle-20200610",
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
const url = mockBundleDownloadApi({
tagName: "codeql-bundle-20200610",
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", "0.0.0-20200610"));
t.deepEqual(result.toolsVersion, "0.0.0-20200610");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
});
},
);
const EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES = [
{
@@ -234,37 +243,42 @@ for (const {
tagName,
expectedToolcacheVersion,
} of EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES) {
test(`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`, async (t) => {
const features = createFeatures([]);
test.serial(
`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`,
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
const url = mockBundleDownloadApi({
tagName,
const url = mockBundleDownloadApi({
tagName,
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
t.deepEqual(result.toolsVersion, expectedToolcacheVersion);
t.is(result.toolsSource, ToolsSource.Download);
t.assert(
Number.isInteger(
result.toolsDownloadStatusReport?.downloadDurationMs,
),
);
});
const result = await codeql.setupCodeQL(
url,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.assert(toolcache.find("CodeQL", expectedToolcacheVersion));
t.deepEqual(result.toolsVersion, expectedToolcacheVersion);
t.is(result.toolsSource, ToolsSource.Download);
t.assert(
Number.isInteger(result.toolsDownloadStatusReport?.downloadDurationMs),
);
});
});
},
);
}
for (const toolcacheVersion of [
@@ -273,7 +287,7 @@ for (const toolcacheVersion of [
SAMPLE_DEFAULT_CLI_VERSION.cliVersion,
`${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`,
]) {
test(
test.serial(
`uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` +
`${toolcacheVersion} is installed`,
async (t) => {
@@ -308,158 +322,170 @@ for (const toolcacheVersion of [
);
}
test(`uses a cached bundle when no tools input is given on GHES`, async (t) => {
const features = createFeatures([]);
test.serial(
`uses a cached bundle when no tools input is given on GHES`,
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
});
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
t.is(result.toolsSource, ToolsSource.Toolcache);
t.is(result.toolsDownloadStatusReport?.combinedDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.extractionDurationMs, undefined);
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
},
);
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
test.serial(
`downloads bundle if only an unpinned version is cached on GHES`,
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: false,
tmpDir,
});
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, "0.0.0-20200601");
t.is(result.toolsSource, ToolsSource.Toolcache);
t.is(result.toolsDownloadStatusReport?.combinedDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined);
t.is(result.toolsDownloadStatusReport?.extractionDurationMs, undefined);
});
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
});
});
test(`downloads bundle if only an unpinned version is cached on GHES`, async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: false,
tmpDir,
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
},
);
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
});
const result = await codeql.setupCodeQL(
undefined,
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.GHES,
{
cliVersion: defaults.cliVersion,
test.serial(
'downloads bundle if "latest" tools specified but not cached',
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
});
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
},
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
});
const result = await codeql.setupCodeQL(
"latest",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test('downloads bundle if "latest" tools specified but not cached', async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
await installIntoToolcache({
tagName: "codeql-bundle-20200601",
isPinned: true,
tmpDir,
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
},
);
mockBundleDownloadApi({
tagName: defaults.bundleVersion,
test.serial(
"bundle URL from another repo is cached as 0.0.0-bundleVersion",
async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
const releasesApiMock = mockReleaseApi({
assetNames: ["cli-version-2.14.6.txt"],
tagName: "codeql-bundle-20230203",
});
mockBundleDownloadApi({
repo: "codeql-testing/codeql-cli-nightlies",
platformSpecific: false,
tagName: "codeql-bundle-20230203",
});
const result = await codeql.setupCodeQL(
"https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.is(result.toolsVersion, "0.0.0-20230203");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
t.is(cachedVersions[0], "0.0.0-20230203");
t.false(releasesApiMock.isDone());
});
const result = await codeql.setupCodeQL(
"latest",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.deepEqual(result.toolsVersion, defaults.cliVersion);
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 2);
});
});
test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => {
const features = createFeatures([]);
await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
mockApiDetails(SAMPLE_DOTCOM_API_DETAILS);
sinon.stub(actionsUtil, "isRunningLocalAction").returns(true);
const releasesApiMock = mockReleaseApi({
assetNames: ["cli-version-2.14.6.txt"],
tagName: "codeql-bundle-20230203",
});
mockBundleDownloadApi({
repo: "codeql-testing/codeql-cli-nightlies",
platformSpecific: false,
tagName: "codeql-bundle-20230203",
});
const result = await codeql.setupCodeQL(
"https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz",
SAMPLE_DOTCOM_API_DETAILS,
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
features,
getRunnerLogger(true),
false,
);
t.is(result.toolsVersion, "0.0.0-20230203");
t.is(result.toolsSource, ToolsSource.Download);
if (result.toolsDownloadStatusReport) {
assertDurationsInteger(t, result.toolsDownloadStatusReport);
}
const cachedVersions = toolcache.findAllVersions("CodeQL");
t.is(cachedVersions.length, 1);
t.is(cachedVersions[0], "0.0.0-20230203");
t.false(releasesApiMock.isDone());
});
});
},
);
function assertDurationsInteger(
t: ExecutionContext<unknown>,
@@ -472,7 +498,7 @@ function assertDurationsInteger(
}
}
test("getExtraOptions works for explicit paths", (t) => {
test.serial("getExtraOptions works for explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []);
t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]);
@@ -483,11 +509,11 @@ test("getExtraOptions works for explicit paths", (t) => {
);
});
test("getExtraOptions works for wildcards", (t) => {
test.serial("getExtraOptions works for wildcards", (t) => {
t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]);
});
test("getExtraOptions works for wildcards and explicit paths", (t) => {
test.serial("getExtraOptions works for wildcards and explicit paths", (t) => {
const o1 = { "*": [42], foo: [87] };
t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]);
@@ -499,7 +525,7 @@ test("getExtraOptions works for wildcards and explicit paths", (t) => {
t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]);
});
test("getExtraOptions throws for bad content", (t) => {
test.serial("getExtraOptions throws for bad content", (t) => {
t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], []));
t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], []));
@@ -564,7 +590,7 @@ const injectedConfigMacro = test.macro({
`databaseInitCluster() injected config: ${providedTitle}`,
});
test(
test.serial(
"basic",
injectedConfigMacro,
{
@@ -574,7 +600,7 @@ test(
{},
);
test(
test.serial(
"injected packs from input",
injectedConfigMacro,
{
@@ -587,7 +613,7 @@ test(
},
);
test(
test.serial(
"injected packs from input with existing packs combines",
injectedConfigMacro,
{
@@ -609,7 +635,7 @@ test(
},
);
test(
test.serial(
"injected packs from input with existing packs overrides",
injectedConfigMacro,
{
@@ -629,7 +655,7 @@ test(
);
// similar, but with queries
test(
test.serial(
"injected queries from input",
injectedConfigMacro,
{
@@ -649,7 +675,7 @@ test(
},
);
test(
test.serial(
"injected queries from input overrides",
injectedConfigMacro,
{
@@ -673,7 +699,7 @@ test(
},
);
test(
test.serial(
"injected queries from input combines",
injectedConfigMacro,
{
@@ -701,7 +727,7 @@ test(
},
);
test(
test.serial(
"injected queries from input combines 2",
injectedConfigMacro,
{
@@ -723,7 +749,7 @@ test(
},
);
test(
test.serial(
"injected queries and packs, but empty",
injectedConfigMacro,
{
@@ -742,7 +768,7 @@ test(
{},
);
test(
test.serial(
"repo property queries have the highest precedence",
injectedConfigMacro,
{
@@ -764,7 +790,7 @@ test(
},
);
test(
test.serial(
"repo property queries combines with queries input",
injectedConfigMacro,
{
@@ -791,7 +817,7 @@ test(
},
);
test(
test.serial(
"repo property queries combines everything else",
injectedConfigMacro,
{
@@ -820,55 +846,61 @@ test(
},
);
test("passes a code scanning config AND qlconfig to the CLI", async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
"/path/to/qlconfig.yml",
getRunnerLogger(true),
);
test.serial(
"passes a code scanning config AND qlconfig to the CLI",
async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
"/path/to/qlconfig.yml",
getRunnerLogger(true),
);
const args = runnerConstructorStub.firstCall.args[1] as string[];
// should have used a config file
const hasCodeScanningConfigArg = args.some((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
const args = runnerConstructorStub.firstCall.args[1] as string[];
// should have used a config file
const hasCodeScanningConfigArg = args.some((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig");
// should have passed a qlconfig file
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
});
});
// should have passed a qlconfig file
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.truthy(hasQlconfigArg, "Should have injected a codescanning config");
});
},
);
test("does not pass a qlconfig to the CLI when it is undefined", async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
test.serial(
"does not pass a qlconfig to the CLI when it is undefined",
async (t: ExecutionContext<unknown>) => {
await util.withTmpDir(async (tempDir) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
undefined, // undefined qlconfigFile
getRunnerLogger(true),
);
await codeqlObject.databaseInitCluster(
{ ...stubConfig, tempDir },
"",
undefined,
undefined, // undefined qlconfigFile
getRunnerLogger(true),
);
const args = runnerConstructorStub.firstCall.args[1] as any[];
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
});
});
const args = runnerConstructorStub.firstCall.args[1] as any[];
const hasQlconfigArg = args.some((arg: string) =>
arg.startsWith("--qlconfig-file="),
);
t.false(hasQlconfigArg, "should NOT have injected a qlconfig");
});
},
);
test("runTool summarizes several fatal errors", async (t) => {
test.serial("runTool summarizes several fatal errors", async (t) => {
const heapError =
"A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
const datasetImportError =
@@ -905,7 +937,7 @@ test("runTool summarizes several fatal errors", async (t) => {
);
});
test("runTool summarizes autobuilder errors", async (t) => {
test.serial("runTool summarizes autobuilder errors", async (t) => {
const stderr = `
[2019-09-18 12:00:00] [autobuild] A non-error message
[2019-09-18 12:00:00] Untagged message
@@ -938,7 +970,7 @@ test("runTool summarizes autobuilder errors", async (t) => {
);
});
test("runTool truncates long autobuilder errors", async (t) => {
test.serial("runTool truncates long autobuilder errors", async (t) => {
const stderr = Array.from(
{ length: 20 },
(_, i) => `[2019-09-18 12:00:00] [autobuild] [ERROR] line${i + 1}`,
@@ -964,7 +996,7 @@ test("runTool truncates long autobuilder errors", async (t) => {
);
});
test("runTool recognizes fatal internal errors", async (t) => {
test.serial("runTool recognizes fatal internal errors", async (t) => {
const stderr = `
[11/31 eval 8m19s] Evaluation done; writing results to codeql/go-queries/Security/CWE-020/MissingRegexpAnchor.bqrs.
Oops! A fatal internal error occurred. Details:
@@ -989,64 +1021,70 @@ test("runTool recognizes fatal internal errors", async (t) => {
);
});
test("runTool outputs last line of stderr if fatal error could not be found", async (t) => {
const cliStderr = "line1\nline2\nline3\nline4\nline5";
stubToolRunnerConstructor(32, cliStderr);
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
test.serial(
"runTool outputs last line of stderr if fatal error could not be found",
async (t) => {
const cliStderr = "line1\nline2\nline3\nline4\nline5";
stubToolRunnerConstructor(32, cliStderr);
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await t.throwsAsync(
async () =>
await codeqlObject.finalizeDatabase(
"db",
"--threads=2",
"--ram=2048",
false,
),
{
instanceOf: util.ConfigurationError,
message: new RegExp(
'Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' +
"Exit code was 32 and last log line was: line5\\. See the logs for more details\\.",
),
},
);
});
await t.throwsAsync(
async () =>
await codeqlObject.finalizeDatabase(
"db",
"--threads=2",
"--ram=2048",
false,
),
{
instanceOf: util.ConfigurationError,
message: new RegExp(
'Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' +
"Exit code was 32 and last log line was: line5\\. See the logs for more details\\.",
),
},
);
},
);
test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS", async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
test.serial(
"Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS",
async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await stubCodeql();
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
'{ "database": { "init": ["--overwrite"] } }';
process.env["CODEQL_ACTION_EXTRA_OPTIONS"] =
'{ "database": { "init": ["--overwrite"] } }';
await codeqlObject.databaseInitCluster(
stubConfig,
"sourceRoot",
undefined,
undefined,
getRunnerLogger(false),
);
await codeqlObject.databaseInitCluster(
stubConfig,
"sourceRoot",
undefined,
undefined,
getRunnerLogger(false),
);
t.true(runnerConstructorStub.calledOnce);
const args = runnerConstructorStub.firstCall.args[1] as string[];
t.is(
args.filter((option: string) => option === "--overwrite").length,
1,
"--overwrite should only be passed once",
);
t.true(runnerConstructorStub.calledOnce);
const args = runnerConstructorStub.firstCall.args[1] as string[];
t.is(
args.filter((option: string) => option === "--overwrite").length,
1,
"--overwrite should only be passed once",
);
// Clean up
const configArg = args.find((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg!.split("=")[1];
await fs.promises.rm(configFile, { force: true });
});
// Clean up
const configArg = args.find((arg: string) =>
arg.startsWith("--codescanning-config="),
);
t.truthy(configArg, "Should have injected a codescanning config");
const configFile = configArg!.split("=")[1];
await fs.promises.rm(configFile, { force: true });
},
);
export function stubToolRunnerConstructor(
exitCode: number = 0,

File diff suppressed because it is too large Load Diff

View File

@@ -69,6 +69,9 @@ import {
isInTestMode,
joinAtMost,
DiskUsage,
Result,
Success,
Failure,
} from "./util";
/**
@@ -653,14 +656,18 @@ const OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES: Record<Language, Feature> = {
swift: Feature.OverlayAnalysisCodeScanningSwift,
};
async function isOverlayAnalysisFeatureEnabled(
/**
* Checks whether the overlay analysis feature is enabled for the given
* languages and configuration.
*/
async function checkOverlayAnalysisFeatureEnabled(
features: FeatureEnablement,
codeql: CodeQL,
languages: Language[],
codeScanningConfig: UserConfig,
): Promise<boolean> {
): Promise<Result<void, OverlayDisabledReason>> {
if (!(await features.getValue(Feature.OverlayAnalysis, codeql))) {
return false;
return new Failure(OverlayDisabledReason.OverallFeatureNotEnabled);
}
let enableForCodeScanningOnly = false;
for (const language of languages) {
@@ -677,39 +684,35 @@ async function isOverlayAnalysisFeatureEnabled(
enableForCodeScanningOnly = true;
continue;
}
return false;
return new Failure(OverlayDisabledReason.LanguageNotEnabled);
}
if (enableForCodeScanningOnly) {
// A code-scanning configuration runs only the (default) code-scanning suite
// if the default queries are not disabled, and no packs, queries, or
// query-filters are specified.
return (
const usesDefaultQueriesOnly =
codeScanningConfig["disable-default-queries"] !== true &&
codeScanningConfig.packs === undefined &&
codeScanningConfig.queries === undefined &&
codeScanningConfig["query-filters"] === undefined
);
codeScanningConfig["query-filters"] === undefined;
if (!usesDefaultQueriesOnly) {
return new Failure(OverlayDisabledReason.NonDefaultQueries);
}
}
return true;
return new Success(undefined);
}
/** Checks if the runner has enough disk space for overlay analysis. */
function runnerHasSufficientDiskSpace(
diskUsage: DiskUsage | undefined,
diskUsage: DiskUsage,
logger: Logger,
useV2ResourceChecks: boolean,
): boolean {
const minimumDiskSpaceBytes = useV2ResourceChecks
? OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES
: OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES;
if (
diskUsage === undefined ||
diskUsage.numAvailableBytes < minimumDiskSpaceBytes
) {
const diskSpaceMb =
diskUsage === undefined
? 0
: Math.round(diskUsage.numAvailableBytes / 1_000_000);
if (diskUsage.numAvailableBytes < minimumDiskSpaceBytes) {
const diskSpaceMb = Math.round(diskUsage.numAvailableBytes / 1_000_000);
const minimumDiskSpaceMb = Math.round(minimumDiskSpaceBytes / 1_000_000);
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
@@ -754,23 +757,28 @@ async function runnerHasSufficientMemory(
}
/**
* Checks if the runner supports overlay analysis based on available disk space
* and the maximum memory CodeQL will be allowed to use.
* Checks if the runner has sufficient disk space and memory for overlay
* analysis.
*/
async function runnerSupportsOverlayAnalysis(
async function checkRunnerResources(
codeql: CodeQL,
diskUsage: DiskUsage | undefined,
diskUsage: DiskUsage,
ramInput: string | undefined,
logger: Logger,
useV2ResourceChecks: boolean,
): Promise<boolean> {
): Promise<Result<void, OverlayDisabledReason>> {
if (!runnerHasSufficientDiskSpace(diskUsage, logger, useV2ResourceChecks)) {
return false;
return new Failure(OverlayDisabledReason.InsufficientDiskSpace);
}
if (!(await runnerHasSufficientMemory(codeql, ramInput, logger))) {
return false;
return new Failure(OverlayDisabledReason.InsufficientMemory);
}
return true;
return new Success(undefined);
}
interface EnabledOverlayConfig {
overlayDatabaseMode: Exclude<OverlayDatabaseMode, OverlayDatabaseMode.None>;
useOverlayDatabaseCaching: boolean;
}
/**
@@ -791,10 +799,11 @@ async function runnerSupportsOverlayAnalysis(
* For `Overlay` and `OverlayBase`, the function performs further checks and
* reverts to `None` if any check should fail.
*
* @returns An object containing the overlay database mode and whether the
* action should perform overlay-base database caching.
* @returns A `Success` containing the overlay database mode and whether the
* action should perform overlay-base database caching, or a `Failure`
* containing the reason why overlay analysis is disabled.
*/
export async function getOverlayDatabaseMode(
export async function checkOverlayEnablement(
codeql: CodeQL,
features: FeatureEnablement,
languages: Language[],
@@ -805,15 +814,7 @@ export async function getOverlayDatabaseMode(
repositoryProperties: RepositoryProperties,
gitVersion: GitVersionInfo | undefined,
logger: Logger,
): Promise<{
overlayDatabaseMode: OverlayDatabaseMode;
useOverlayDatabaseCaching: boolean;
disabledReason: OverlayDisabledReason | undefined;
}> {
let overlayDatabaseMode = OverlayDatabaseMode.None;
let useOverlayDatabaseCaching = false;
let disabledReason: OverlayDisabledReason | undefined;
): Promise<Result<EnabledOverlayConfig, OverlayDisabledReason>> {
const modeEnv = process.env.CODEQL_OVERLAY_DATABASE_MODE;
// Any unrecognized CODEQL_OVERLAY_DATABASE_MODE value will be ignored and
// treated as if the environment variable was not set.
@@ -822,101 +823,132 @@ export async function getOverlayDatabaseMode(
modeEnv === OverlayDatabaseMode.OverlayBase ||
modeEnv === OverlayDatabaseMode.None
) {
overlayDatabaseMode = modeEnv;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
`Setting overlay database mode to ${modeEnv} ` +
"from the CODEQL_OVERLAY_DATABASE_MODE environment variable.",
);
} else if (
repositoryProperties[RepositoryPropertyName.DISABLE_OVERLAY] === true
) {
if (modeEnv === OverlayDatabaseMode.None) {
return new Failure(OverlayDisabledReason.DisabledByEnvironmentVariable);
}
return validateOverlayDatabaseMode(
modeEnv,
false,
codeql,
languages,
sourceRoot,
buildMode,
gitVersion,
logger,
);
}
if (repositoryProperties[RepositoryPropertyName.DISABLE_OVERLAY] === true) {
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
`because the ${RepositoryPropertyName.DISABLE_OVERLAY} repository property is set to true.`,
);
overlayDatabaseMode = OverlayDatabaseMode.None;
disabledReason = OverlayDisabledReason.DisabledByRepositoryProperty;
} else if (
await isOverlayAnalysisFeatureEnabled(
features,
codeql,
languages,
codeScanningConfig,
)
return new Failure(OverlayDisabledReason.DisabledByRepositoryProperty);
}
const featureResult = await checkOverlayAnalysisFeatureEnabled(
features,
codeql,
languages,
codeScanningConfig,
);
if (featureResult.isFailure()) {
return featureResult;
}
const performResourceChecks = !(await features.getValue(
Feature.OverlayAnalysisSkipResourceChecks,
codeql,
));
const useV2ResourceChecks = await features.getValue(
Feature.OverlayAnalysisResourceChecksV2,
);
const checkOverlayStatus = await features.getValue(
Feature.OverlayAnalysisStatusCheck,
);
const needDiskUsage = performResourceChecks || checkOverlayStatus;
const diskUsage = needDiskUsage ? await checkDiskUsage(logger) : undefined;
if (needDiskUsage && diskUsage === undefined) {
logger.warning(
`Unable to determine disk usage, therefore setting overlay database mode to ${OverlayDatabaseMode.None}.`,
);
return new Failure(OverlayDisabledReason.UnableToDetermineDiskUsage);
}
const resourceResult =
performResourceChecks && diskUsage !== undefined
? await checkRunnerResources(
codeql,
diskUsage,
ramInput,
logger,
useV2ResourceChecks,
)
: new Success<void>(undefined);
if (resourceResult.isFailure()) {
return resourceResult;
}
if (
checkOverlayStatus &&
diskUsage !== undefined &&
(await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger))
) {
const performResourceChecks = !(await features.getValue(
Feature.OverlayAnalysisSkipResourceChecks,
codeql,
));
const useV2ResourceChecks = await features.getValue(
Feature.OverlayAnalysisResourceChecksV2,
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
"because overlay analysis previously failed with this combination of languages, " +
"disk space, and CodeQL version.",
);
const checkOverlayStatus = await features.getValue(
Feature.OverlayAnalysisStatusCheck,
return new Failure(OverlayDisabledReason.SkippedDueToCachedStatus);
}
let overlayDatabaseMode: OverlayDatabaseMode;
if (isAnalyzingPullRequest()) {
overlayDatabaseMode = OverlayDatabaseMode.Overlay;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing a pull request.",
);
} else if (await isAnalyzingDefaultBranch()) {
overlayDatabaseMode = OverlayDatabaseMode.OverlayBase;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing the default branch.",
);
const diskUsage =
performResourceChecks || checkOverlayStatus
? await checkDiskUsage(logger)
: undefined;
if (
performResourceChecks &&
!(await runnerSupportsOverlayAnalysis(
codeql,
diskUsage,
ramInput,
logger,
useV2ResourceChecks,
))
) {
overlayDatabaseMode = OverlayDatabaseMode.None;
disabledReason = OverlayDisabledReason.InsufficientResources;
} else if (checkOverlayStatus && diskUsage === undefined) {
logger.warning(
`Unable to determine disk usage, therefore setting overlay database mode to ${OverlayDatabaseMode.None}.`,
);
overlayDatabaseMode = OverlayDatabaseMode.None;
disabledReason = OverlayDisabledReason.UnableToDetermineDiskUsage;
} else if (
checkOverlayStatus &&
diskUsage &&
(await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger))
) {
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
"because overlay analysis previously failed with this combination of languages, " +
"disk space, and CodeQL version.",
);
overlayDatabaseMode = OverlayDatabaseMode.None;
disabledReason = OverlayDisabledReason.SkippedDueToCachedStatus;
} else if (isAnalyzingPullRequest()) {
overlayDatabaseMode = OverlayDatabaseMode.Overlay;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing a pull request.",
);
} else if (await isAnalyzingDefaultBranch()) {
overlayDatabaseMode = OverlayDatabaseMode.OverlayBase;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing the default branch.",
);
}
} else {
disabledReason = OverlayDisabledReason.FeatureNotEnabled;
return new Failure(OverlayDisabledReason.NotPullRequestOrDefaultBranch);
}
const disabledResult = (reason: OverlayDisabledReason | undefined) => ({
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
disabledReason: reason,
});
if (overlayDatabaseMode === OverlayDatabaseMode.None) {
return disabledResult(disabledReason);
}
return validateOverlayDatabaseMode(
overlayDatabaseMode,
true,
codeql,
languages,
sourceRoot,
buildMode,
gitVersion,
logger,
);
}
/**
* Validates that the given overlay database mode is compatible with the current
* configuration (build mode, CodeQL version, git repository, git version). Returns
* the mode unchanged if all checks pass, or falls back to `None` with the
* appropriate disabled reason.
*/
async function validateOverlayDatabaseMode(
overlayDatabaseMode: Exclude<OverlayDatabaseMode, OverlayDatabaseMode.None>,
useOverlayDatabaseCaching: boolean,
codeql: CodeQL,
languages: Language[],
sourceRoot: string,
buildMode: BuildMode | undefined,
gitVersion: GitVersionInfo | undefined,
logger: Logger,
): Promise<Result<EnabledOverlayConfig, OverlayDisabledReason>> {
if (
buildMode !== BuildMode.None &&
(
@@ -937,7 +969,7 @@ export async function getOverlayDatabaseMode(
`build-mode is set to "${buildMode}" instead of "none". ` +
"Falling back to creating a normal full database instead.",
);
return disabledResult(OverlayDisabledReason.IncompatibleBuildMode);
return new Failure(OverlayDisabledReason.IncompatibleBuildMode);
}
if (!(await codeQlVersionAtLeast(codeql, CODEQL_OVERLAY_MINIMUM_VERSION))) {
logger.warning(
@@ -945,7 +977,7 @@ export async function getOverlayDatabaseMode(
`the CodeQL CLI is older than ${CODEQL_OVERLAY_MINIMUM_VERSION}. ` +
"Falling back to creating a normal full database instead.",
);
return disabledResult(OverlayDisabledReason.IncompatibleCodeQl);
return new Failure(OverlayDisabledReason.IncompatibleCodeQl);
}
if ((await getGitRoot(sourceRoot)) === undefined) {
logger.warning(
@@ -953,7 +985,7 @@ export async function getOverlayDatabaseMode(
`the source root "${sourceRoot}" is not inside a git repository. ` +
"Falling back to creating a normal full database instead.",
);
return disabledResult(OverlayDisabledReason.NoGitRoot);
return new Failure(OverlayDisabledReason.NoGitRoot);
}
if (gitVersion === undefined) {
logger.warning(
@@ -961,7 +993,7 @@ export async function getOverlayDatabaseMode(
"the Git version could not be determined. " +
"Falling back to creating a normal full database instead.",
);
return disabledResult(OverlayDisabledReason.IncompatibleGit);
return new Failure(OverlayDisabledReason.IncompatibleGit);
}
if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) {
logger.warning(
@@ -969,14 +1001,13 @@ export async function getOverlayDatabaseMode(
`the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. ` +
"Falling back to creating a normal full database instead.",
);
return disabledResult(OverlayDisabledReason.IncompatibleGit);
return new Failure(OverlayDisabledReason.IncompatibleGit);
}
return {
return new Success({
overlayDatabaseMode,
useOverlayDatabaseCaching,
disabledReason,
};
});
}
function dbLocationOrDefault(
@@ -1122,11 +1153,7 @@ export async function initConfig(
// and queries, which in turn depends on the user config and the augmentation
// properties. So we need to calculate the overlay database mode after the
// rest of the config has been populated.
const {
overlayDatabaseMode,
useOverlayDatabaseCaching,
disabledReason: overlayDisabledReason,
} = await getOverlayDatabaseMode(
const overlayDatabaseModeResult = await checkOverlayEnablement(
inputs.codeql,
inputs.features,
config.languages,
@@ -1138,14 +1165,22 @@ export async function initConfig(
gitVersion,
logger,
);
logger.info(
`Using overlay database mode: ${overlayDatabaseMode} ` +
`${useOverlayDatabaseCaching ? "with" : "without"} caching.`,
);
config.overlayDatabaseMode = overlayDatabaseMode;
config.useOverlayDatabaseCaching = useOverlayDatabaseCaching;
if (overlayDisabledReason !== undefined) {
if (overlayDatabaseModeResult.isSuccess()) {
const { overlayDatabaseMode, useOverlayDatabaseCaching } =
overlayDatabaseModeResult.value;
logger.info(
`Using overlay database mode: ${overlayDatabaseMode} ` +
`${useOverlayDatabaseCaching ? "with" : "without"} caching.`,
);
config.overlayDatabaseMode = overlayDatabaseMode;
config.useOverlayDatabaseCaching = useOverlayDatabaseCaching;
} else {
const overlayDisabledReason = overlayDatabaseModeResult.value;
logger.info(
`Using overlay database mode: ${OverlayDatabaseMode.None} without caching.`,
);
config.overlayDatabaseMode = OverlayDatabaseMode.None;
config.useOverlayDatabaseCaching = false;
await addOverlayDisablementDiagnostics(
config,
inputs.codeql,
@@ -1154,7 +1189,7 @@ export async function initConfig(
}
if (
overlayDatabaseMode === OverlayDatabaseMode.Overlay ||
config.overlayDatabaseMode === OverlayDatabaseMode.Overlay ||
(await shouldPerformDiffInformedAnalysis(
inputs.codeql,
inputs.features,

View File

@@ -82,70 +82,76 @@ function getCodeQL() {
});
}
test("Abort database upload if 'upload-database' input set to false", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
test.serial(
"Abort database upload if 'upload-database' input set to false",
async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("false");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Database upload disabled in workflow. Skipping upload.",
) !== undefined,
);
});
});
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Database upload disabled in workflow. Skipping upload.",
) !== undefined,
);
});
},
);
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
test.serial(
"Abort database upload if 'analysis-kinds: code-scanning' is not enabled",
async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
await mockHttpRequests(201);
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
{
...getTestConfig(tmpDir),
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
) !== undefined,
);
});
});
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
{
...getTestConfig(tmpDir),
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
) !== undefined,
);
});
},
);
test("Abort database upload if running against GHES", async (t) => {
test.serial("Abort database upload if running against GHES", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
@@ -177,35 +183,38 @@ test("Abort database upload if running against GHES", async (t) => {
});
});
test("Abort database upload if not analyzing default branch", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
test.serial(
"Abort database upload if not analyzing default branch",
async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.",
) !== undefined,
);
});
});
const loggedMessages = [];
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message === "Not analyzing default branch. Skipping upload.",
) !== undefined,
);
});
},
);
test("Don't crash if uploading a database fails", async (t) => {
test.serial("Don't crash if uploading a database fails", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
@@ -237,7 +246,7 @@ test("Don't crash if uploading a database fails", async (t) => {
});
});
test("Successfully uploading a database to github.com", async (t) => {
test.serial("Successfully uploading a database to github.com", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
@@ -267,7 +276,7 @@ test("Successfully uploading a database to github.com", async (t) => {
});
});
test("Successfully uploading a database to GHEC-DR", async (t) => {
test.serial("Successfully uploading a database to GHEC-DR", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon

View File

@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.24.2",
"cliVersion": "2.24.2",
"priorBundleVersion": "codeql-bundle-v2.24.1",
"priorCliVersion": "2.24.1"
"bundleVersion": "codeql-bundle-v2.24.3",
"cliVersion": "2.24.3",
"priorBundleVersion": "codeql-bundle-v2.24.2",
"priorCliVersion": "2.24.2"
}

Some files were not shown because too many files have changed in this diff Show More