Merge remote-tracking branch 'origin/main' into mbg/csra/upload-failed-sarif-artifact

This commit is contained in:
Michael B. Gale
2026-03-09 18:32:36 +00:00
144 changed files with 14401 additions and 10117 deletions

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
all-platform-bundle:
strategy:
@@ -95,7 +94,7 @@ jobs:
- id: init
uses: ./../action/init
with:
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code

View File

@@ -87,24 +87,24 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
post-processed-sarif-path: ${{ runner.temp }}/post-processed
post-processed-sarif-path: '${{ runner.temp }}/post-processed'
- name: Upload SARIF files
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/results/*.sarif
path: '${{ runner.temp }}/results/*.sarif'
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/post-processed
path: '${{ runner.temp }}/post-processed'
retention-days: 7
if-no-files-found: error
@@ -112,7 +112,7 @@ jobs:
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
EXPECT_PRESENT: 'false'
with:
script: ${{ env.CHECK_SCRIPT }}
@@ -120,7 +120,7 @@ jobs:
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.quality.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.quality.sarif'
EXPECT_PRESENT: 'true'
with:
script: ${{ env.CHECK_SCRIPT }}

View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
analyze-ref-input:
strategy:
@@ -94,11 +83,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -107,13 +91,12 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -82,7 +82,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/autobuild
env:
# Explicitly disable the CLR tracer.
# Explicitly disable the CLR tracer.
COR_ENABLE_PROFILING: ''
COR_PROFILER: ''
COR_PROFILER_PATH_64: ''

View File

@@ -42,8 +42,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}}
group: autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}}
jobs:
autobuild-direct-tracing-with-working-dir:
strategy:

View File

@@ -97,7 +97,7 @@ jobs:
id: init
with:
build-mode: autobuild
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
build-mode-manual:
strategy:
@@ -92,7 +91,7 @@ jobs:
id: init
with:
build-mode: manual
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -64,7 +64,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -77,7 +77,7 @@ jobs:
exit 1
fi
# The latest nightly supports omitting the autobuild Action when the build mode is specified.
# The latest nightly supports omitting the autobuild Action when the build mode is specified.
- uses: ./../action/autobuild
if: matrix.version != 'nightly-latest'

View File

@@ -68,7 +68,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: java
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -66,7 +66,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
if: ${{ !contains(steps.init.outputs.codeql-version, '+') }}
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -82,7 +82,7 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -67,7 +67,7 @@ jobs:
id: init
with:
build-mode: none
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}

View File

@@ -67,18 +67,18 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check config properties appear in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -78,18 +78,18 @@ jobs:
--ready-for-status-page
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check diagnostics appear in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif
SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
export-file-baseline-information:
strategy:
@@ -101,12 +100,12 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check results
run: |

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
go-custom-queries:
strategy:

View File

@@ -77,7 +77,7 @@ jobs:
with:
languages: go
tools: ${{ steps.prepare-test.outputs.tools-url }}
# Deliberately change Go after the `init` step
# Deliberately change Go after the `init` step
- uses: actions/setup-go@v6
with:
go-version: '1.20'
@@ -85,12 +85,12 @@ jobs:
run: go build main.go
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check diagnostic appears in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/go.sarif
SARIF_PATH: '${{ runner.temp }}/results/go.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -42,8 +42,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}}
group: go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}}
jobs:
go-indirect-tracing-workaround-no-file-program:
strategy:
@@ -87,12 +86,12 @@ jobs:
run: go build main.go
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check diagnostic appears in SARIF
uses: actions/github-script@v8
env:
SARIF_PATH: ${{ runner.temp }}/results/go.sarif
SARIF_PATH: '${{ runner.temp }}/results/go.sarif'
with:
script: |
const fs = require('fs');

View File

@@ -50,7 +50,6 @@ jobs:
permissions:
contents: read
packages: read
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -66,7 +65,7 @@ jobs:
- name: Init with registries
uses: ./../action/init
with:
db-location: ${{ runner.temp }}/customDbLocation
db-location: '${{ runner.temp }}/customDbLocation'
tools: ${{ steps.prepare-test.outputs.tools-url }}
config-file: ./.github/codeql/codeql-config-registries.yml
languages: javascript

View File

@@ -65,12 +65,12 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
path: '${{ runner.temp }}/results/javascript.sarif'
retention-days: 7
- name: Check results
run: |

View File

@@ -63,7 +63,7 @@ jobs:
languages: C#,java-kotlin,swift,typescript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Check languages
- name: 'Check languages'
run: |
expected_languages="csharp,java,swift,javascript"
actual_languages=$(jq -r '.languages | join(",")' "$RUNNER_TEMP"/config)

20
.github/workflows/__local-bundle.yml generated vendored
View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
local-bundle:
strategy:
@@ -94,11 +83,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -109,7 +93,7 @@ jobs:
- id: init
uses: ./../action/init
with:
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ./codeql-bundle-linux64.tar.zst
- name: Build code

View File

@@ -62,8 +62,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
jobs:
multi-language-autodetect:
strategy:
@@ -144,9 +143,8 @@ jobs:
- uses: ./../action/init
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby'
|| '' }}
db-location: '${{ runner.temp }}/customDbLocation'
languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby' || '' }}
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code

View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-codescanning-config-inputs-js:
strategy:
@@ -105,18 +94,13 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -124,15 +108,14 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-config-inputs-js:
strategy:
@@ -101,7 +100,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -109,15 +108,14 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-config-js:
strategy:
@@ -101,22 +100,21 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging.yml
config-file: '.github/codeql/codeql-config-packaging.yml'
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
packaging-inputs-js:
strategy:
@@ -101,7 +100,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging2.yml
config-file: '.github/codeql/codeql-config-packaging2.yml'
languages: javascript
packs: codeql-testing/codeql-pack1@1.0.0, codeql-testing/codeql-pack2, codeql-testing/codeql-pack3:other-query.ql
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -109,14 +108,13 @@ jobs:
run: ./build.sh
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
- name: Check results
uses: ./../action/.github/actions/check-sarif
with:
sarif-file: ${{ runner.temp }}/results/javascript.sarif
queries-run:
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block
queries-not-run: foo,bar
- name: Assert Results

View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
remote-config:
strategy:
@@ -96,11 +85,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -109,8 +93,7 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
- uses: ./../action/analyze

View File

@@ -84,8 +84,7 @@ jobs:
language: javascript-typescript
- name: Fail if JavaScript/TypeScript configuration present
if:
fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript
if: fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -99,7 +99,7 @@ jobs:
dotnet-version: ${{ inputs.dotnet-version || '9.x' }}
- uses: ./../action/init
with:
config-file: .github/codeql/codeql-config-packaging3.yml
config-file: '.github/codeql/codeql-config-packaging3.yml'
packs: +codeql-testing/codeql-pack1@1.0.0
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
@@ -108,7 +108,7 @@ jobs:
- uses: ./../action/analyze
with:
skip-queries: true
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Assert No Results
@@ -119,7 +119,7 @@ jobs:
fi
- uses: ./../action/analyze
with:
output: ${{ runner.temp }}/results
output: '${{ runner.temp }}/results'
upload-database: false
- name: Assert Results
run: |

View File

@@ -71,8 +71,7 @@ jobs:
id: proxy
uses: ./../action/start-proxy
with:
registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json"
}]'
registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json" }]'
- name: Print proxy outputs
run: |
@@ -81,8 +80,7 @@ jobs:
echo "${{ steps.proxy.outputs.proxy_urls }}"
- name: Fail if proxy outputs are not set
if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port)
|| (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port) || (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls)
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true

View File

@@ -49,8 +49,7 @@ jobs:
if: github.triggering_actor != 'dependabot[bot]'
permissions:
contents: read
security-events: write # needed to upload the SARIF file
security-events: write
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
@@ -69,26 +68,20 @@ jobs:
languages: javascript
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Fail
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
# We want this job to pass if the Action correctly uploads the SARIF file for
# the failed run.
# Setting this step to continue on error means that it is marked as completing
# successfully, so will not fail the job.
continue-on-error: true
run: exit 1
- uses: ./analyze
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
# above, we manually disable it with an `if` condition.
if: false
with:
category: /test-codeql-version:${{ matrix.version }}
category: '/test-codeql-version:${{ matrix.version }}'
env:
# Internal-only environment variable used to indicate that the post-init Action
# should expect to upload a SARIF file for the failed run.
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
# Make sure the uploading SARIF files feature is enabled.
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
# Upload the failed SARIF file as an integration test of the API endpoint.
CODEQL_ACTION_TEST_MODE: false
# Mark telemetry for this workflow so it can be treated separately.
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks

View File

@@ -52,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
group: swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
swift-custom-build:
strategy:

View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
unset-environment:
strategy:
@@ -96,11 +85,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -109,7 +93,7 @@ jobs:
id: init
with:
db-location: ${{ runner.temp }}/customDbLocation
# Swift is not supported on Ubuntu so we manually exclude it from the list here
# Swift is not supported on Ubuntu so we manually exclude it from the list here
languages: cpp,csharp,go,java,javascript,python,ruby
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Build code

View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
upload-ref-sha-input:
strategy:
@@ -94,11 +83,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -107,19 +91,18 @@ jobs:
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: cpp,csharp,java,javascript,python
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{
github.sha }}
config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }}
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
- uses: ./../action/upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
env:
CODEQL_ACTION_TEST_MODE: true

56
.github/workflows/__upload-sarif.yml generated vendored
View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
upload-sarif:
strategy:
@@ -101,11 +90,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -117,11 +101,11 @@ jobs:
analysis-kinds: ${{ matrix.analysis-kinds }}
- name: Build code
run: ./build.sh
# Generate some SARIF we can upload with the upload-sarif step
# Generate some SARIF we can upload with the upload-sarif step
- uses: ./../action/analyze
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
upload: never
output: ${{ runner.temp }}/results
@@ -130,15 +114,15 @@ jobs:
uses: ./../action/upload-sarif
id: upload-sarif
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
- name: Fail for missing output from `upload-sarif` step for `code-scanning`
- name: 'Fail for missing output from `upload-sarif` step for `code-scanning`'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Fail for missing output from `upload-sarif` step for `code-quality`
- name: 'Fail for missing output from `upload-sarif` step for `code-quality`'
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
run: exit 1
@@ -147,28 +131,26 @@ jobs:
id: upload-single-sarif-code-scanning
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.sarif
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
- name: Fail for missing output from `upload-single-sarif-code-scanning` step
if: contains(matrix.analysis-kinds, 'code-scanning') &&
!(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
- name: 'Fail for missing output from `upload-single-sarif-code-scanning` step'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Upload single SARIF file for Code Quality
uses: ./../action/upload-sarif
id: upload-single-sarif-code-quality
if: contains(matrix.analysis-kinds, 'code-quality')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
- name: Fail for missing output from `upload-single-sarif-code-quality` step
if: contains(matrix.analysis-kinds, 'code-quality') &&
!(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
- name: 'Fail for missing output from `upload-single-sarif-code-quality` step'
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
run: exit 1
- name: Change SARIF file extension
@@ -179,12 +161,12 @@ jobs:
id: upload-single-non-sarif
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: refs/heads/main
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
sarif_file: ${{ runner.temp }}/results/javascript.sarif.json
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
- name: Fail for missing output from `upload-single-non-sarif` step
- name: 'Fail for missing output from `upload-single-non-sarif` step'
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
env:

View File

@@ -30,11 +30,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -47,11 +42,6 @@ on:
description: The version of Go to install
required: false
default: '>=1.21.0'
python-version:
type: string
description: The version of Python to install
required: false
default: '3.13'
dotnet-version:
type: string
description: The version of .NET to install
@@ -62,8 +52,7 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group:
with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}}
group: with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}}
jobs:
with-checkout-path:
strategy:
@@ -80,6 +69,7 @@ jobs:
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
# This ensures we don't accidentally use the original checkout for any part of the test.
- name: Check out repository
uses: actions/checkout@v6
- name: Prepare test
@@ -94,11 +84,6 @@ jobs:
with:
go-version: ${{ inputs.go-version || '>=1.21.0' }}
cache: false
- name: Install Python
if: matrix.version != 'nightly-latest'
uses: actions/setup-python@v6
with:
python-version: ${{ inputs.python-version || '3.13' }}
- name: Install .NET
uses: actions/setup-dotnet@v5
with:
@@ -109,8 +94,8 @@ jobs:
# Actions does not support deleting the current working directory, so we
# delete the contents of the directory instead.
rm -rf ./* .github .git
# Check out the actions repo again, but at a different location.
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
# Check out the actions repo again, but at a different location.
# choose an arbitrary SHA so that we can later test that the commit_oid is not from main
- uses: actions/checkout@v6
with:
ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6
@@ -119,7 +104,7 @@ jobs:
- uses: ./../action/init
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
# it's enough to test one compiled language and one interpreted language
# it's enough to test one compiled language and one interpreted language
languages: csharp,javascript
source-root: x/y/z/some-path/tests/multi-language-repo

View File

@@ -31,34 +31,29 @@ jobs:
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
steps:
- uses: actions/checkout@v6
- name: Init with default CodeQL bundle from the VM image
id: init-default
uses: ./init
with:
languages: javascript
- name: Remove empty database
# allows us to run init a second time
run: |
rm -rf "$RUNNER_TEMP/codeql_databases"
- name: Init with latest CodeQL bundle
id: init-latest
uses: ./init
- name: Set up default CodeQL bundle
id: setup-default
uses: ./setup-codeql
- name: Set up linked CodeQL bundle
id: setup-linked
uses: ./setup-codeql
with:
tools: linked
languages: javascript
- name: Compare default and latest CodeQL bundle versions
- name: Compare default and linked CodeQL bundle versions
id: compare
env:
CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }}
CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }}
CODEQL_DEFAULT: ${{ steps.setup-default.outputs.codeql-path }}
CODEQL_LINKED: ${{ steps.setup-linked.outputs.codeql-path }}
run: |
CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)"
CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)"
CODEQL_VERSION_LINKED="$("$CODEQL_LINKED" version --format terse)"
echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT"
echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST"
echo "Linked CodeQL bundle version is $CODEQL_VERSION_LINKED"
# If we're running on a pull request, run with both bundles, even if `tools: linked` would
# be the same as `tools: null`. This allows us to make the job for each of the bundles a
@@ -66,7 +61,7 @@ jobs:
#
# If we're running on push or schedule, then we can skip running with `tools: linked` when it would be
# the same as running with `tools: null`.
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then
if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "merge_group" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LINKED" ]]; then
VERSIONS_JSON='[null]'
else
VERSIONS_JSON='[null, "linked"]'
@@ -110,7 +105,7 @@ jobs:
uses: ./analyze
with:
category: "/language:javascript"
upload: ${{ (matrix.os == 'ubuntu-24.04' && !matrix.tools && 'always') || 'never' }}
upload: ${{ (matrix.os == 'ubuntu-24.04' && !matrix.tools && github.event_name != 'merge_group' && 'always' ) || 'never' }}
analyze-other:
if: github.triggering_actor != 'dependabot[bot]'
@@ -145,3 +140,4 @@ jobs:
uses: ./analyze
with:
category: "/language:${{ matrix.language }}"
upload: ${{ (github.event_name != 'merge_group' && 'always') || 'never' }}

View File

@@ -11,6 +11,8 @@ env:
CODEQL_ACTION_OVERLAY_ANALYSIS: true
CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false
CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true
CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_CHECK: false
CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS: true
on:
push:
@@ -80,7 +82,7 @@ jobs:
# On PRs, overlay analysis may change the config that is passed to the CLI.
# Therefore, we have two variants of the following test, one for PRs and one for other events.
- name: Empty file (non-PR)
if: github.event_name != 'pull_request' && github.event_name != 'merge_group'
if: github.event_name != 'pull_request'
uses: ./../action/.github/actions/check-codescanning-config
with:
expected-config-file-contents: "{}"
@@ -88,7 +90,7 @@ jobs:
tools: ${{ steps.prepare-test.outputs.tools-url }}
- name: Empty file (PR)
if: github.event_name == 'pull_request' || github.event_name == 'merge_group'
if: github.event_name == 'pull_request'
uses: ./../action/.github/actions/check-codescanning-config
with:
expected-config-file-contents: |

View File

@@ -41,6 +41,8 @@ jobs:
CODEQL_ACTION_TEST_MODE: true
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
timeout-minutes: 45
runs-on: ubuntu-latest
steps:
@@ -87,7 +89,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v8
- name: Check expected artifacts exist
run: |
LANGUAGES="cpp csharp go java javascript python"

View File

@@ -40,6 +40,8 @@ jobs:
timeout-minutes: 45
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
runs-on: ubuntu-latest
steps:
- name: Check out repository
@@ -81,7 +83,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v8
- name: Check expected artifacts exist
run: |
VERSIONS="stable-v2.20.3 default linked nightly-latest"

View File

@@ -42,11 +42,6 @@ jobs:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Install dependencies
run: |
# Use the system Bash shell to ensure we can run commands like `npm ci`
@@ -68,7 +63,7 @@ jobs:
- name: Run pr-checks tests
if: always()
working-directory: pr-checks
run: python -m unittest discover
run: npm ci && npx tsx --test
- name: Lint
if: always() && matrix.os != 'windows-latest'

View File

@@ -26,6 +26,8 @@ jobs:
timeout-minutes: 45
permissions:
contents: read
# We currently need `security-events: read` to access feature flags.
security-events: read
runs-on: windows-latest
steps:

View File

@@ -73,24 +73,17 @@ jobs:
npm run lint -- --fix
npm run build
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Sync back version updates to generated workflows
# Only sync back versions on Dependabot update PRs
if: startsWith(env.HEAD_REF, 'dependabot/')
working-directory: pr-checks
run: |
python3 sync_back.py -v
npm ci
npx tsx sync_back.ts --verbose
- name: Generate workflows
working-directory: pr-checks
run: |
python -m pip install --upgrade pip
pip install ruamel.yaml==0.17.31
python3 sync.py
run: ./sync.sh
- name: "Merge in progress: Finish merge and push"
if: steps.merge.outputs.merge-in-progress == 'true'

View File

@@ -29,7 +29,7 @@ fi
echo "Getting checks for $GITHUB_SHA"
# Ignore any checks with "https://", CodeQL, LGTM, Update, and ESLint checks.
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") or . == "Agent" or . == "Cleanup artifacts" or . == "Prepare" or . == "Upload results" | not)] | unique | sort')"
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") or . == "Agent" or . == "Cleanup artifacts" or . == "Prepare" or . == "Upload results" or . == "Label PR with size" | not)] | unique | sort')"
echo "$CHECKS" | jq

View File

@@ -19,7 +19,7 @@ if [ ! -z "$(git status --porcelain)" ]; then
# If we get a fail here then the PR needs attention
git diff
git status
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update"
>&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && ./sync.sh' to update"
echo "### Generated workflows diff" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY

View File

@@ -4,7 +4,22 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th
## [UNRELEASED]
No user facing changes.
- Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. [#3557](https://github.com/github/codeql-action/pull/3557)
- The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. [#3559](https://github.com/github/codeql-action/pull/3559)
## 4.32.6 - 05 Mar 2026
- Update default CodeQL bundle version to [2.24.3](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.3). [#3548](https://github.com/github/codeql-action/pull/3548)
## 4.32.5 - 02 Mar 2026
- Repositories owned by an organization can now set up the `github-codeql-disable-overlay` custom repository property to disable [improved incremental analysis for CodeQL](https://github.com/github/roadmap/issues/1158). First, create a custom repository property with the name `github-codeql-disable-overlay` and the type "True/false" in the organization's settings. Then in the repository's settings, set this property to `true` to disable improved incremental analysis. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). This feature is not yet available on GitHub Enterprise Server. [#3507](https://github.com/github/codeql-action/pull/3507)
- Added an experimental change so that when [improved incremental analysis](https://github.com/github/roadmap/issues/1158) fails on a runner — potentially due to insufficient disk space — the failure is recorded in the Actions cache so that subsequent runs will automatically skip improved incremental analysis until something changes (e.g. a larger runner is provisioned or a new CodeQL version is released). We expect to roll this change out to everyone in March. [#3487](https://github.com/github/codeql-action/pull/3487)
- The minimum memory check for improved incremental analysis is now skipped for CodeQL 2.24.3 and later, which has reduced peak RAM usage. [#3515](https://github.com/github/codeql-action/pull/3515)
- Reduced log levels for best-effort private package registry connection check failures to reduce noise from workflow annotations. [#3516](https://github.com/github/codeql-action/pull/3516)
- Added an experimental change which lowers the minimum disk space requirement for [improved incremental analysis](https://github.com/github/roadmap/issues/1158), enabling it to run on standard GitHub Actions runners. We expect to roll this change out to everyone in March. [#3498](https://github.com/github/codeql-action/pull/3498)
- Added an experimental change which allows the `start-proxy` action to resolve the CodeQL CLI version from feature flags instead of using the linked CLI bundle version. We expect to roll this change out to everyone in March. [#3512](https://github.com/github/codeql-action/pull/3512)
- The previously experimental changes from versions 4.32.3, 4.32.4, 3.32.3 and 3.32.4 are now enabled by default. [#3503](https://github.com/github/codeql-action/pull/3503), [#3504](https://github.com/github/codeql-action/pull/3504)
## 4.32.4 - 20 Feb 2026

View File

@@ -92,7 +92,7 @@ We typically deprecate a version of CodeQL when the GitHub Enterprise Server (GH
1. Remove support for the old version of CodeQL.
- Bump `CODEQL_MINIMUM_VERSION` in `src/codeql.ts` to the new minimum version of CodeQL.
- Remove any code that is only needed to support the old version of CodeQL. This is often behind a version guard, so look for instances of version numbers between the old minimum version and the new minimum version in the codebase. A good place to start is the list of version numbers in `src/codeql.ts`.
- Update the default set of CodeQL test versions in `pr-checks/sync.py`.
- Update the default set of CodeQL test versions in `pr-checks/sync.ts`.
- Remove the old minimum version of CodeQL.
- Add the latest patch release for any new CodeQL minor version series that have shipped in GHES.
- Run the script to update the generated PR checks.

View File

@@ -72,10 +72,12 @@ We typically release new minor versions of the CodeQL Action and Bundle when a n
| Minimum CodeQL Action | Minimum CodeQL Bundle Version | GitHub Environment | Notes |
|-----------------------|-------------------------------|--------------------|-------|
| `v3.28.21` | `2.21.3` | Enterprise Server 3.18 | |
| `v3.28.12` | `2.20.7` | Enterprise Server 3.17 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.16 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.15 | |
| `v4.31.10` | `2.23.9` | Enterprise Server 3.20 | |
| `v3.29.11` | `2.22.4` | Enterprise Server 3.19 | |
| `v3.28.21` | `2.21.3` | Enterprise Server 3.18 | |
| `v3.28.12` | `2.20.7` | Enterprise Server 3.17 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.16 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.15 | |
| `v3.28.6` | `2.20.3` | Enterprise Server 3.14 | |
See the full list of GHES release and deprecation dates at [GitHub Enterprise Server releases](https://docs.github.com/en/enterprise-server/admin/all-releases#releases-of-github-enterprise-server).

View File

@@ -21,6 +21,7 @@ export default [
"build.mjs",
"eslint.config.mjs",
".github/**/*",
"pr-checks/**/*",
],
},
// eslint recommended config

View File

@@ -159,6 +159,11 @@ inputs:
description: >-
Explicitly enable or disable caching of project build dependencies.
required: false
check-run-id:
description: >-
[Internal] The ID of the check run, as provided by the Actions runtime environment. Do not set this value manually.
default: ${{ job.check_run_id }}
required: false
outputs:
codeql-path:
description: The path of the CodeQL binary used for analysis

File diff suppressed because it is too large Load Diff

832
lib/analyze-action.js generated

File diff suppressed because it is too large Load Diff

548
lib/autobuild-action.js generated
View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.7",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.5.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^16.5.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({
pattern = pattern.split(path7.sep).join("/");
}
this.options = options;
this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200;
this.set = [];
this.pattern = pattern;
this.regexp = null;
@@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({
return this.negate;
};
Minimatch.prototype.matchOne = function(file, pattern, partial) {
var options = this.options;
this.debug(
"matchOne",
{ "this": this, file, pattern }
if (pattern.indexOf(GLOBSTAR) !== -1) {
return this._matchGlobstar(file, pattern, partial, 0, 0);
}
return this._matchOne(file, pattern, partial, 0, 0);
};
Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) {
var i;
var firstgs = -1;
for (i = patternIndex; i < pattern.length; i++) {
if (pattern[i] === GLOBSTAR) {
firstgs = i;
break;
}
}
var lastgs = -1;
for (i = pattern.length - 1; i >= 0; i--) {
if (pattern[i] === GLOBSTAR) {
lastgs = i;
break;
}
}
var head = pattern.slice(patternIndex, firstgs);
var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs);
var tail = partial ? [] : pattern.slice(lastgs + 1);
if (head.length) {
var fileHead = file.slice(fileIndex, fileIndex + head.length);
if (!this._matchOne(fileHead, head, partial, 0, 0)) {
return false;
}
fileIndex += head.length;
}
var fileTailMatch = 0;
if (tail.length) {
if (tail.length + fileIndex > file.length) return false;
var tailStart = file.length - tail.length;
if (this._matchOne(file, tail, partial, tailStart, 0)) {
fileTailMatch = tail.length;
} else {
if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) {
return false;
}
tailStart--;
if (!this._matchOne(file, tail, partial, tailStart, 0)) {
return false;
}
fileTailMatch = tail.length + 1;
}
}
if (!body.length) {
var sawSome = !!fileTailMatch;
for (i = fileIndex; i < file.length - fileTailMatch; i++) {
var f = String(file[i]);
sawSome = true;
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
}
return partial || sawSome;
}
var bodySegments = [[[], 0]];
var currentBody = bodySegments[0];
var nonGsParts = 0;
var nonGsPartsSums = [0];
for (var bi = 0; bi < body.length; bi++) {
var b = body[bi];
if (b === GLOBSTAR) {
nonGsPartsSums.push(nonGsParts);
currentBody = [[], 0];
bodySegments.push(currentBody);
} else {
currentBody[0].push(b);
nonGsParts++;
}
}
var idx = bodySegments.length - 1;
var fileLength = file.length - fileTailMatch;
for (var si = 0; si < bodySegments.length; si++) {
bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length);
}
return !!this._matchGlobStarBodySections(
file,
bodySegments,
fileIndex,
0,
partial,
0,
!!fileTailMatch
);
this.debug("matchOne", file.length, pattern.length);
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
};
Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
var bs = bodySegments[bodyIndex];
if (!bs) {
for (var i = fileIndex; i < file.length; i++) {
sawTail = true;
var f = file[i];
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
}
return sawTail;
}
var body = bs[0];
var after = bs[1];
while (fileIndex <= after) {
var m = this._matchOne(
file.slice(0, fileIndex + body.length),
body,
partial,
fileIndex,
0
);
if (m && globStarDepth < this.maxGlobstarRecursion) {
var sub = this._matchGlobStarBodySections(
file,
bodySegments,
fileIndex + body.length,
bodyIndex + 1,
partial,
globStarDepth + 1,
sawTail
);
if (sub !== false) {
return sub;
}
}
var f = file[fileIndex];
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
fileIndex++;
}
return partial || null;
};
Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) {
var fi, pi, fl, pl;
for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
this.debug("matchOne loop");
var p = pattern[pi];
var f = file[fi];
this.debug(pattern, p, f);
if (p === false) return false;
if (p === GLOBSTAR) {
this.debug("GLOBSTAR", [pattern, p, f]);
var fr = fi;
var pr = pi + 1;
if (pr === pl) {
this.debug("** at the end");
for (; fi < fl; fi++) {
if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false;
}
return true;
}
while (fr < fl) {
var swallowee = file[fr];
this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug("globstar found match!", fr, fl, swallowee);
return true;
} else {
if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
this.debug("dot detected!", file, fr, pattern, pr);
break;
}
this.debug("globstar swallow a segment, and continue");
fr++;
}
}
if (partial) {
this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
if (fr === fl) return true;
}
return false;
}
if (p === false || p === GLOBSTAR) return false;
var hit;
if (typeof p === "string") {
hit = f === p;
@@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({
}, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => {
"undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true });
} }, e = {};
t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt });
t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt });
const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$");
function s(t2, e2) {
const n2 = [];
@@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({
const n2 = [];
let i2 = false, s2 = false;
"\uFEFF" === t2[0] && (t2 = t2.substr(1));
for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) {
if (o2 += 2, o2 = u(t2, o2), o2.err) return o2;
for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) {
if (r2 += 2, r2 = u(t2, r2), r2.err) return r2;
} else {
if ("<" !== t2[o2]) {
if (l(t2[o2])) continue;
return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2));
if ("<" !== t2[r2]) {
if (l(t2[r2])) continue;
return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2));
}
{
let a2 = o2;
if (o2++, "!" === t2[o2]) {
o2 = h(t2, o2);
let o2 = r2;
if (r2++, "!" === t2[r2]) {
r2 = d(t2, r2);
continue;
}
{
let d2 = false;
"/" === t2[o2] && (d2 = true, o2++);
let p2 = "";
for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2];
if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) {
let a2 = false;
"/" === t2[r2] && (a2 = true, r2++);
let h2 = "";
for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2];
if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) {
let e3;
return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2));
return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2));
}
const c2 = f(t2, o2);
if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2));
let E2 = c2.value;
if (o2 = c2.index, "/" === E2[E2.length - 1]) {
const n3 = o2 - E2.length;
E2 = E2.substring(0, E2.length - 1);
const s3 = g(E2, e2);
if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line));
const p2 = c(t2, r2);
if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2));
let f2 = p2.value;
if (r2 = p2.index, "/" === f2[f2.length - 1]) {
const n3 = r2 - f2.length;
f2 = f2.substring(0, f2.length - 1);
const s3 = g(f2, e2);
if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line));
i2 = true;
} else if (d2) {
if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2));
if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2));
if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2));
} else if (a2) {
if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2));
if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2));
if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2));
{
const e3 = n2.pop();
if (p2 !== e3.tagName) {
let n3 = b(t2, e3.tagStartPos);
return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2));
if (h2 !== e3.tagName) {
let n3 = N(t2, e3.tagStartPos);
return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2));
}
0 == n2.length && (s2 = true);
}
} else {
const r2 = g(E2, e2);
if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line));
if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2));
-1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true;
const a3 = g(f2, e2);
if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line));
if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2));
-1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true;
}
for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) {
if ("!" === t2[o2 + 1]) {
o2++, o2 = h(t2, o2);
for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) {
if ("!" === t2[r2 + 1]) {
r2++, r2 = d(t2, r2);
continue;
}
if ("?" !== t2[o2 + 1]) break;
if (o2 = u(t2, ++o2), o2.err) return o2;
} else if ("&" === t2[o2]) {
const e3 = x(t2, o2);
if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2));
o2 = e3;
} else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2));
"<" === t2[o2] && o2--;
if ("?" !== t2[r2 + 1]) break;
if (r2 = u(t2, ++r2), r2.err) return r2;
} else if ("&" === t2[r2]) {
const e3 = x(t2, r2);
if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2));
r2 = e3;
} else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2));
"<" === t2[r2] && r2--;
}
}
}
return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1);
return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1);
}
function l(t2) {
return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2;
}
function u(t2, e2) {
const n2 = e2;
for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ;
else {
for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) {
const i2 = t2.substr(n2, e2 - n2);
if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2));
if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2));
if ("?" == t2[e2] && ">" == t2[e2 + 1]) {
e2++;
break;
}
continue;
}
return e2;
}
function h(t2, e2) {
function d(t2, e2) {
if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) {
for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) {
e2 += 2;
@@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({
}
return e2;
}
const d = '"', p = "'";
function f(t2, e2) {
const h = '"', p = "'";
function c(t2, e2) {
let n2 = "", i2 = "", s2 = false;
for (; e2 < t2.length; e2++) {
if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = "");
if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = "");
else if (">" === t2[e2] && "" === i2) {
s2 = true;
break;
@@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({
}
return "" === i2 && { value: n2, index: e2, tagClosed: s2 };
}
const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g");
const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g");
function g(t2, e2) {
const n2 = s(t2, c), i2 = {};
const n2 = s(t2, f), i2 = {};
for (let t3 = 0; t3 < n2.length; t3++) {
if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3]));
if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3]));
if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3]));
if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3]));
if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3]));
if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3]));
const s2 = n2[t3][2];
if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3]));
if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3]));
if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3]));
if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3]));
i2[s2] = 1;
}
return true;
@@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({
function E(t2) {
return r(t2);
}
function b(t2, e2) {
function b(t2) {
return r(t2);
}
function N(t2, e2) {
const n2 = t2.substring(0, e2).split(/\r?\n/);
return { line: n2.length, col: n2[n2.length - 1].length + 1 };
}
function N(t2) {
function y(t2) {
return t2.startIndex + t2[1].length;
}
const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) {
const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) {
return e2;
}, attributeValueProcessor: function(t2, e2) {
return e2;
}, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) {
return t2;
}, captureMetaData: false };
function T(t2) {
return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true);
}, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true };
function w(t2) {
return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true);
}
const w = function(t2) {
const e2 = Object.assign({}, y, t2);
return e2.processEntities = T(e2.processEntities), e2;
const v = function(t2) {
const e2 = Object.assign({}, T, t2);
return e2.processEntities = w(e2.processEntities), e2;
};
let v;
v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata");
let O;
O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata");
class I {
constructor(t2) {
this.tagname = t2, this.child = [], this[":@"] = {};
this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null);
}
add(t2, e2) {
"__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 });
}
addChild(t2, e2) {
"__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 });
"__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 });
}
static getMetaDataSymbol() {
return v;
return O;
}
}
class O {
class P {
constructor(t2) {
this.suppressValidationErr = !t2, this.options = t2;
}
readDocType(t2, e2) {
const n2 = {};
const n2 = /* @__PURE__ */ Object.create(null);
if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE");
{
e2 += 9;
@@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({
if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break;
} else "[" === t2[e2] ? s2 = true : o2 += t2[e2];
else {
if (s2 && A(t2, "!ENTITY", e2)) {
if (s2 && S(t2, "!ENTITY", e2)) {
let i3, s3;
if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) {
const t3 = i3.replace(/[.\-+*:]/g, "\\.");
n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 };
}
} else if (s2 && A(t2, "!ELEMENT", e2)) {
} else if (s2 && S(t2, "!ELEMENT", e2)) {
e2 += 8;
const { index: n3 } = this.readElementExp(t2, e2 + 1);
e2 = n3;
} else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8;
else if (s2 && A(t2, "!NOTATION", e2)) {
} else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8;
else if (s2 && S(t2, "!NOTATION", e2)) {
e2 += 9;
const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr);
e2 = n3;
} else {
if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE");
if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE");
r2 = true;
}
i2++, o2 = "";
@@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({
return { entities: n2, i: e2 };
}
readEntityExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++;
if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) {
if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) {
if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported");
if ("%" === t2[e2]) throw new Error("Parameter entities are not supported");
}
@@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({
return [n2, i2, --e2];
}
readNotationExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
!this.suppressValidationErr && S(n2), e2 = P(t2, e2);
!this.suppressValidationErr && C(n2), e2 = A(t2, e2);
const i2 = t2.substring(e2, e2 + 6).toUpperCase();
if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`);
e2 += i2.length, e2 = P(t2, e2);
e2 += i2.length, e2 = A(t2, e2);
let s2 = null, r2 = null;
if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"));
if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"));
else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation");
return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 };
}
@@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({
return [++e2, i2];
}
readElementExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`);
let i2 = "";
if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4;
else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2;
if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4;
else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2;
else if ("(" === t2[e2]) {
for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++;
if (")" !== t2[e2]) throw new Error("Unterminated content model");
@@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({
return { elementName: n2, contentModel: i2.trim(), index: e2 };
}
readAttlistExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
S(n2), e2 = P(t2, e2);
C(n2), e2 = A(t2, e2);
let i2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++;
if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`);
e2 = P(t2, e2);
if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`);
e2 = A(t2, e2);
let s2 = "";
if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) {
if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`);
if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`);
e2++;
let n3 = [];
for (; e2 < t2.length && ")" !== t2[e2]; ) {
let i3 = "";
for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++;
if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`);
n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2));
if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`);
n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2));
}
if (")" !== t2[e2]) throw new Error("Unterminated list of notations");
e2++, s2 += " (" + n3.join("|") + ")";
@@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({
const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"];
if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`);
}
e2 = P(t2, e2);
e2 = A(t2, e2);
let r2 = "";
return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 };
}
}
const P = (t2, e2) => {
const A = (t2, e2) => {
for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++;
return e2;
};
function A(t2, e2, n2) {
function S(t2, e2, n2) {
for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false;
return true;
}
function S(t2) {
function C(t2) {
if (r(t2)) return t2;
throw new Error(`Invalid entity name ${t2}`);
}
const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true };
const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/;
function L(t2) {
return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => {
for (const n2 of t2) {
if ("string" == typeof n2 && e2 === n2) return true;
if (n2 instanceof RegExp && n2.test(e2)) return true;
}
} : () => false;
}
class F {
const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true };
const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/;
class L {
constructor(t2) {
if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) {
var e2;
if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => {
for (const n2 of e2) {
if ("string" == typeof n2 && t3 === n2) return true;
if (n2 instanceof RegExp && n2.test(t3)) return true;
}
} : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) {
this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set();
for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) {
const e2 = this.options.stopNodes[t3];
"string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2));
const e3 = this.options.stopNodes[t3];
"string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3));
}
}
}
}
function j(t2) {
function F(t2) {
const e2 = Object.keys(t2);
for (let n2 = 0; n2 < e2.length; n2++) {
const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\.");
@@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({
return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2;
}
}
function _(t2) {
function k(t2) {
if (this.options.removeNSPrefix) {
const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : "";
if ("xmlns" === e2[0]) return "";
@@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({
}
return t2;
}
const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm");
const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm");
function U(t2, e2, n2) {
if (true !== this.options.ignoreAttributes && "string" == typeof t2) {
const i2 = s(t2, k), r2 = i2.length, o2 = {};
const i2 = s(t2, _), r2 = i2.length, o2 = {};
for (let t3 = 0; t3 < r2; t3++) {
const s2 = this.resolveNameSpace(i2[t3][1]);
if (this.ignoreAttributesFn(s2, e2)) continue;
@@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({
return o2;
}
}
const B = function(t2) {
const R = function(t2) {
t2 = t2.replace(/\r\n?/g, "\n");
const e2 = new I("!xml");
let n2 = e2, i2 = "", s2 = "";
this.entityExpansionCount = 0, this.currentExpandedLength = 0;
const r2 = new O(this.options.processEntities);
const r2 = new P(this.options.processEntities);
for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) {
const e3 = z(t2, ">", o2, "Closing Tag is not closed.");
let r3 = t2.substring(o2 + 2, e3).trim();
@@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({
} else {
let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName;
const l2 = r3.rawTagName;
let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex;
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex;
if (this.options.transformTagName) {
const t3 = this.options.transformTagName(a2);
u2 === a2 && (u2 = t3), a2 = t3;
}
if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`);
n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false));
const p2 = n2;
p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2);
const f2 = o2;
const c2 = o2;
if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) {
let e3 = "";
if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex;
else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex;
else {
const n3 = this.readStopNodeData(t2, l2, d2 + 1);
const n3 = this.readStopNodeData(t2, l2, h2 + 1);
if (!n3) throw new Error(`Unexpected end of ${l2}`);
o2 = n3.i, e3 = n3.tagContent;
}
const i3 = new I(a2);
a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2);
a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2);
} else {
if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) {
if ("/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) {
@@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({
u2 === a2 && (u2 = t4), a2 = t4;
}
const t3 = new I(a2);
a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf("."));
a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf("."));
} else {
const t3 = new I(a2);
this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3;
if (-1 !== this.options.unpairedTags.indexOf(a2)) {
const t3 = new I(a2);
a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex;
continue;
}
{
const t3 = new I(a2);
if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded");
this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3;
}
}
i2 = "", o2 = d2;
i2 = "", o2 = h2;
}
}
else i2 += t2[o2];
return e2.child;
};
function R(t2, e2, n2, i2) {
function B(t2, e2, n2, i2) {
this.options.captureMetaData || (i2 = void 0);
const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]);
false === s2 || ("string" == typeof s2 ? (e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2));
@@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({
const o2 = s2.index, a2 = r2.search(/\s/);
let l2 = r2, u2 = true;
-1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart());
const h2 = l2;
const d2 = l2;
if (n2) {
const t3 = l2.indexOf(":");
-1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1));
}
return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 };
return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 };
}
function q(t2, e2, n2) {
const i2 = n2;
@@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({
if (e2 && "string" == typeof t2) {
const e3 = t2.trim();
return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) {
if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3;
if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3;
let n3 = t3.trim();
if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3;
if ("0" === t3) return 0;
if (e4.hex && C.test(n3)) return (function(t4) {
if (e4.hex && $.test(n3)) return (function(t4) {
if (parseInt) return parseInt(t4, 16);
if (Number.parseInt) return Number.parseInt(t4, 16);
if (window && window.parseInt) return window.parseInt(t4, 16);
throw new Error("parseInt, Number.parseInt, window.parseInt are not supported");
})(n3);
if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) {
if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) {
if (!n4.eNotation) return t4;
const i3 = e5.match(D);
const i3 = e5.match(j);
if (i3) {
let s2 = i3[1] || "";
const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? t4[o2.length + 1] === r2 : t4[o2.length] === r2;
@@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({
return t4;
})(t3, n3, e4);
{
const s2 = $.exec(n3);
const s2 = V.exec(n3);
if (s2) {
const r2 = s2[1] || "", o2 = s2[2];
let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2;
@@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({
if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3;
{
const i3 = Number(n3), s3 = String(i3);
if (0 === i3 || -0 === i3) return i3;
if (0 === i3) return i3;
if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3;
if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3;
let l3 = o2 ? a2 : n3;
@@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({
if (o2[a2]) {
let t3 = H(o2[a2], e2, l2);
const n3 = nt(t3, e2);
void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3;
o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3;
}
}
}
@@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({
}
class it {
constructor(t2) {
this.externalEntities = {}, this.options = w(t2);
this.externalEntities = {}, this.options = v(t2);
}
parse(t2, e2) {
if ("string" != typeof t2 && t2.toString) t2 = t2.toString();
@@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({
const n3 = a(t2, e2);
if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`);
}
const n2 = new F(this.options);
const n2 = new L(this.options);
n2.addExternalEntities(this.externalEntities);
const i2 = n2.parseXml(t2);
return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options);
@@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({
}
function rt(t2, e2, n2, i2) {
let s2 = "", r2 = false;
if (!Array.isArray(t2)) {
if (null != t2) {
let n3 = t2.toString();
return n3 = ut(n3, e2), n3;
}
return "";
}
for (let o2 = 0; o2 < t2.length; o2++) {
const a2 = t2[o2], l2 = ot(a2);
if (void 0 === l2) continue;
@@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({
o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true;
continue;
}
let h2 = i2;
"" !== h2 && (h2 += e2.indentBy);
const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2);
-1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}</${l2}>` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("</")) ? s2 += i2 + e2.indentBy + p2 + i2 : s2 += p2, s2 += `</${l2}>`) : s2 += d2 + "/>", r2 = true;
let d2 = i2;
"" !== d2 && (d2 += e2.indentBy);
const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2);
-1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += h2 + `>${p2}${i2}</${l2}>` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("</")) ? s2 += i2 + e2.indentBy + p2 + i2 : s2 += p2, s2 += `</${l2}>`) : s2 += h2 + "/>", r2 = true;
}
return s2;
}
@@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({
const e2 = Object.keys(t2);
for (let n2 = 0; n2 < e2.length; n2++) {
const i2 = e2[n2];
if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2;
if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2;
}
}
function at(t2, e2) {
let n2 = "";
if (t2 && !e2.ignoreAttributes) for (let i2 in t2) {
if (!t2.hasOwnProperty(i2)) continue;
if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue;
let s2 = e2.attributeValueProcessor(i2, t2[i2]);
s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`;
}
@@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({
}
return t2;
}
const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) {
const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) {
return e2;
}, attributeValueProcessor: function(t2, e2) {
return e2;
}, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&amp;" }, { regex: new RegExp(">", "g"), val: "&gt;" }, { regex: new RegExp("<", "g"), val: "&lt;" }, { regex: new RegExp("'", "g"), val: "&apos;" }, { regex: new RegExp('"', "g"), val: "&quot;" }], processEntities: true, stopNodes: [], oneListGroup: false };
function dt(t2) {
this.options = Object.assign({}, ht, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() {
function ht(t2) {
var e2;
this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() {
return false;
} : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() {
} : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => {
for (const n2 of e2) {
if ("string" == typeof n2 && t3 === n2) return true;
if (n2 instanceof RegExp && n2.test(t3)) return true;
}
} : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() {
return "";
}, this.tagEndChar = ">", this.newLine = "");
}
@@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({
const s2 = this.j2x(t2, n2 + 1, i2.concat(e2));
return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2);
}
function ft(t2) {
function ct(t2) {
return this.options.indentBy.repeat(t2);
}
function ct(t2) {
function ft(t2) {
return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen);
}
dt.prototype.build = function(t2) {
ht.prototype.build = function(t2) {
return this.options.preserveOrder ? st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val);
}, dt.prototype.j2x = function(t2, e2, n2) {
}, ht.prototype.j2x = function(t2, e2, n2) {
let i2 = "", s2 = "";
const r2 = n2.join(".");
for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += "");
@@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({
for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]);
} else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2);
return { attrStr: i2, val: s2 };
}, dt.prototype.buildAttrPairStr = function(t2, e2) {
}, ht.prototype.buildAttrPairStr = function(t2, e2) {
return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"';
}, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) {
}, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) {
if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar;
{
let s2 = "</" + e2 + this.tagEndChar, r2 = "";
return "?" === e2[0] && (r2 = "?", s2 = ""), !n2 && "" !== n2 || -1 !== t2.indexOf("<") ? false !== this.options.commentPropName && e2 === this.options.commentPropName && 0 === r2.length ? this.indentate(i2) + `<!--${t2}-->` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2;
}
}, dt.prototype.closeTag = function(t2) {
}, ht.prototype.closeTag = function(t2) {
let e2 = "";
return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `></${t2}`, e2;
}, dt.prototype.buildTextValNode = function(t2, e2, n2, i2) {
}, ht.prototype.buildTextValNode = function(t2, e2, n2, i2) {
if (false !== this.options.cdataPropName && e2 === this.options.cdataPropName) return this.indentate(i2) + `<![CDATA[${t2}]]>` + this.newLine;
if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `<!--${t2}-->` + this.newLine;
if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar;
@@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({
let s2 = this.options.tagValueProcessor(e2, t2);
return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + "</" + e2 + this.tagEndChar;
}
}, dt.prototype.replaceEntitiesValue = function(t2) {
}, ht.prototype.replaceEntitiesValue = function(t2) {
if (t2 && t2.length > 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) {
const n2 = this.options.entities[e2];
t2 = t2.replace(n2.regex, n2.val);
}
return t2;
};
const gt = { validate: a };
const gt = ht, xt = { validate: a };
module2.exports = e;
})();
}
@@ -102870,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -103698,8 +103820,8 @@ var path3 = __toESM(require("path"));
var semver5 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs2 = __toESM(require("fs"));
@@ -104154,6 +104276,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -104165,11 +104292,6 @@ var featureConfig = {
minimumVersion: void 0,
toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */
},
["use_repository_properties_v2" /* UseRepositoryProperties */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES",
minimumVersion: void 0
},
["validate_db_config" /* ValidateDbConfig */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",

View File

@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.24.2",
"cliVersion": "2.24.2",
"priorBundleVersion": "codeql-bundle-v2.24.1",
"priorCliVersion": "2.24.1"
"bundleVersion": "codeql-bundle-v2.24.3",
"cliVersion": "2.24.3",
"priorBundleVersion": "codeql-bundle-v2.24.2",
"priorCliVersion": "2.24.2"
}

1819
lib/init-action-post.js generated

File diff suppressed because it is too large Load Diff

842
lib/init-action.js generated

File diff suppressed because it is too large Load Diff

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.7",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.5.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^16.5.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({
pattern = pattern.split(path5.sep).join("/");
}
this.options = options;
this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200;
this.set = [];
this.pattern = pattern;
this.regexp = null;
@@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({
return this.negate;
};
Minimatch.prototype.matchOne = function(file, pattern, partial) {
var options = this.options;
this.debug(
"matchOne",
{ "this": this, file, pattern }
if (pattern.indexOf(GLOBSTAR) !== -1) {
return this._matchGlobstar(file, pattern, partial, 0, 0);
}
return this._matchOne(file, pattern, partial, 0, 0);
};
Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) {
var i;
var firstgs = -1;
for (i = patternIndex; i < pattern.length; i++) {
if (pattern[i] === GLOBSTAR) {
firstgs = i;
break;
}
}
var lastgs = -1;
for (i = pattern.length - 1; i >= 0; i--) {
if (pattern[i] === GLOBSTAR) {
lastgs = i;
break;
}
}
var head = pattern.slice(patternIndex, firstgs);
var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs);
var tail = partial ? [] : pattern.slice(lastgs + 1);
if (head.length) {
var fileHead = file.slice(fileIndex, fileIndex + head.length);
if (!this._matchOne(fileHead, head, partial, 0, 0)) {
return false;
}
fileIndex += head.length;
}
var fileTailMatch = 0;
if (tail.length) {
if (tail.length + fileIndex > file.length) return false;
var tailStart = file.length - tail.length;
if (this._matchOne(file, tail, partial, tailStart, 0)) {
fileTailMatch = tail.length;
} else {
if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) {
return false;
}
tailStart--;
if (!this._matchOne(file, tail, partial, tailStart, 0)) {
return false;
}
fileTailMatch = tail.length + 1;
}
}
if (!body.length) {
var sawSome = !!fileTailMatch;
for (i = fileIndex; i < file.length - fileTailMatch; i++) {
var f = String(file[i]);
sawSome = true;
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
}
return partial || sawSome;
}
var bodySegments = [[[], 0]];
var currentBody = bodySegments[0];
var nonGsParts = 0;
var nonGsPartsSums = [0];
for (var bi = 0; bi < body.length; bi++) {
var b = body[bi];
if (b === GLOBSTAR) {
nonGsPartsSums.push(nonGsParts);
currentBody = [[], 0];
bodySegments.push(currentBody);
} else {
currentBody[0].push(b);
nonGsParts++;
}
}
var idx = bodySegments.length - 1;
var fileLength = file.length - fileTailMatch;
for (var si = 0; si < bodySegments.length; si++) {
bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length);
}
return !!this._matchGlobStarBodySections(
file,
bodySegments,
fileIndex,
0,
partial,
0,
!!fileTailMatch
);
this.debug("matchOne", file.length, pattern.length);
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
};
Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
var bs = bodySegments[bodyIndex];
if (!bs) {
for (var i = fileIndex; i < file.length; i++) {
sawTail = true;
var f = file[i];
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
}
return sawTail;
}
var body = bs[0];
var after = bs[1];
while (fileIndex <= after) {
var m = this._matchOne(
file.slice(0, fileIndex + body.length),
body,
partial,
fileIndex,
0
);
if (m && globStarDepth < this.maxGlobstarRecursion) {
var sub = this._matchGlobStarBodySections(
file,
bodySegments,
fileIndex + body.length,
bodyIndex + 1,
partial,
globStarDepth + 1,
sawTail
);
if (sub !== false) {
return sub;
}
}
var f = file[fileIndex];
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
fileIndex++;
}
return partial || null;
};
Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) {
var fi, pi, fl, pl;
for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
this.debug("matchOne loop");
var p = pattern[pi];
var f = file[fi];
this.debug(pattern, p, f);
if (p === false) return false;
if (p === GLOBSTAR) {
this.debug("GLOBSTAR", [pattern, p, f]);
var fr = fi;
var pr = pi + 1;
if (pr === pl) {
this.debug("** at the end");
for (; fi < fl; fi++) {
if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false;
}
return true;
}
while (fr < fl) {
var swallowee = file[fr];
this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug("globstar found match!", fr, fl, swallowee);
return true;
} else {
if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
this.debug("dot detected!", file, fr, pattern, pr);
break;
}
this.debug("globstar swallow a segment, and continue");
fr++;
}
}
if (partial) {
this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
if (fr === fl) return true;
}
return false;
}
if (p === false || p === GLOBSTAR) return false;
var hit;
if (typeof p === "string") {
hit = f === p;
@@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({
}, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => {
"undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true });
} }, e = {};
t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt });
t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt });
const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$");
function s(t2, e2) {
const n2 = [];
@@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({
const n2 = [];
let i2 = false, s2 = false;
"\uFEFF" === t2[0] && (t2 = t2.substr(1));
for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) {
if (o2 += 2, o2 = u(t2, o2), o2.err) return o2;
for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) {
if (r2 += 2, r2 = u(t2, r2), r2.err) return r2;
} else {
if ("<" !== t2[o2]) {
if (l(t2[o2])) continue;
return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2));
if ("<" !== t2[r2]) {
if (l(t2[r2])) continue;
return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2));
}
{
let a2 = o2;
if (o2++, "!" === t2[o2]) {
o2 = h(t2, o2);
let o2 = r2;
if (r2++, "!" === t2[r2]) {
r2 = d(t2, r2);
continue;
}
{
let d2 = false;
"/" === t2[o2] && (d2 = true, o2++);
let p2 = "";
for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2];
if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) {
let a2 = false;
"/" === t2[r2] && (a2 = true, r2++);
let h2 = "";
for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2];
if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) {
let e3;
return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2));
return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2));
}
const c2 = f(t2, o2);
if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2));
let E2 = c2.value;
if (o2 = c2.index, "/" === E2[E2.length - 1]) {
const n3 = o2 - E2.length;
E2 = E2.substring(0, E2.length - 1);
const s3 = g(E2, e2);
if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line));
const p2 = c(t2, r2);
if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2));
let f2 = p2.value;
if (r2 = p2.index, "/" === f2[f2.length - 1]) {
const n3 = r2 - f2.length;
f2 = f2.substring(0, f2.length - 1);
const s3 = g(f2, e2);
if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line));
i2 = true;
} else if (d2) {
if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2));
if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2));
if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2));
} else if (a2) {
if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2));
if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2));
if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2));
{
const e3 = n2.pop();
if (p2 !== e3.tagName) {
let n3 = b(t2, e3.tagStartPos);
return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2));
if (h2 !== e3.tagName) {
let n3 = N(t2, e3.tagStartPos);
return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2));
}
0 == n2.length && (s2 = true);
}
} else {
const r2 = g(E2, e2);
if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line));
if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2));
-1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true;
const a3 = g(f2, e2);
if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line));
if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2));
-1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true;
}
for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) {
if ("!" === t2[o2 + 1]) {
o2++, o2 = h(t2, o2);
for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) {
if ("!" === t2[r2 + 1]) {
r2++, r2 = d(t2, r2);
continue;
}
if ("?" !== t2[o2 + 1]) break;
if (o2 = u(t2, ++o2), o2.err) return o2;
} else if ("&" === t2[o2]) {
const e3 = x(t2, o2);
if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2));
o2 = e3;
} else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2));
"<" === t2[o2] && o2--;
if ("?" !== t2[r2 + 1]) break;
if (r2 = u(t2, ++r2), r2.err) return r2;
} else if ("&" === t2[r2]) {
const e3 = x(t2, r2);
if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2));
r2 = e3;
} else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2));
"<" === t2[r2] && r2--;
}
}
}
return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1);
return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1);
}
function l(t2) {
return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2;
}
function u(t2, e2) {
const n2 = e2;
for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ;
else {
for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) {
const i2 = t2.substr(n2, e2 - n2);
if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2));
if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2));
if ("?" == t2[e2] && ">" == t2[e2 + 1]) {
e2++;
break;
}
continue;
}
return e2;
}
function h(t2, e2) {
function d(t2, e2) {
if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) {
for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) {
e2 += 2;
@@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({
}
return e2;
}
const d = '"', p = "'";
function f(t2, e2) {
const h = '"', p = "'";
function c(t2, e2) {
let n2 = "", i2 = "", s2 = false;
for (; e2 < t2.length; e2++) {
if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = "");
if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = "");
else if (">" === t2[e2] && "" === i2) {
s2 = true;
break;
@@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({
}
return "" === i2 && { value: n2, index: e2, tagClosed: s2 };
}
const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g");
const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g");
function g(t2, e2) {
const n2 = s(t2, c), i2 = {};
const n2 = s(t2, f), i2 = {};
for (let t3 = 0; t3 < n2.length; t3++) {
if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3]));
if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3]));
if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3]));
if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3]));
if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3]));
if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3]));
const s2 = n2[t3][2];
if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3]));
if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3]));
if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3]));
if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3]));
i2[s2] = 1;
}
return true;
@@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({
function E(t2) {
return r(t2);
}
function b(t2, e2) {
function b(t2) {
return r(t2);
}
function N(t2, e2) {
const n2 = t2.substring(0, e2).split(/\r?\n/);
return { line: n2.length, col: n2[n2.length - 1].length + 1 };
}
function N(t2) {
function y(t2) {
return t2.startIndex + t2[1].length;
}
const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) {
const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) {
return e2;
}, attributeValueProcessor: function(t2, e2) {
return e2;
}, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) {
return t2;
}, captureMetaData: false };
function T(t2) {
return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true);
}, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true };
function w(t2) {
return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true);
}
const w = function(t2) {
const e2 = Object.assign({}, y, t2);
return e2.processEntities = T(e2.processEntities), e2;
const v = function(t2) {
const e2 = Object.assign({}, T, t2);
return e2.processEntities = w(e2.processEntities), e2;
};
let v;
v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata");
let O;
O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata");
class I {
constructor(t2) {
this.tagname = t2, this.child = [], this[":@"] = {};
this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null);
}
add(t2, e2) {
"__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 });
}
addChild(t2, e2) {
"__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 });
"__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 });
}
static getMetaDataSymbol() {
return v;
return O;
}
}
class O {
class P {
constructor(t2) {
this.suppressValidationErr = !t2, this.options = t2;
}
readDocType(t2, e2) {
const n2 = {};
const n2 = /* @__PURE__ */ Object.create(null);
if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE");
{
e2 += 9;
@@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({
if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break;
} else "[" === t2[e2] ? s2 = true : o2 += t2[e2];
else {
if (s2 && A(t2, "!ENTITY", e2)) {
if (s2 && S(t2, "!ENTITY", e2)) {
let i3, s3;
if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) {
const t3 = i3.replace(/[.\-+*:]/g, "\\.");
n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 };
}
} else if (s2 && A(t2, "!ELEMENT", e2)) {
} else if (s2 && S(t2, "!ELEMENT", e2)) {
e2 += 8;
const { index: n3 } = this.readElementExp(t2, e2 + 1);
e2 = n3;
} else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8;
else if (s2 && A(t2, "!NOTATION", e2)) {
} else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8;
else if (s2 && S(t2, "!NOTATION", e2)) {
e2 += 9;
const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr);
e2 = n3;
} else {
if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE");
if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE");
r2 = true;
}
i2++, o2 = "";
@@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({
return { entities: n2, i: e2 };
}
readEntityExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++;
if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) {
if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) {
if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported");
if ("%" === t2[e2]) throw new Error("Parameter entities are not supported");
}
@@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({
return [n2, i2, --e2];
}
readNotationExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
!this.suppressValidationErr && S(n2), e2 = P(t2, e2);
!this.suppressValidationErr && C(n2), e2 = A(t2, e2);
const i2 = t2.substring(e2, e2 + 6).toUpperCase();
if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`);
e2 += i2.length, e2 = P(t2, e2);
e2 += i2.length, e2 = A(t2, e2);
let s2 = null, r2 = null;
if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"));
if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"));
else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation");
return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 };
}
@@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({
return [++e2, i2];
}
readElementExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`);
let i2 = "";
if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4;
else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2;
if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4;
else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2;
else if ("(" === t2[e2]) {
for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++;
if (")" !== t2[e2]) throw new Error("Unterminated content model");
@@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({
return { elementName: n2, contentModel: i2.trim(), index: e2 };
}
readAttlistExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
S(n2), e2 = P(t2, e2);
C(n2), e2 = A(t2, e2);
let i2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++;
if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`);
e2 = P(t2, e2);
if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`);
e2 = A(t2, e2);
let s2 = "";
if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) {
if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`);
if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`);
e2++;
let n3 = [];
for (; e2 < t2.length && ")" !== t2[e2]; ) {
let i3 = "";
for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++;
if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`);
n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2));
if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`);
n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2));
}
if (")" !== t2[e2]) throw new Error("Unterminated list of notations");
e2++, s2 += " (" + n3.join("|") + ")";
@@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({
const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"];
if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`);
}
e2 = P(t2, e2);
e2 = A(t2, e2);
let r2 = "";
return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 };
}
}
const P = (t2, e2) => {
const A = (t2, e2) => {
for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++;
return e2;
};
function A(t2, e2, n2) {
function S(t2, e2, n2) {
for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false;
return true;
}
function S(t2) {
function C(t2) {
if (r(t2)) return t2;
throw new Error(`Invalid entity name ${t2}`);
}
const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true };
const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/;
function L(t2) {
return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => {
for (const n2 of t2) {
if ("string" == typeof n2 && e2 === n2) return true;
if (n2 instanceof RegExp && n2.test(e2)) return true;
}
} : () => false;
}
class F {
const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true };
const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/;
class L {
constructor(t2) {
if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) {
var e2;
if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => {
for (const n2 of e2) {
if ("string" == typeof n2 && t3 === n2) return true;
if (n2 instanceof RegExp && n2.test(t3)) return true;
}
} : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) {
this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set();
for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) {
const e2 = this.options.stopNodes[t3];
"string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2));
const e3 = this.options.stopNodes[t3];
"string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3));
}
}
}
}
function j(t2) {
function F(t2) {
const e2 = Object.keys(t2);
for (let n2 = 0; n2 < e2.length; n2++) {
const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\.");
@@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({
return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2;
}
}
function _(t2) {
function k(t2) {
if (this.options.removeNSPrefix) {
const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : "";
if ("xmlns" === e2[0]) return "";
@@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({
}
return t2;
}
const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm");
const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm");
function U(t2, e2, n2) {
if (true !== this.options.ignoreAttributes && "string" == typeof t2) {
const i2 = s(t2, k), r2 = i2.length, o2 = {};
const i2 = s(t2, _), r2 = i2.length, o2 = {};
for (let t3 = 0; t3 < r2; t3++) {
const s2 = this.resolveNameSpace(i2[t3][1]);
if (this.ignoreAttributesFn(s2, e2)) continue;
@@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({
return o2;
}
}
const B = function(t2) {
const R = function(t2) {
t2 = t2.replace(/\r\n?/g, "\n");
const e2 = new I("!xml");
let n2 = e2, i2 = "", s2 = "";
this.entityExpansionCount = 0, this.currentExpandedLength = 0;
const r2 = new O(this.options.processEntities);
const r2 = new P(this.options.processEntities);
for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) {
const e3 = z(t2, ">", o2, "Closing Tag is not closed.");
let r3 = t2.substring(o2 + 2, e3).trim();
@@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({
} else {
let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName;
const l2 = r3.rawTagName;
let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex;
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex;
if (this.options.transformTagName) {
const t3 = this.options.transformTagName(a2);
u2 === a2 && (u2 = t3), a2 = t3;
}
if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`);
n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false));
const p2 = n2;
p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2);
const f2 = o2;
const c2 = o2;
if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) {
let e3 = "";
if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex;
else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex;
else {
const n3 = this.readStopNodeData(t2, l2, d2 + 1);
const n3 = this.readStopNodeData(t2, l2, h2 + 1);
if (!n3) throw new Error(`Unexpected end of ${l2}`);
o2 = n3.i, e3 = n3.tagContent;
}
const i3 = new I(a2);
a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2);
a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2);
} else {
if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) {
if ("/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) {
@@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({
u2 === a2 && (u2 = t4), a2 = t4;
}
const t3 = new I(a2);
a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf("."));
a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf("."));
} else {
const t3 = new I(a2);
this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3;
if (-1 !== this.options.unpairedTags.indexOf(a2)) {
const t3 = new I(a2);
a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex;
continue;
}
{
const t3 = new I(a2);
if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded");
this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3;
}
}
i2 = "", o2 = d2;
i2 = "", o2 = h2;
}
}
else i2 += t2[o2];
return e2.child;
};
function R(t2, e2, n2, i2) {
function B(t2, e2, n2, i2) {
this.options.captureMetaData || (i2 = void 0);
const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]);
false === s2 || ("string" == typeof s2 ? (e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2));
@@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({
const o2 = s2.index, a2 = r2.search(/\s/);
let l2 = r2, u2 = true;
-1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart());
const h2 = l2;
const d2 = l2;
if (n2) {
const t3 = l2.indexOf(":");
-1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1));
}
return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 };
return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 };
}
function q(t2, e2, n2) {
const i2 = n2;
@@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({
if (e2 && "string" == typeof t2) {
const e3 = t2.trim();
return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) {
if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3;
if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3;
let n3 = t3.trim();
if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3;
if ("0" === t3) return 0;
if (e4.hex && C.test(n3)) return (function(t4) {
if (e4.hex && $.test(n3)) return (function(t4) {
if (parseInt) return parseInt(t4, 16);
if (Number.parseInt) return Number.parseInt(t4, 16);
if (window && window.parseInt) return window.parseInt(t4, 16);
throw new Error("parseInt, Number.parseInt, window.parseInt are not supported");
})(n3);
if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) {
if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) {
if (!n4.eNotation) return t4;
const i3 = e5.match(D);
const i3 = e5.match(j);
if (i3) {
let s2 = i3[1] || "";
const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? t4[o2.length + 1] === r2 : t4[o2.length] === r2;
@@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({
return t4;
})(t3, n3, e4);
{
const s2 = $.exec(n3);
const s2 = V.exec(n3);
if (s2) {
const r2 = s2[1] || "", o2 = s2[2];
let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2;
@@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({
if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3;
{
const i3 = Number(n3), s3 = String(i3);
if (0 === i3 || -0 === i3) return i3;
if (0 === i3) return i3;
if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3;
if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3;
let l3 = o2 ? a2 : n3;
@@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({
if (o2[a2]) {
let t3 = H(o2[a2], e2, l2);
const n3 = nt(t3, e2);
void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3;
o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3;
}
}
}
@@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({
}
class it {
constructor(t2) {
this.externalEntities = {}, this.options = w(t2);
this.externalEntities = {}, this.options = v(t2);
}
parse(t2, e2) {
if ("string" != typeof t2 && t2.toString) t2 = t2.toString();
@@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({
const n3 = a(t2, e2);
if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`);
}
const n2 = new F(this.options);
const n2 = new L(this.options);
n2.addExternalEntities(this.externalEntities);
const i2 = n2.parseXml(t2);
return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options);
@@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({
}
function rt(t2, e2, n2, i2) {
let s2 = "", r2 = false;
if (!Array.isArray(t2)) {
if (null != t2) {
let n3 = t2.toString();
return n3 = ut(n3, e2), n3;
}
return "";
}
for (let o2 = 0; o2 < t2.length; o2++) {
const a2 = t2[o2], l2 = ot(a2);
if (void 0 === l2) continue;
@@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({
o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true;
continue;
}
let h2 = i2;
"" !== h2 && (h2 += e2.indentBy);
const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2);
-1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}</${l2}>` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("</")) ? s2 += i2 + e2.indentBy + p2 + i2 : s2 += p2, s2 += `</${l2}>`) : s2 += d2 + "/>", r2 = true;
let d2 = i2;
"" !== d2 && (d2 += e2.indentBy);
const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2);
-1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += h2 + `>${p2}${i2}</${l2}>` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("</")) ? s2 += i2 + e2.indentBy + p2 + i2 : s2 += p2, s2 += `</${l2}>`) : s2 += h2 + "/>", r2 = true;
}
return s2;
}
@@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({
const e2 = Object.keys(t2);
for (let n2 = 0; n2 < e2.length; n2++) {
const i2 = e2[n2];
if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2;
if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2;
}
}
function at(t2, e2) {
let n2 = "";
if (t2 && !e2.ignoreAttributes) for (let i2 in t2) {
if (!t2.hasOwnProperty(i2)) continue;
if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue;
let s2 = e2.attributeValueProcessor(i2, t2[i2]);
s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`;
}
@@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({
}
return t2;
}
const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) {
const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) {
return e2;
}, attributeValueProcessor: function(t2, e2) {
return e2;
}, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&amp;" }, { regex: new RegExp(">", "g"), val: "&gt;" }, { regex: new RegExp("<", "g"), val: "&lt;" }, { regex: new RegExp("'", "g"), val: "&apos;" }, { regex: new RegExp('"', "g"), val: "&quot;" }], processEntities: true, stopNodes: [], oneListGroup: false };
function dt(t2) {
this.options = Object.assign({}, ht, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() {
function ht(t2) {
var e2;
this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() {
return false;
} : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() {
} : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => {
for (const n2 of e2) {
if ("string" == typeof n2 && t3 === n2) return true;
if (n2 instanceof RegExp && n2.test(t3)) return true;
}
} : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() {
return "";
}, this.tagEndChar = ">", this.newLine = "");
}
@@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({
const s2 = this.j2x(t2, n2 + 1, i2.concat(e2));
return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2);
}
function ft(t2) {
function ct(t2) {
return this.options.indentBy.repeat(t2);
}
function ct(t2) {
function ft(t2) {
return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen);
}
dt.prototype.build = function(t2) {
ht.prototype.build = function(t2) {
return this.options.preserveOrder ? st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val);
}, dt.prototype.j2x = function(t2, e2, n2) {
}, ht.prototype.j2x = function(t2, e2, n2) {
let i2 = "", s2 = "";
const r2 = n2.join(".");
for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += "");
@@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({
for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]);
} else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2);
return { attrStr: i2, val: s2 };
}, dt.prototype.buildAttrPairStr = function(t2, e2) {
}, ht.prototype.buildAttrPairStr = function(t2, e2) {
return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"';
}, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) {
}, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) {
if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar;
{
let s2 = "</" + e2 + this.tagEndChar, r2 = "";
return "?" === e2[0] && (r2 = "?", s2 = ""), !n2 && "" !== n2 || -1 !== t2.indexOf("<") ? false !== this.options.commentPropName && e2 === this.options.commentPropName && 0 === r2.length ? this.indentate(i2) + `<!--${t2}-->` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2;
}
}, dt.prototype.closeTag = function(t2) {
}, ht.prototype.closeTag = function(t2) {
let e2 = "";
return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `></${t2}`, e2;
}, dt.prototype.buildTextValNode = function(t2, e2, n2, i2) {
}, ht.prototype.buildTextValNode = function(t2, e2, n2, i2) {
if (false !== this.options.cdataPropName && e2 === this.options.cdataPropName) return this.indentate(i2) + `<![CDATA[${t2}]]>` + this.newLine;
if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `<!--${t2}-->` + this.newLine;
if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar;
@@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({
let s2 = this.options.tagValueProcessor(e2, t2);
return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + "</" + e2 + this.tagEndChar;
}
}, dt.prototype.replaceEntitiesValue = function(t2) {
}, ht.prototype.replaceEntitiesValue = function(t2) {
if (t2 && t2.length > 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) {
const n2 = this.options.entities[e2];
t2 = t2.replace(n2.regex, n2.val);
}
return t2;
};
const gt = { validate: a };
const gt = ht, xt = { validate: a };
module2.exports = e;
})();
}
@@ -102870,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -104145,6 +104267,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -104156,11 +104283,6 @@ var featureConfig = {
minimumVersion: void 0,
toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */
},
["use_repository_properties_v2" /* UseRepositoryProperties */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES",
minimumVersion: void 0
},
["validate_db_config" /* ValidateDbConfig */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",

View File

@@ -45986,7 +45986,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.32.5",
version: "4.32.7",
private: true,
description: "CodeQL action",
scripts: {
@@ -45995,7 +45995,7 @@ var require_package = __commonJS({
lint: "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
ava: "npm run transpile && ava --serial --verbose",
ava: "npm run transpile && ava --verbose",
test: "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
transpile: "tsc --build --verbose"
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -46052,14 +46053,14 @@ var require_package = __commonJS({
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.5.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
glob: "^11.1.0",
globals: "^16.5.0",
globals: "^17.3.0",
nock: "^14.0.11",
sinon: "^21.0.1",
typescript: "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
overrides: {
"@actions/tool-cache": {
@@ -48064,6 +48065,7 @@ var require_minimatch = __commonJS({
pattern = pattern.split(path9.sep).join("/");
}
this.options = options;
this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200;
this.set = [];
this.pattern = pattern;
this.regexp = null;
@@ -48460,50 +48462,147 @@ var require_minimatch = __commonJS({
return this.negate;
};
Minimatch.prototype.matchOne = function(file, pattern, partial) {
var options = this.options;
this.debug(
"matchOne",
{ "this": this, file, pattern }
if (pattern.indexOf(GLOBSTAR) !== -1) {
return this._matchGlobstar(file, pattern, partial, 0, 0);
}
return this._matchOne(file, pattern, partial, 0, 0);
};
Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) {
var i;
var firstgs = -1;
for (i = patternIndex; i < pattern.length; i++) {
if (pattern[i] === GLOBSTAR) {
firstgs = i;
break;
}
}
var lastgs = -1;
for (i = pattern.length - 1; i >= 0; i--) {
if (pattern[i] === GLOBSTAR) {
lastgs = i;
break;
}
}
var head = pattern.slice(patternIndex, firstgs);
var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs);
var tail = partial ? [] : pattern.slice(lastgs + 1);
if (head.length) {
var fileHead = file.slice(fileIndex, fileIndex + head.length);
if (!this._matchOne(fileHead, head, partial, 0, 0)) {
return false;
}
fileIndex += head.length;
}
var fileTailMatch = 0;
if (tail.length) {
if (tail.length + fileIndex > file.length) return false;
var tailStart = file.length - tail.length;
if (this._matchOne(file, tail, partial, tailStart, 0)) {
fileTailMatch = tail.length;
} else {
if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) {
return false;
}
tailStart--;
if (!this._matchOne(file, tail, partial, tailStart, 0)) {
return false;
}
fileTailMatch = tail.length + 1;
}
}
if (!body.length) {
var sawSome = !!fileTailMatch;
for (i = fileIndex; i < file.length - fileTailMatch; i++) {
var f = String(file[i]);
sawSome = true;
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
}
return partial || sawSome;
}
var bodySegments = [[[], 0]];
var currentBody = bodySegments[0];
var nonGsParts = 0;
var nonGsPartsSums = [0];
for (var bi = 0; bi < body.length; bi++) {
var b = body[bi];
if (b === GLOBSTAR) {
nonGsPartsSums.push(nonGsParts);
currentBody = [[], 0];
bodySegments.push(currentBody);
} else {
currentBody[0].push(b);
nonGsParts++;
}
}
var idx = bodySegments.length - 1;
var fileLength = file.length - fileTailMatch;
for (var si = 0; si < bodySegments.length; si++) {
bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length);
}
return !!this._matchGlobStarBodySections(
file,
bodySegments,
fileIndex,
0,
partial,
0,
!!fileTailMatch
);
this.debug("matchOne", file.length, pattern.length);
for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
};
Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) {
var bs = bodySegments[bodyIndex];
if (!bs) {
for (var i = fileIndex; i < file.length; i++) {
sawTail = true;
var f = file[i];
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
}
return sawTail;
}
var body = bs[0];
var after = bs[1];
while (fileIndex <= after) {
var m = this._matchOne(
file.slice(0, fileIndex + body.length),
body,
partial,
fileIndex,
0
);
if (m && globStarDepth < this.maxGlobstarRecursion) {
var sub = this._matchGlobStarBodySections(
file,
bodySegments,
fileIndex + body.length,
bodyIndex + 1,
partial,
globStarDepth + 1,
sawTail
);
if (sub !== false) {
return sub;
}
}
var f = file[fileIndex];
if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") {
return false;
}
fileIndex++;
}
return partial || null;
};
Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) {
var fi, pi, fl, pl;
for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
this.debug("matchOne loop");
var p = pattern[pi];
var f = file[fi];
this.debug(pattern, p, f);
if (p === false) return false;
if (p === GLOBSTAR) {
this.debug("GLOBSTAR", [pattern, p, f]);
var fr = fi;
var pr = pi + 1;
if (pr === pl) {
this.debug("** at the end");
for (; fi < fl; fi++) {
if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false;
}
return true;
}
while (fr < fl) {
var swallowee = file[fr];
this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
this.debug("globstar found match!", fr, fl, swallowee);
return true;
} else {
if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
this.debug("dot detected!", file, fr, pattern, pr);
break;
}
this.debug("globstar swallow a segment, and continue");
fr++;
}
}
if (partial) {
this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
if (fr === fl) return true;
}
return false;
}
if (p === false || p === GLOBSTAR) return false;
var hit;
if (typeof p === "string") {
hit = f === p;
@@ -60543,7 +60642,7 @@ var require_fxp = __commonJS({
}, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => {
"undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true });
} }, e = {};
t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt });
t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt });
const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$");
function s(t2, e2) {
const n2 = [];
@@ -60565,90 +60664,90 @@ var require_fxp = __commonJS({
const n2 = [];
let i2 = false, s2 = false;
"\uFEFF" === t2[0] && (t2 = t2.substr(1));
for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) {
if (o2 += 2, o2 = u(t2, o2), o2.err) return o2;
for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) {
if (r2 += 2, r2 = u(t2, r2), r2.err) return r2;
} else {
if ("<" !== t2[o2]) {
if (l(t2[o2])) continue;
return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2));
if ("<" !== t2[r2]) {
if (l(t2[r2])) continue;
return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2));
}
{
let a2 = o2;
if (o2++, "!" === t2[o2]) {
o2 = h(t2, o2);
let o2 = r2;
if (r2++, "!" === t2[r2]) {
r2 = d(t2, r2);
continue;
}
{
let d2 = false;
"/" === t2[o2] && (d2 = true, o2++);
let p2 = "";
for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2];
if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) {
let a2 = false;
"/" === t2[r2] && (a2 = true, r2++);
let h2 = "";
for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2];
if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) {
let e3;
return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2));
return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2));
}
const c2 = f(t2, o2);
if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2));
let E2 = c2.value;
if (o2 = c2.index, "/" === E2[E2.length - 1]) {
const n3 = o2 - E2.length;
E2 = E2.substring(0, E2.length - 1);
const s3 = g(E2, e2);
if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line));
const p2 = c(t2, r2);
if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2));
let f2 = p2.value;
if (r2 = p2.index, "/" === f2[f2.length - 1]) {
const n3 = r2 - f2.length;
f2 = f2.substring(0, f2.length - 1);
const s3 = g(f2, e2);
if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line));
i2 = true;
} else if (d2) {
if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2));
if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2));
if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2));
} else if (a2) {
if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2));
if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2));
if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2));
{
const e3 = n2.pop();
if (p2 !== e3.tagName) {
let n3 = b(t2, e3.tagStartPos);
return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2));
if (h2 !== e3.tagName) {
let n3 = N(t2, e3.tagStartPos);
return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2));
}
0 == n2.length && (s2 = true);
}
} else {
const r2 = g(E2, e2);
if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line));
if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2));
-1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true;
const a3 = g(f2, e2);
if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line));
if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2));
-1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true;
}
for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) {
if ("!" === t2[o2 + 1]) {
o2++, o2 = h(t2, o2);
for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) {
if ("!" === t2[r2 + 1]) {
r2++, r2 = d(t2, r2);
continue;
}
if ("?" !== t2[o2 + 1]) break;
if (o2 = u(t2, ++o2), o2.err) return o2;
} else if ("&" === t2[o2]) {
const e3 = x(t2, o2);
if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2));
o2 = e3;
} else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2));
"<" === t2[o2] && o2--;
if ("?" !== t2[r2 + 1]) break;
if (r2 = u(t2, ++r2), r2.err) return r2;
} else if ("&" === t2[r2]) {
const e3 = x(t2, r2);
if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2));
r2 = e3;
} else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2));
"<" === t2[r2] && r2--;
}
}
}
return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1);
return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1);
}
function l(t2) {
return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2;
}
function u(t2, e2) {
const n2 = e2;
for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ;
else {
for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) {
const i2 = t2.substr(n2, e2 - n2);
if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2));
if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2));
if ("?" == t2[e2] && ">" == t2[e2 + 1]) {
e2++;
break;
}
continue;
}
return e2;
}
function h(t2, e2) {
function d(t2, e2) {
if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) {
for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) {
e2 += 2;
@@ -60666,11 +60765,11 @@ var require_fxp = __commonJS({
}
return e2;
}
const d = '"', p = "'";
function f(t2, e2) {
const h = '"', p = "'";
function c(t2, e2) {
let n2 = "", i2 = "", s2 = false;
for (; e2 < t2.length; e2++) {
if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = "");
if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = "");
else if (">" === t2[e2] && "" === i2) {
s2 = true;
break;
@@ -60679,16 +60778,16 @@ var require_fxp = __commonJS({
}
return "" === i2 && { value: n2, index: e2, tagClosed: s2 };
}
const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g");
const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g");
function g(t2, e2) {
const n2 = s(t2, c), i2 = {};
const n2 = s(t2, f), i2 = {};
for (let t3 = 0; t3 < n2.length; t3++) {
if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3]));
if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3]));
if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3]));
if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3]));
if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3]));
if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3]));
const s2 = n2[t3][2];
if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3]));
if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3]));
if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3]));
if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3]));
i2[s2] = 1;
}
return true;
@@ -60716,49 +60815,52 @@ var require_fxp = __commonJS({
function E(t2) {
return r(t2);
}
function b(t2, e2) {
function b(t2) {
return r(t2);
}
function N(t2, e2) {
const n2 = t2.substring(0, e2).split(/\r?\n/);
return { line: n2.length, col: n2[n2.length - 1].length + 1 };
}
function N(t2) {
function y(t2) {
return t2.startIndex + t2[1].length;
}
const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) {
const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) {
return e2;
}, attributeValueProcessor: function(t2, e2) {
return e2;
}, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) {
return t2;
}, captureMetaData: false };
function T(t2) {
return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true);
}, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true };
function w(t2) {
return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true);
}
const w = function(t2) {
const e2 = Object.assign({}, y, t2);
return e2.processEntities = T(e2.processEntities), e2;
const v = function(t2) {
const e2 = Object.assign({}, T, t2);
return e2.processEntities = w(e2.processEntities), e2;
};
let v;
v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata");
let O;
O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata");
class I {
constructor(t2) {
this.tagname = t2, this.child = [], this[":@"] = {};
this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null);
}
add(t2, e2) {
"__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 });
}
addChild(t2, e2) {
"__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 });
"__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 });
}
static getMetaDataSymbol() {
return v;
return O;
}
}
class O {
class P {
constructor(t2) {
this.suppressValidationErr = !t2, this.options = t2;
}
readDocType(t2, e2) {
const n2 = {};
const n2 = /* @__PURE__ */ Object.create(null);
if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE");
{
e2 += 9;
@@ -60767,23 +60869,23 @@ var require_fxp = __commonJS({
if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break;
} else "[" === t2[e2] ? s2 = true : o2 += t2[e2];
else {
if (s2 && A(t2, "!ENTITY", e2)) {
if (s2 && S(t2, "!ENTITY", e2)) {
let i3, s3;
if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) {
const t3 = i3.replace(/[.\-+*:]/g, "\\.");
n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 };
}
} else if (s2 && A(t2, "!ELEMENT", e2)) {
} else if (s2 && S(t2, "!ELEMENT", e2)) {
e2 += 8;
const { index: n3 } = this.readElementExp(t2, e2 + 1);
e2 = n3;
} else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8;
else if (s2 && A(t2, "!NOTATION", e2)) {
} else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8;
else if (s2 && S(t2, "!NOTATION", e2)) {
e2 += 9;
const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr);
e2 = n3;
} else {
if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE");
if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE");
r2 = true;
}
i2++, o2 = "";
@@ -60793,10 +60895,10 @@ var require_fxp = __commonJS({
return { entities: n2, i: e2 };
}
readEntityExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++;
if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) {
if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) {
if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported");
if ("%" === t2[e2]) throw new Error("Parameter entities are not supported");
}
@@ -60805,15 +60907,15 @@ var require_fxp = __commonJS({
return [n2, i2, --e2];
}
readNotationExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
!this.suppressValidationErr && S(n2), e2 = P(t2, e2);
!this.suppressValidationErr && C(n2), e2 = A(t2, e2);
const i2 = t2.substring(e2, e2 + 6).toUpperCase();
if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`);
e2 += i2.length, e2 = P(t2, e2);
e2 += i2.length, e2 = A(t2, e2);
let s2 = null, r2 = null;
if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"));
if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"));
else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation");
return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 };
}
@@ -60826,13 +60928,13 @@ var require_fxp = __commonJS({
return [++e2, i2];
}
readElementExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`);
let i2 = "";
if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4;
else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2;
if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4;
else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2;
else if ("(" === t2[e2]) {
for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++;
if (")" !== t2[e2]) throw new Error("Unterminated content model");
@@ -60840,24 +60942,24 @@ var require_fxp = __commonJS({
return { elementName: n2, contentModel: i2.trim(), index: e2 };
}
readAttlistExp(t2, e2) {
e2 = P(t2, e2);
e2 = A(t2, e2);
let n2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++;
S(n2), e2 = P(t2, e2);
C(n2), e2 = A(t2, e2);
let i2 = "";
for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++;
if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`);
e2 = P(t2, e2);
if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`);
e2 = A(t2, e2);
let s2 = "";
if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) {
if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`);
if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`);
e2++;
let n3 = [];
for (; e2 < t2.length && ")" !== t2[e2]; ) {
let i3 = "";
for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++;
if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`);
n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2));
if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`);
n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2));
}
if (")" !== t2[e2]) throw new Error("Unterminated list of notations");
e2++, s2 += " (" + n3.join("|") + ")";
@@ -60866,45 +60968,43 @@ var require_fxp = __commonJS({
const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"];
if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`);
}
e2 = P(t2, e2);
e2 = A(t2, e2);
let r2 = "";
return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 };
}
}
const P = (t2, e2) => {
const A = (t2, e2) => {
for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++;
return e2;
};
function A(t2, e2, n2) {
function S(t2, e2, n2) {
for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false;
return true;
}
function S(t2) {
function C(t2) {
if (r(t2)) return t2;
throw new Error(`Invalid entity name ${t2}`);
}
const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true };
const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/;
function L(t2) {
return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => {
for (const n2 of t2) {
if ("string" == typeof n2 && e2 === n2) return true;
if (n2 instanceof RegExp && n2.test(e2)) return true;
}
} : () => false;
}
class F {
const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true };
const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/;
class L {
constructor(t2) {
if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) {
var e2;
if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => {
for (const n2 of e2) {
if ("string" == typeof n2 && t3 === n2) return true;
if (n2 instanceof RegExp && n2.test(t3)) return true;
}
} : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) {
this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set();
for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) {
const e2 = this.options.stopNodes[t3];
"string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2));
const e3 = this.options.stopNodes[t3];
"string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3));
}
}
}
}
function j(t2) {
function F(t2) {
const e2 = Object.keys(t2);
for (let n2 = 0; n2 < e2.length; n2++) {
const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\.");
@@ -60918,7 +61018,7 @@ var require_fxp = __commonJS({
return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2;
}
}
function _(t2) {
function k(t2) {
if (this.options.removeNSPrefix) {
const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : "";
if ("xmlns" === e2[0]) return "";
@@ -60926,10 +61026,10 @@ var require_fxp = __commonJS({
}
return t2;
}
const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm");
const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm");
function U(t2, e2, n2) {
if (true !== this.options.ignoreAttributes && "string" == typeof t2) {
const i2 = s(t2, k), r2 = i2.length, o2 = {};
const i2 = s(t2, _), r2 = i2.length, o2 = {};
for (let t3 = 0; t3 < r2; t3++) {
const s2 = this.resolveNameSpace(i2[t3][1]);
if (this.ignoreAttributesFn(s2, e2)) continue;
@@ -60948,12 +61048,12 @@ var require_fxp = __commonJS({
return o2;
}
}
const B = function(t2) {
const R = function(t2) {
t2 = t2.replace(/\r\n?/g, "\n");
const e2 = new I("!xml");
let n2 = e2, i2 = "", s2 = "";
this.entityExpansionCount = 0, this.currentExpandedLength = 0;
const r2 = new O(this.options.processEntities);
const r2 = new P(this.options.processEntities);
for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) {
const e3 = z(t2, ">", o2, "Closing Tag is not closed.");
let r3 = t2.substring(o2 + 2, e3).trim();
@@ -60993,26 +61093,27 @@ var require_fxp = __commonJS({
} else {
let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName;
const l2 = r3.rawTagName;
let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex;
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex;
if (this.options.transformTagName) {
const t3 = this.options.transformTagName(a2);
u2 === a2 && (u2 = t3), a2 = t3;
}
if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`);
n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false));
const p2 = n2;
p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2);
const f2 = o2;
const c2 = o2;
if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) {
let e3 = "";
if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex;
else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex;
else {
const n3 = this.readStopNodeData(t2, l2, d2 + 1);
const n3 = this.readStopNodeData(t2, l2, h2 + 1);
if (!n3) throw new Error(`Unexpected end of ${l2}`);
o2 = n3.i, e3 = n3.tagContent;
}
const i3 = new I(a2);
a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2);
a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2);
} else {
if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) {
if ("/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) {
@@ -61020,18 +61121,26 @@ var require_fxp = __commonJS({
u2 === a2 && (u2 = t4), a2 = t4;
}
const t3 = new I(a2);
a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf("."));
a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf("."));
} else {
const t3 = new I(a2);
this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3;
if (-1 !== this.options.unpairedTags.indexOf(a2)) {
const t3 = new I(a2);
a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex;
continue;
}
{
const t3 = new I(a2);
if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded");
this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3;
}
}
i2 = "", o2 = d2;
i2 = "", o2 = h2;
}
}
else i2 += t2[o2];
return e2.child;
};
function R(t2, e2, n2, i2) {
function B(t2, e2, n2, i2) {
this.options.captureMetaData || (i2 = void 0);
const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]);
false === s2 || ("string" == typeof s2 ? (e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2));
@@ -61092,12 +61201,12 @@ var require_fxp = __commonJS({
const o2 = s2.index, a2 = r2.search(/\s/);
let l2 = r2, u2 = true;
-1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart());
const h2 = l2;
const d2 = l2;
if (n2) {
const t3 = l2.indexOf(":");
-1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1));
}
return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 };
return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 };
}
function q(t2, e2, n2) {
const i2 = n2;
@@ -61118,19 +61227,19 @@ var require_fxp = __commonJS({
if (e2 && "string" == typeof t2) {
const e3 = t2.trim();
return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) {
if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3;
if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3;
let n3 = t3.trim();
if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3;
if ("0" === t3) return 0;
if (e4.hex && C.test(n3)) return (function(t4) {
if (e4.hex && $.test(n3)) return (function(t4) {
if (parseInt) return parseInt(t4, 16);
if (Number.parseInt) return Number.parseInt(t4, 16);
if (window && window.parseInt) return window.parseInt(t4, 16);
throw new Error("parseInt, Number.parseInt, window.parseInt are not supported");
})(n3);
if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) {
if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) {
if (!n4.eNotation) return t4;
const i3 = e5.match(D);
const i3 = e5.match(j);
if (i3) {
let s2 = i3[1] || "";
const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? t4[o2.length + 1] === r2 : t4[o2.length] === r2;
@@ -61139,7 +61248,7 @@ var require_fxp = __commonJS({
return t4;
})(t3, n3, e4);
{
const s2 = $.exec(n3);
const s2 = V.exec(n3);
if (s2) {
const r2 = s2[1] || "", o2 = s2[2];
let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2;
@@ -61147,7 +61256,7 @@ var require_fxp = __commonJS({
if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3;
{
const i3 = Number(n3), s3 = String(i3);
if (0 === i3 || -0 === i3) return i3;
if (0 === i3) return i3;
if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3;
if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3;
let l3 = o2 ? a2 : n3;
@@ -61181,7 +61290,7 @@ var require_fxp = __commonJS({
if (o2[a2]) {
let t3 = H(o2[a2], e2, l2);
const n3 = nt(t3, e2);
void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3;
o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3;
}
}
}
@@ -61209,7 +61318,7 @@ var require_fxp = __commonJS({
}
class it {
constructor(t2) {
this.externalEntities = {}, this.options = w(t2);
this.externalEntities = {}, this.options = v(t2);
}
parse(t2, e2) {
if ("string" != typeof t2 && t2.toString) t2 = t2.toString();
@@ -61219,7 +61328,7 @@ var require_fxp = __commonJS({
const n3 = a(t2, e2);
if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`);
}
const n2 = new F(this.options);
const n2 = new L(this.options);
n2.addExternalEntities(this.externalEntities);
const i2 = n2.parseXml(t2);
return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options);
@@ -61240,6 +61349,13 @@ var require_fxp = __commonJS({
}
function rt(t2, e2, n2, i2) {
let s2 = "", r2 = false;
if (!Array.isArray(t2)) {
if (null != t2) {
let n3 = t2.toString();
return n3 = ut(n3, e2), n3;
}
return "";
}
for (let o2 = 0; o2 < t2.length; o2++) {
const a2 = t2[o2], l2 = ot(a2);
if (void 0 === l2) continue;
@@ -61263,10 +61379,10 @@ var require_fxp = __commonJS({
o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true;
continue;
}
let h2 = i2;
"" !== h2 && (h2 += e2.indentBy);
const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2);
-1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}</${l2}>` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("</")) ? s2 += i2 + e2.indentBy + p2 + i2 : s2 += p2, s2 += `</${l2}>`) : s2 += d2 + "/>", r2 = true;
let d2 = i2;
"" !== d2 && (d2 += e2.indentBy);
const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2);
-1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += h2 + `>${p2}${i2}</${l2}>` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("</")) ? s2 += i2 + e2.indentBy + p2 + i2 : s2 += p2, s2 += `</${l2}>`) : s2 += h2 + "/>", r2 = true;
}
return s2;
}
@@ -61274,13 +61390,13 @@ var require_fxp = __commonJS({
const e2 = Object.keys(t2);
for (let n2 = 0; n2 < e2.length; n2++) {
const i2 = e2[n2];
if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2;
if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2;
}
}
function at(t2, e2) {
let n2 = "";
if (t2 && !e2.ignoreAttributes) for (let i2 in t2) {
if (!t2.hasOwnProperty(i2)) continue;
if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue;
let s2 = e2.attributeValueProcessor(i2, t2[i2]);
s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`;
}
@@ -61298,15 +61414,21 @@ var require_fxp = __commonJS({
}
return t2;
}
const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) {
const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) {
return e2;
}, attributeValueProcessor: function(t2, e2) {
return e2;
}, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&amp;" }, { regex: new RegExp(">", "g"), val: "&gt;" }, { regex: new RegExp("<", "g"), val: "&lt;" }, { regex: new RegExp("'", "g"), val: "&apos;" }, { regex: new RegExp('"', "g"), val: "&quot;" }], processEntities: true, stopNodes: [], oneListGroup: false };
function dt(t2) {
this.options = Object.assign({}, ht, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() {
function ht(t2) {
var e2;
this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() {
return false;
} : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() {
} : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => {
for (const n2 of e2) {
if ("string" == typeof n2 && t3 === n2) return true;
if (n2 instanceof RegExp && n2.test(t3)) return true;
}
} : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() {
return "";
}, this.tagEndChar = ">", this.newLine = "");
}
@@ -61314,15 +61436,15 @@ var require_fxp = __commonJS({
const s2 = this.j2x(t2, n2 + 1, i2.concat(e2));
return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2);
}
function ft(t2) {
function ct(t2) {
return this.options.indentBy.repeat(t2);
}
function ct(t2) {
function ft(t2) {
return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen);
}
dt.prototype.build = function(t2) {
ht.prototype.build = function(t2) {
return this.options.preserveOrder ? st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val);
}, dt.prototype.j2x = function(t2, e2, n2) {
}, ht.prototype.j2x = function(t2, e2, n2) {
let i2 = "", s2 = "";
const r2 = n2.join(".");
for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += "");
@@ -61357,18 +61479,18 @@ var require_fxp = __commonJS({
for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]);
} else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2);
return { attrStr: i2, val: s2 };
}, dt.prototype.buildAttrPairStr = function(t2, e2) {
}, ht.prototype.buildAttrPairStr = function(t2, e2) {
return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"';
}, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) {
}, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) {
if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar;
{
let s2 = "</" + e2 + this.tagEndChar, r2 = "";
return "?" === e2[0] && (r2 = "?", s2 = ""), !n2 && "" !== n2 || -1 !== t2.indexOf("<") ? false !== this.options.commentPropName && e2 === this.options.commentPropName && 0 === r2.length ? this.indentate(i2) + `<!--${t2}-->` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2;
}
}, dt.prototype.closeTag = function(t2) {
}, ht.prototype.closeTag = function(t2) {
let e2 = "";
return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `></${t2}`, e2;
}, dt.prototype.buildTextValNode = function(t2, e2, n2, i2) {
}, ht.prototype.buildTextValNode = function(t2, e2, n2, i2) {
if (false !== this.options.cdataPropName && e2 === this.options.cdataPropName) return this.indentate(i2) + `<![CDATA[${t2}]]>` + this.newLine;
if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `<!--${t2}-->` + this.newLine;
if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar;
@@ -61376,14 +61498,14 @@ var require_fxp = __commonJS({
let s2 = this.options.tagValueProcessor(e2, t2);
return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + "</" + e2 + this.tagEndChar;
}
}, dt.prototype.replaceEntitiesValue = function(t2) {
}, ht.prototype.replaceEntitiesValue = function(t2) {
if (t2 && t2.length > 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) {
const n2 = this.options.entities[e2];
t2 = t2.replace(n2.regex, n2.val);
}
return t2;
};
const gt = { validate: a };
const gt = ht, xt = { validate: a };
module2.exports = e;
})();
}
@@ -102926,7 +103048,7 @@ var safeDump = renamed("safeDump", "dump");
var semver = __toESM(require_semver2());
// src/api-compatibility.json
var maximumVersion = "3.20";
var maximumVersion = "3.21";
var minimumVersion = "3.14";
// src/util.ts
@@ -103557,8 +103679,8 @@ var path4 = __toESM(require("path"));
var semver4 = __toESM(require_semver2());
// src/defaults.json
var bundleVersion = "codeql-bundle-v2.24.2";
var cliVersion = "2.24.2";
var bundleVersion = "codeql-bundle-v2.24.3";
var cliVersion = "2.24.3";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
@@ -104042,6 +104164,11 @@ var featureConfig = {
// cannot be found when interpreting results.
minimumVersion: void 0
},
["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES",
minimumVersion: void 0
},
["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE",
@@ -104053,11 +104180,6 @@ var featureConfig = {
minimumVersion: void 0,
toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */
},
["use_repository_properties_v2" /* UseRepositoryProperties */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES",
minimumVersion: void 0
},
["validate_db_config" /* ValidateDbConfig */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

838
lib/upload-lib.js generated

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

260
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "4.32.5",
"version": "4.32.7",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "4.32.5",
"version": "4.32.7",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^5.0.3",
@@ -43,6 +43,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
@@ -51,14 +52,14 @@
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.5.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
"glob": "^11.1.0",
"globals": "^16.5.0",
"globals": "^17.3.0",
"nock": "^14.0.11",
"sinon": "^21.0.1",
"typescript": "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
}
},
"node_modules/@aashutoshrathi/word-wrap": {
@@ -849,17 +850,17 @@
}
},
"node_modules/@es-joy/jsdoccomment": {
"version": "0.83.0",
"resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.83.0.tgz",
"integrity": "sha512-e1MHSEPJ4m35zkBvNT6kcdeH1SvMaJDsPC3Xhfseg3hvF50FUE3f46Yn36jgbrPYYXezlWUQnevv23c+lx2MCA==",
"version": "0.84.0",
"resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.84.0.tgz",
"integrity": "sha512-0xew1CxOam0gV5OMjh2KjFQZsKL2bByX1+q4j3E73MpYIdyUxcZb/xQct9ccUb+ve5KGUYbCUxyPnYB7RbuP+w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/estree": "^1.0.8",
"@typescript-eslint/types": "^8.53.1",
"@typescript-eslint/types": "^8.54.0",
"comment-parser": "1.4.5",
"esquery": "^1.7.0",
"jsdoc-type-pratt-parser": "~7.1.0"
"jsdoc-type-pratt-parser": "~7.1.1"
},
"engines": {
"node": "^20.19.0 || ^22.13.0 || >=24"
@@ -2522,6 +2523,13 @@
"@types/node": "*"
}
},
"node_modules/@types/sarif": {
"version": "2.1.7",
"resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz",
"integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/semver": {
"version": "7.7.1",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.1.tgz",
@@ -2545,17 +2553,17 @@
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz",
"integrity": "sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz",
"integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.12.2",
"@typescript-eslint/scope-manager": "8.56.0",
"@typescript-eslint/type-utils": "8.56.0",
"@typescript-eslint/utils": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0",
"@typescript-eslint/scope-manager": "8.56.1",
"@typescript-eslint/type-utils": "8.56.1",
"@typescript-eslint/utils": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1",
"ignore": "^7.0.5",
"natural-compare": "^1.4.0",
"ts-api-utils": "^2.4.0"
@@ -2568,7 +2576,7 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^8.56.0",
"@typescript-eslint/parser": "^8.56.1",
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
@@ -2584,16 +2592,16 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.0.tgz",
"integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz",
"integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/scope-manager": "8.56.0",
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0",
"@typescript-eslint/scope-manager": "8.56.1",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1",
"debug": "^4.4.3"
},
"engines": {
@@ -2627,14 +2635,14 @@
}
},
"node_modules/@typescript-eslint/project-service": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.0.tgz",
"integrity": "sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz",
"integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/tsconfig-utils": "^8.56.0",
"@typescript-eslint/types": "^8.56.0",
"@typescript-eslint/tsconfig-utils": "^8.56.1",
"@typescript-eslint/types": "^8.56.1",
"debug": "^4.4.3"
},
"engines": {
@@ -2667,14 +2675,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.0.tgz",
"integrity": "sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz",
"integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0"
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2685,9 +2693,9 @@
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.0.tgz",
"integrity": "sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz",
"integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2702,15 +2710,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.0.tgz",
"integrity": "sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz",
"integrity": "sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0",
"@typescript-eslint/utils": "8.56.0",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1",
"@typescript-eslint/utils": "8.56.1",
"debug": "^4.4.3",
"ts-api-utils": "^2.4.0"
},
@@ -2745,9 +2753,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz",
"integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz",
"integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2759,18 +2767,18 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.0.tgz",
"integrity": "sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz",
"integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/project-service": "8.56.0",
"@typescript-eslint/tsconfig-utils": "8.56.0",
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/visitor-keys": "8.56.0",
"@typescript-eslint/project-service": "8.56.1",
"@typescript-eslint/tsconfig-utils": "8.56.1",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/visitor-keys": "8.56.1",
"debug": "^4.4.3",
"minimatch": "^9.0.5",
"minimatch": "^10.2.2",
"semver": "^7.7.3",
"tinyglobby": "^0.2.15",
"ts-api-utils": "^2.4.0"
@@ -2797,9 +2805,9 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz",
"integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2828,32 +2836,32 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
"version": "9.0.6",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.6.tgz",
"integrity": "sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==",
"version": "10.2.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"dev": true,
"license": "ISC",
"license": "BlueOak-1.0.0",
"dependencies": {
"brace-expansion": "^5.0.2"
},
"engines": {
"node": ">=16 || 14 >=14.17"
"node": "18 || 20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/@typescript-eslint/utils": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.0.tgz",
"integrity": "sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz",
"integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.9.1",
"@typescript-eslint/scope-manager": "8.56.0",
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0"
"@typescript-eslint/scope-manager": "8.56.1",
"@typescript-eslint/types": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2868,13 +2876,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.0.tgz",
"integrity": "sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz",
"integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.56.0",
"@typescript-eslint/types": "8.56.1",
"eslint-visitor-keys": "^5.0.0"
},
"engines": {
@@ -2886,9 +2894,9 @@
}
},
"node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.0.tgz",
"integrity": "sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==",
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
"integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
"dev": true,
"license": "Apache-2.0",
"engines": {
@@ -5011,6 +5019,19 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
"node_modules/eslint-plugin-github/node_modules/globals": {
"version": "16.5.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz",
"integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/eslint-plugin-i18n-text": {
"version": "1.0.1",
"dev": true,
@@ -5114,9 +5135,9 @@
}
},
"node_modules/eslint-plugin-import-x/node_modules/minimatch": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.2.tgz",
"integrity": "sha512-+G4CpNBxa5MprY+04MbgOw1v7So6n5JY166pFi9KfYwT78fxScCeSNQSNzp6dpPSW2rONOps6Ocam1wFhCgoVw==",
"version": "10.2.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
@@ -5138,13 +5159,13 @@
}
},
"node_modules/eslint-plugin-jsdoc": {
"version": "62.5.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.5.0.tgz",
"integrity": "sha512-D+1haMVDzW/ZMoPwOnsbXCK07rJtsq98Z1v+ApvDKxSzYTTcPgmFc/nyUDCGmxm2cP7g7hszyjYHO7Zodl/43w==",
"version": "62.7.1",
"resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.7.1.tgz",
"integrity": "sha512-4Zvx99Q7d1uggYBUX/AIjvoyqXhluGbbKrRmG8SQTLprPFg6fa293tVJH1o1GQwNe3lUydd8ZHzn37OaSncgSQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"@es-joy/jsdoccomment": "~0.83.0",
"@es-joy/jsdoccomment": "~0.84.0",
"@es-joy/resolve.exports": "1.2.0",
"are-docs-informative": "^0.0.2",
"comment-parser": "1.4.5",
@@ -5155,7 +5176,7 @@
"html-entities": "^2.6.0",
"object-deep-merge": "^2.0.0",
"parse-imports-exports": "^0.2.4",
"semver": "^7.7.3",
"semver": "^7.7.4",
"spdx-expression-parse": "^4.0.0",
"to-valid-identifier": "^1.0.0"
},
@@ -5163,7 +5184,7 @@
"node": "^20.19.0 || ^22.13.0 || >=24"
},
"peerDependencies": {
"eslint": "^7.0.0 || ^8.0.0 || ^9.0.0"
"eslint": "^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0"
}
},
"node_modules/eslint-plugin-jsdoc/node_modules/debug": {
@@ -5629,10 +5650,22 @@
"dev": true,
"license": "MIT"
},
"node_modules/fast-xml-builder": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.0.0.tgz",
"integrity": "sha512-fpZuDogrAgnyt9oDDz+5DBz0zgPdPZz6D4IR7iESxRXElrlGTRkHJ9eEt+SACRJwT0FNFrt71DFQIUFBJfX/uQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/NaturalIntelligence"
}
],
"license": "MIT"
},
"node_modules/fast-xml-parser": {
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.6.tgz",
"integrity": "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA==",
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.4.1.tgz",
"integrity": "sha512-BQ30U1mKkvXQXXkAGcuyUA/GA26oEB7NzOtsxCDtyu62sjGw5QraKFhx2Em3WQNjPw9PG6MQ9yuIIgkSDfGu5A==",
"funding": [
{
"type": "github",
@@ -5641,6 +5674,7 @@
],
"license": "MIT",
"dependencies": {
"fast-xml-builder": "^1.0.0",
"strnum": "^2.1.2"
},
"bin": {
@@ -6024,9 +6058,9 @@
}
},
"node_modules/glob/node_modules/minimatch": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.2.tgz",
"integrity": "sha512-+G4CpNBxa5MprY+04MbgOw1v7So6n5JY166pFi9KfYwT78fxScCeSNQSNzp6dpPSW2rONOps6Ocam1wFhCgoVw==",
"version": "10.2.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"license": "BlueOak-1.0.0",
"dependencies": {
"brace-expansion": "^5.0.2"
@@ -6039,9 +6073,9 @@
}
},
"node_modules/globals": {
"version": "16.5.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz",
"integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==",
"version": "17.3.0",
"resolved": "https://registry.npmjs.org/globals/-/globals-17.3.0.tgz",
"integrity": "sha512-yMqGUQVVCkD4tqjOJf3TnrvaaHDMYp4VlUSObbkIiuCPe/ofdMBFIAcBbCSRFWOnos6qRiTVStDwqPLUclaxIw==",
"dev": true,
"license": "MIT",
"engines": {
@@ -6909,9 +6943,9 @@
}
},
"node_modules/jsdoc-type-pratt-parser": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-7.1.0.tgz",
"integrity": "sha512-SX7q7XyCwzM/MEDCYz0l8GgGbJAACGFII9+WfNYr5SLEKukHWRy2Jk3iWRe7P+lpYJNs7oQ+OSei4JtKGUjd7A==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-7.1.1.tgz",
"integrity": "sha512-/2uqY7x6bsrpi3i9LVU6J89352C0rpMk0as8trXxCtvd4kPk1ke/Eyif6wqfSLvoNJqcDG9Vk4UsXgygzCt2xA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -7240,9 +7274,9 @@
}
},
"node_modules/minimatch": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.3.tgz",
"integrity": "sha512-M2GCs7Vk83NxkUyQV1bkABc4yxgz9kILhHImZiBPAZ9ybuvCb0/H7lEl5XvIg3g+9d4eNotkZA5IWwYl0tibaA==",
"version": "3.1.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
"integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"
@@ -7928,9 +7962,9 @@
}
},
"node_modules/readdir-glob/node_modules/minimatch": {
"version": "5.1.7",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.7.tgz",
"integrity": "sha512-FjiwU9HaHW6YB3H4a1sFudnv93lvydNjz2lmyUXR6IwKhGI+bgL3SOZrBGn6kvvX2pJvhEkGSGjyTHN47O4rqA==",
"version": "5.1.9",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz",
"integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==",
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
@@ -8836,9 +8870,9 @@
}
},
"node_modules/tar": {
"version": "7.5.7",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz",
"integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==",
"version": "7.5.10",
"resolved": "https://registry.npmjs.org/tar/-/tar-7.5.10.tgz",
"integrity": "sha512-8mOPs1//5q/rlkNSPcCegA6hiHJYDmSLEI8aMH/CdSQJNWztHC9WHNam5zdQlfpTwB9Xp7IBEsHfV5LKMJGVAw==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
@@ -9168,16 +9202,16 @@
}
},
"node_modules/typescript-eslint": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.0.tgz",
"integrity": "sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==",
"version": "8.56.1",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.1.tgz",
"integrity": "sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/eslint-plugin": "8.56.0",
"@typescript-eslint/parser": "8.56.0",
"@typescript-eslint/typescript-estree": "8.56.0",
"@typescript-eslint/utils": "8.56.0"
"@typescript-eslint/eslint-plugin": "8.56.1",
"@typescript-eslint/parser": "8.56.1",
"@typescript-eslint/typescript-estree": "8.56.1",
"@typescript-eslint/utils": "8.56.1"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"

View File

@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "4.32.5",
"version": "4.32.7",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -9,7 +9,7 @@
"lint": "eslint --report-unused-disable-directives --max-warnings=0 .",
"lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
"lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
"ava": "npm run transpile && ava --serial --verbose",
"ava": "npm run transpile && ava --verbose",
"test": "npm run ava -- src/",
"test-debug": "npm run test -- --timeout=20m",
"transpile": "tsc --build --verbose"
@@ -58,6 +58,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
@@ -66,14 +67,14 @@
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-github": "^6.0.0",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-jsdoc": "^62.5.0",
"eslint-plugin-jsdoc": "^62.7.1",
"eslint-plugin-no-async-foreach": "^0.1.1",
"glob": "^11.1.0",
"globals": "^16.5.0",
"globals": "^17.3.0",
"nock": "^14.0.11",
"sinon": "^21.0.1",
"typescript": "^5.9.3",
"typescript-eslint": "^8.56.0"
"typescript-eslint": "^8.56.1"
},
"overrides": {
"@actions/tool-cache": {

View File

@@ -1,3 +1 @@
env
__pycache__/
*.pyc
node_modules/

View File

View File

@@ -40,7 +40,7 @@ steps:
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
- name: Upload SARIF files
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
@@ -48,7 +48,7 @@ steps:
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}

View File

@@ -2,7 +2,6 @@ name: "Analyze: 'ref' and 'sha' from inputs"
description: "Checks that specifying 'ref' and 'sha' as inputs works"
versions: ["default"]
installGo: true
installPython: true
installDotNet: true
steps:
- uses: ./../action/init

View File

@@ -5,7 +5,7 @@ description: >
autobuild Action.
operatingSystems: ["ubuntu", "windows"]
versions: ["linked", "nightly-latest"]
installJava: "true"
installJava: true
env:
CODEQL_ACTION_AUTOBUILD_BUILD_MODE_DIRECT_TRACING: true
steps:

View File

@@ -2,8 +2,8 @@ name: "Build mode autobuild"
description: "An end-to-end integration test of a Java repository built using 'build-mode: autobuild'"
operatingSystems: ["ubuntu", "windows"]
versions: ["linked", "nightly-latest"]
installJava: "true"
installYq: "true"
installJava: true
installYq: true
steps:
- name: Set up Java test repo configuration
run: |

View File

@@ -11,5 +11,5 @@ steps:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
if: ${{ !contains(steps.init.outputs.codeql-version, '+') }}
run: exit 1

View File

@@ -27,7 +27,7 @@ steps:
output: ${{ runner.temp }}/results
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-zstd-bundle.sarif
path: ${{ runner.temp }}/results/javascript.sarif

View File

@@ -12,7 +12,7 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -25,7 +25,7 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -19,7 +19,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -11,7 +11,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
- name: Upload SARIF
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v7
with:
name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"

View File

@@ -2,7 +2,6 @@ name: "Local CodeQL bundle"
description: "Tests using a CodeQL bundle from a local file rather than a URL"
versions: ["linked"]
installGo: true
installPython: true
installDotNet: true
steps:
- name: Fetch latest CodeQL bundle

View File

@@ -3,7 +3,6 @@ description: "Checks that specifying packages using a combination of a config fi
versions: ["linked", "default", "nightly-latest"] # This feature is not compatible with old CLIs
installGo: true
installNode: true
installPython: true
installDotNet: true
steps:
- uses: ./../action/init

View File

@@ -6,7 +6,6 @@ versions:
- linked
- nightly-latest
installGo: true
installPython: true
installDotNet: true
steps:
- uses: ./../action/init

View File

@@ -6,7 +6,6 @@ versions:
- linked
- nightly-latest
installGo: true
installPython: true
installDotNet: true
steps:
- uses: ./../action/init

View File

@@ -2,7 +2,6 @@ name: "Upload-sarif: 'ref' and 'sha' from inputs"
description: "Checks that specifying 'ref' and 'sha' as inputs works"
versions: ["default"]
installGo: true
installPython: true
installDotNet: true
steps:
- uses: ./../action/init

View File

@@ -3,7 +3,6 @@ description: "Checks that uploading SARIFs to the code quality endpoint works"
versions: ["default"]
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
installGo: true
installPython: true
installDotNet: true
steps:
- uses: ./../action/init
@@ -32,16 +31,16 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/
- name: "Fail for missing output from `upload-sarif` step for `code-scanning`"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)
run: exit 1
- name: "Fail for missing output from `upload-sarif` step for `code-quality`"
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)"
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)
run: exit 1
- name: Upload single SARIF file for Code Scanning
uses: ./../action/upload-sarif
id: upload-single-sarif-code-scanning
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -49,12 +48,12 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/
- name: "Fail for missing output from `upload-single-sarif-code-scanning` step"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)
run: exit 1
- name: Upload single SARIF file for Code Quality
uses: ./../action/upload-sarif
id: upload-single-sarif-code-quality
if: "contains(matrix.analysis-kinds, 'code-quality')"
if: contains(matrix.analysis-kinds, 'code-quality')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -62,16 +61,16 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/
- name: "Fail for missing output from `upload-single-sarif-code-quality` step"
if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)"
if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)
run: exit 1
- name: Change SARIF file extension
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json
- name: Upload single non-`.sarif` file
uses: ./../action/upload-sarif
id: upload-single-non-sarif
if: "contains(matrix.analysis-kinds, 'code-scanning')"
if: contains(matrix.analysis-kinds, 'code-scanning')
with:
ref: 'refs/heads/main'
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
@@ -79,5 +78,5 @@ steps:
category: |
${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/
- name: "Fail for missing output from `upload-single-non-sarif` step"
if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)"
if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)
run: exit 1

View File

@@ -2,7 +2,6 @@ name: "Use a custom `checkout_path`"
description: "Checks that a custom `checkout_path` will find the proper commit_oid"
versions: ["linked"]
installGo: true
installPython: true
installDotNet: true
steps:
# This ensures we don't accidentally use the original checkout for any part of the test.

605
pr-checks/package-lock.json generated Normal file
View File

@@ -0,0 +1,605 @@
{
"name": "pr-checks",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"dependencies": {
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^20.19.9",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz",
"integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz",
"integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz",
"integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz",
"integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz",
"integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz",
"integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz",
"integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz",
"integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz",
"integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz",
"integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz",
"integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz",
"integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz",
"integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz",
"integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz",
"integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz",
"integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz",
"integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz",
"integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz",
"integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz",
"integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz",
"integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openharmony-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz",
"integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz",
"integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz",
"integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz",
"integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz",
"integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@types/node": {
"version": "20.19.35",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.35.tgz",
"integrity": "sha512-Uarfe6J91b9HAUXxjvSOdiO2UPOKLm07Q1oh0JHxoZ1y8HoqxDAu3gVrsrOHeiio0kSsoVBt4wFrKOm0dKxVPQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/esbuild": {
"version": "0.27.3",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz",
"integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.27.3",
"@esbuild/android-arm": "0.27.3",
"@esbuild/android-arm64": "0.27.3",
"@esbuild/android-x64": "0.27.3",
"@esbuild/darwin-arm64": "0.27.3",
"@esbuild/darwin-x64": "0.27.3",
"@esbuild/freebsd-arm64": "0.27.3",
"@esbuild/freebsd-x64": "0.27.3",
"@esbuild/linux-arm": "0.27.3",
"@esbuild/linux-arm64": "0.27.3",
"@esbuild/linux-ia32": "0.27.3",
"@esbuild/linux-loong64": "0.27.3",
"@esbuild/linux-mips64el": "0.27.3",
"@esbuild/linux-ppc64": "0.27.3",
"@esbuild/linux-riscv64": "0.27.3",
"@esbuild/linux-s390x": "0.27.3",
"@esbuild/linux-x64": "0.27.3",
"@esbuild/netbsd-arm64": "0.27.3",
"@esbuild/netbsd-x64": "0.27.3",
"@esbuild/openbsd-arm64": "0.27.3",
"@esbuild/openbsd-x64": "0.27.3",
"@esbuild/openharmony-arm64": "0.27.3",
"@esbuild/sunos-x64": "0.27.3",
"@esbuild/win32-arm64": "0.27.3",
"@esbuild/win32-ia32": "0.27.3",
"@esbuild/win32-x64": "0.27.3"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/get-tsconfig": {
"version": "4.13.6",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz",
"integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==",
"dev": true,
"license": "MIT",
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
},
"funding": {
"url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
}
},
"node_modules/resolve-pkg-maps": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
"node_modules/tsx": {
"version": "4.21.0",
"resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz",
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "~0.27.0",
"get-tsconfig": "^4.7.5"
},
"bin": {
"tsx": "dist/cli.mjs"
},
"engines": {
"node": ">=18.0.0"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
},
"node_modules/yaml": {
"version": "2.8.2",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
"integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
"license": "ISC",
"bin": {
"yaml": "bin.mjs"
},
"engines": {
"node": ">= 14.6"
},
"funding": {
"url": "https://github.com/sponsors/eemeli"
}
}
}
}

12
pr-checks/package.json Normal file
View File

@@ -0,0 +1,12 @@
{
"private": true,
"description": "Dependencies for the sync.ts",
"dependencies": {
"yaml": "^2.8.2"
},
"devDependencies": {
"@types/node": "^20.19.9",
"tsx": "^4.21.0",
"typescript": "^5.9.3"
}
}

View File

@@ -6,9 +6,9 @@ to one of the files in this directory.
## Updating workflows
Run `./sync.sh` to invoke the workflow generator and re-generate the workflow files in `.github/workflows/` based on the templates in `pr-checks/checks/`.
Alternatively, you can use `just`:
1. Install https://github.com/casey/just by whichever way you prefer.
2. Run `just update-pr-checks` in your terminal.
### If you don't want to install `just`
Manually run each step in the `justfile`.

View File

@@ -1,402 +0,0 @@
#!/usr/bin/env python
import ruamel.yaml
from ruamel.yaml.scalarstring import SingleQuotedScalarString, LiteralScalarString
import pathlib
import os
# The default set of CodeQL Bundle versions to use for the PR checks.
defaultTestVersions = [
# The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts`
"stable-v2.17.6",
# The last CodeQL release in the 2.18 series.
"stable-v2.18.4",
# The last CodeQL release in the 2.19 series.
"stable-v2.19.4",
# The last CodeQL release in the 2.20 series.
"stable-v2.20.7",
# The last CodeQL release in the 2.21 series.
"stable-v2.21.4",
# The last CodeQL release in the 2.22 series.
"stable-v2.22.4",
# The default version of CodeQL for Dotcom, as determined by feature flags.
"default",
# The version of CodeQL shipped with the Action in `defaults.json`. During the release process
# for a new CodeQL release, there will be a period of time during which this will be newer than
# the default version on Dotcom.
"linked",
# A nightly build directly from our private repo, built in the last 24 hours.
"nightly-latest"
]
# When updating the ruamel.yaml version here, update the PR check in
# `.github/workflows/pr-checks.yml` too.
header = """# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
"""
def is_truthy(value):
    """Interpret *value* as a boolean flag.

    Strings count as truthy only when they spell 'true' in any casing;
    every other value falls back to Python's standard truthiness rules.
    """
    if not isinstance(value, str):
        return bool(value)
    return value.lower() == 'true'
class NonAliasingRTRepresenter(ruamel.yaml.representer.RoundTripRepresenter):
    """Round-trip YAML representer that never emits anchors or aliases.

    Repeated Python objects are serialized out in full each time instead of
    as `&anchor`/`*alias` references, keeping the generated YAML readable.
    """
    def ignore_aliases(self, data):
        # Report every node as non-aliasable so ruamel writes it inline.
        return True
def writeHeader(checkStream):
    # Prepend the module-level "generated file" warning banner to an output stream.
    checkStream.write(header)
# Configure a round-trip YAML instance that never emits anchors/aliases and
# matches the indentation style of the checked-in workflow files.
yaml = ruamel.yaml.YAML()
yaml.Representer = NonAliasingRTRepresenter
yaml.indent(mapping=2, sequence=4, offset=2)

this_dir = pathlib.Path(__file__).resolve().parent

allJobs = {}
collections = {}
# Generate one workflow file per check specification in `checks/`.
for file in sorted((this_dir / 'checks').glob('*.yml')):
    with open(file, 'r') as checkStream:
        checkSpecification = yaml.load(checkStream)

    matrix = []
    workflowInputs = {}
    if 'inputs' in checkSpecification:
        workflowInputs = checkSpecification['inputs']

    # Build the (runner image x CodeQL version) matrix for this check.
    for version in checkSpecification.get('versions', defaultTestVersions):
        if version == "latest":
            raise ValueError('Did not recognize "version: latest". Did you mean "version: linked"?')

        runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]
        operatingSystems = checkSpecification.get('operatingSystems', ["ubuntu"])

        for operatingSystem in operatingSystems:
            runnerImagesForOs = [image for image in runnerImages if image.startswith(operatingSystem)]

            for runnerImage in runnerImagesForOs:
                matrix.append({
                    'os': runnerImage,
                    'version': version
                })

    useAllPlatformBundle = "false"  # Default to false
    if checkSpecification.get('useAllPlatformBundle'):
        useAllPlatformBundle = checkSpecification['useAllPlatformBundle']

    # Expand the matrix with an `analysis-kinds` dimension when requested.
    if 'analysisKinds' in checkSpecification:
        newMatrix = []
        for matrixInclude in matrix:
            for analysisKind in checkSpecification.get('analysisKinds'):
                newMatrix.append(
                    matrixInclude |
                    { 'analysis-kinds': analysisKind }
                )
        matrix = newMatrix

    # Construct the workflow steps needed for this check.
    steps = [
        {
            'name': 'Check out repository',
            'uses': 'actions/checkout@v6'
        },
    ]

    installNode = is_truthy(checkSpecification.get('installNode', ''))
    if installNode:
        steps.extend([
            {
                'name': 'Install Node.js',
                'uses': 'actions/setup-node@v6',
                'with': {
                    'node-version': '20.x',
                    'cache': 'npm',
                },
            },
            {
                'name': 'Install dependencies',
                'run': 'npm ci',
            },
        ])

    steps.append({
        'name': 'Prepare test',
        'id': 'prepare-test',
        'uses': './.github/actions/prepare-test',
        'with': {
            'version': '${{ matrix.version }}',
            'use-all-platform-bundle': useAllPlatformBundle,
            # If the action is being run from a container, then do not setup kotlin.
            # This is because the kotlin binaries cannot be downloaded from the container.
            'setup-kotlin': str(not 'container' in checkSpecification).lower(),
        }
    })

    installGo = is_truthy(checkSpecification.get('installGo', ''))
    if installGo:
        baseGoVersionExpr = '>=1.21.0'
        workflowInputs['go-version'] = {
            'type': 'string',
            'description': 'The version of Go to install',
            'required': False,
            'default': baseGoVersionExpr,
        }
        steps.append({
            'name': 'Install Go',
            'uses': 'actions/setup-go@v6',
            'with': {
                'go-version': '${{ inputs.go-version || \'' + baseGoVersionExpr + '\' }}',
                # to avoid potentially misleading autobuilder results where we expect it to download
                # dependencies successfully, but they actually come from a warm cache
                'cache': False
            }
        })

    installJava = is_truthy(checkSpecification.get('installJava', ''))
    if installJava:
        baseJavaVersionExpr = '17'
        workflowInputs['java-version'] = {
            'type': 'string',
            'description': 'The version of Java to install',
            'required': False,
            'default': baseJavaVersionExpr,
        }
        steps.append({
            'name': 'Install Java',
            'uses': 'actions/setup-java@v5',
            'with': {
                'java-version': '${{ inputs.java-version || \'' + baseJavaVersionExpr + '\' }}',
                'distribution': 'temurin'
            }
        })

    installPython = is_truthy(checkSpecification.get('installPython', ''))
    if installPython:
        basePythonVersionExpr = '3.13'
        workflowInputs['python-version'] = {
            'type': 'string',
            'description': 'The version of Python to install',
            'required': False,
            'default': basePythonVersionExpr,
        }
        steps.append({
            'name': 'Install Python',
            'if': 'matrix.version != \'nightly-latest\'',
            'uses': 'actions/setup-python@v6',
            'with': {
                'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}'
            }
        })

    installDotNet = is_truthy(checkSpecification.get('installDotNet', ''))
    if installDotNet:
        baseDotNetVersionExpr = '9.x'
        workflowInputs['dotnet-version'] = {
            'type': 'string',
            'description': 'The version of .NET to install',
            'required': False,
            'default': baseDotNetVersionExpr,
        }
        steps.append({
            'name': 'Install .NET',
            'uses': 'actions/setup-dotnet@v5',
            'with': {
                'dotnet-version': '${{ inputs.dotnet-version || \'' + baseDotNetVersionExpr + '\' }}'
            }
        })

    installYq = is_truthy(checkSpecification.get('installYq', ''))
    if installYq:
        steps.append({
            'name': 'Install yq',
            'if': "runner.os == 'Windows'",
            'env': {
                'YQ_PATH': '${{ runner.temp }}/yq',
                # This is essentially an arbitrary version of `yq`, which happened to be the one that
                # `choco` fetched when we moved away from using that here.
                # See https://github.com/github/codeql-action/pull/3423
                'YQ_VERSION': 'v4.50.1'
            },
            'run': LiteralScalarString(
                'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n'
                'echo "$YQ_PATH" >> "$GITHUB_PATH"'
            ),
        })

    # If container initialisation steps are present in the check specification,
    # make sure to execute them first.
    if 'container' in checkSpecification and 'container-init-steps' in checkSpecification:
        steps.insert(0, checkSpecification['container-init-steps'])

    steps.extend(checkSpecification['steps'])

    checkJob = {
        'strategy': {
            'fail-fast': False,
            'matrix': {
                'include': matrix
            }
        },
        'name': checkSpecification['name'],
        'if': 'github.triggering_actor != \'dependabot[bot]\'',
        'permissions': {
            'contents': 'read',
            'security-events': 'read'
        },
        'timeout-minutes': 45,
        'runs-on': '${{ matrix.os }}',
        'steps': steps,
    }
    # A specification-level `permissions` block overrides the default above.
    if 'permissions' in checkSpecification:
        checkJob['permissions'] = checkSpecification['permissions']

    for key in ["env", "container", "services"]:
        if key in checkSpecification:
            checkJob[key] = checkSpecification[key]

    # Force test mode on unless the specification opted out explicitly.
    checkJob['env'] = checkJob.get('env', {})
    if 'CODEQL_ACTION_TEST_MODE' not in checkJob['env']:
        checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True

    checkName = file.stem

    # If this check belongs to a named collection, record it.
    if 'collection' in checkSpecification:
        collection_name = checkSpecification['collection']
        collections.setdefault(collection_name, []).append({
            'specification': checkSpecification,
            'checkName': checkName,
            'inputs': workflowInputs
        })

    # Write the workflow to a `.raw` file first; it is re-written below with
    # trailing whitespace stripped, then the raw file is removed.
    raw_file = this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml.raw"
    with open(raw_file, 'w', newline='\n') as output_stream:
        extraGroupName = ""
        for inputName in workflowInputs.keys():
            extraGroupName += "-${{inputs." + inputName + "}}"
        writeHeader(output_stream)
        yaml.dump({
            'name': f"PR Check - {checkSpecification['name']}",
            'env': {
                'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
                'GO111MODULE': 'auto'
            },
            'on': {
                'push': {
                    'branches': ['main', 'releases/v*']
                },
                'pull_request': {
                    'types': ["opened", "synchronize", "reopened", "ready_for_review"]
                },
                'merge_group': {
                    'types': ['checks_requested']
                },
                'schedule': [{'cron': SingleQuotedScalarString('0 5 * * *')}],
                'workflow_dispatch': {
                    'inputs': workflowInputs
                },
                'workflow_call': {
                    'inputs': workflowInputs
                }
            },
            'defaults': {
                'run': {
                    'shell': 'bash',
                },
            },
            'concurrency': {
                # Cancel in-progress workflows in the same 'group' for pull_request events,
                # but not other event types. This should have the effect that workflows on PRs
                # get cancelled if there is a newer workflow in the same concurrency group.
                # For other events, the new workflows should wait until earlier ones have finished.
                # This should help reduce the number of concurrent workflows on the repo, and
                # consequently the number of concurrent API requests.
                # Note, the `|| false` is intentional to rule out that this somehow ends up being
                # `true` since we observed workflows for non-`pull_request` events getting cancelled.
                'cancel-in-progress': "${{ github.event_name == 'pull_request' || false }}",
                # The group is determined by the workflow name, the ref, and the input values.
                # The base name is hard-coded to avoid issues when the workflow is triggered by
                # a `workflow_call` event (where `github.workflow` would be the name of the caller).
                # The input values are added, since they may result in different behaviour for a
                # given workflow on the same ref.
                'group': checkName + "-${{github.ref}}" + extraGroupName
            },
            'jobs': {
                checkName: checkJob
            }
        }, output_stream)

    # Re-write the file with trailing whitespace stripped from every line.
    with open(raw_file, 'r') as input_stream:
        with open(this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml", 'w', newline='\n') as output_stream:
            content = input_stream.read()
            output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+['']))
    os.remove(raw_file)
# write workflow files for collections: each collection gets a single
# `workflow_dispatch`-only caller workflow that invokes its member checks
# via `workflow_call`.
for collection_name in collections:
    jobs = {}
    combinedInputs = {}
    for check in collections[collection_name]:
        checkName = check['checkName']
        checkSpecification = check['specification']
        checkInputs = check['inputs']
        checkWith = {}
        # Merge this check's inputs into the collection-level inputs and
        # forward each one to the called workflow.
        combinedInputs |= checkInputs
        for inputName in checkInputs.keys():
            checkWith[inputName] = "${{ inputs." + inputName + " }}"
        jobs[checkName] = {
            'name': checkSpecification['name'],
            'permissions': {
                'contents': 'read',
                'security-events': 'read'
            },
            'uses': "./.github/workflows/" + f"__{checkName}.yml",
            'with': checkWith
        }

    # As above: write to a `.raw` file, then re-write with trailing
    # whitespace stripped and remove the raw file.
    raw_file = this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml.raw"
    with open(raw_file, 'w') as output_stream:
        writeHeader(output_stream)
        yaml.dump({
            'name': f"Manual Check - {collection_name}",
            'env': {
                'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}',
                'GO111MODULE': 'auto'
            },
            'on': {
                'workflow_dispatch': {
                    'inputs': combinedInputs
                },
            },
            'jobs': jobs
        }, output_stream)

    with open(raw_file, 'r') as input_stream:
        with open(this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml", 'w', newline='\n') as output_stream:
            content = input_stream.read()
            output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+['']))
    os.remove(raw_file)

View File

@@ -2,8 +2,14 @@
# Abort on the first failing command.
set -e

# Run from the directory containing this script (pr-checks/).
cd "$(dirname "$0")"

# NOTE(review): the following four lines are the legacy Python toolchain;
# this diff hunk shows both removed and added lines, so confirm against the
# full script whether they should still be present.
python3 -m venv env
source env/*/activate
pip3 install ruamel.yaml==0.17.31
python3 sync.py

# Run `npm ci` in CI or `npm install` otherwise.
if [ "$GITHUB_ACTIONS" = "true" ]; then
    echo "In Actions, running 'npm ci' for 'sync.ts'..."
    npm ci
else
    echo "Running 'npm install' for 'sync.ts'..."
    npm install --no-audit --no-fund
fi

# Regenerate the workflow files using the TypeScript generator.
npx tsx sync.ts

525
pr-checks/sync.ts Executable file
View File

@@ -0,0 +1,525 @@
#!/usr/bin/env npx tsx
import * as fs from "fs";
import * as path from "path";
import * as yaml from "yaml";
/** Known workflow input names. */
enum KnownInputName {
  GoVersion = "go-version",
  JavaVersion = "java-version",
  PythonVersion = "python-version",
  DotnetVersion = "dotnet-version",
}

/**
 * Represents workflow input definitions.
 */
interface WorkflowInput {
  // The input's type, e.g. "string".
  type: string;
  // Human-readable description shown in the workflow UI.
  description: string;
  // Whether the input must be supplied by the caller.
  required: boolean;
  // Value used when the caller does not supply the input.
  default: string;
}

/** A partial mapping from known input names to input definitions. */
type WorkflowInputs = Partial<Record<KnownInputName, WorkflowInput>>;

/**
 * Represents PR check specifications.
 */
interface Specification {
  /** The display name for the check. */
  name: string;
  /** The workflow steps specific to this check. */
  steps: any[];
  /** Workflow-level input definitions forwarded to `workflow_dispatch`/`workflow_call`. */
  inputs?: Record<string, WorkflowInput>;
  /** CodeQL bundle versions to test against. Defaults to `DEFAULT_TEST_VERSIONS`. */
  versions?: string[];
  /** Operating system prefixes used to select runner images (e.g. `["ubuntu", "macos"]`). */
  operatingSystems?: string[];
  /** Whether to use the all-platform CodeQL bundle. */
  useAllPlatformBundle?: string;
  /** Values for the `analysis-kinds` matrix dimension. */
  analysisKinds?: string[];
  // Flags requesting that a toolchain be installed before the check's steps run.
  installNode?: boolean;
  installGo?: boolean;
  installJava?: boolean;
  installPython?: boolean;
  installDotNet?: boolean;
  installYq?: boolean;
  /** Container image configuration for the job. */
  container?: any;
  /** Service containers for the job. */
  services?: any;
  /** Custom permissions override for the job. */
  permissions?: Record<string, string>;
  /** Extra environment variables for the job. */
  env?: Record<string, any>;
  /** If set, this check is part of a named collection that gets its own caller workflow. */
  collection?: string;
}
// The default set of CodeQL Bundle versions to use for the PR checks.
const defaultTestVersions = [
  // The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts`
  "stable-v2.17.6",
  // The last CodeQL release in the 2.18 series.
  "stable-v2.18.4",
  // The last CodeQL release in the 2.19 series.
  "stable-v2.19.4",
  // The last CodeQL release in the 2.20 series.
  "stable-v2.20.7",
  // The last CodeQL release in the 2.21 series.
  "stable-v2.21.4",
  // The last CodeQL release in the 2.22 series.
  "stable-v2.22.4",
  // The default version of CodeQL for Dotcom, as determined by feature flags.
  "default",
  // The version of CodeQL shipped with the Action in `defaults.json`. During the release process
  // for a new CodeQL release, there will be a period of time during which this will be newer than
  // the default version on Dotcom.
  "linked",
  // A nightly build directly from our private repo, built in the last 24 hours.
  "nightly-latest",
];

// Directory containing this script (pr-checks/).
const THIS_DIR = __dirname;
// Directory of check specification templates.
const CHECKS_DIR = path.join(THIS_DIR, "checks");
// Destination directory for the generated workflow files.
const OUTPUT_DIR = path.join(THIS_DIR, "..", ".github", "workflows");
/**
 * Reads the file at `filePath` and parses its contents as a YAML document.
 */
function loadYaml(filePath: string): yaml.Document {
  return yaml.parseDocument(fs.readFileSync(filePath, "utf8"));
}
/**
 * Serialize a value to YAML and write it to a file, prepended with the
 * standard header comment.
 */
function writeYaml(filePath: string, workflow: any): void {
  // Warning banner written at the top of every generated workflow file.
  const header = `# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
`;
  // Build a Document explicitly so that repeated objects are not emitted
  // as YAML anchors/aliases.
  const workflowDoc = new yaml.Document(workflow, {
    aliasDuplicateObjects: false,
  });
  // lineWidth: 0 disables line wrapping; singleQuote keeps quoting consistent
  // with the rest of the generated workflows.
  const yamlStr = yaml.stringify(workflowDoc, {
    aliasDuplicateObjects: false,
    singleQuote: true,
    lineWidth: 0,
  });
  fs.writeFileSync(filePath, stripTrailingWhitespace(header + yamlStr), "utf8");
}
/**
 * Removes trailing whitespace from every line of `content`.
 */
function stripTrailingWhitespace(content: string): string {
  const trimmedLines: string[] = [];
  for (const line of content.split("\n")) {
    trimmedLines.push(line.trimEnd());
  }
  return trimmedLines.join("\n");
}
/**
 * Main entry point for the sync script.
 */
function main(): void {
  // Ensure the output directory exists.
  fs.mkdirSync(OUTPUT_DIR, { recursive: true });

  // Discover and sort all check specification files.
  const checkFiles = fs
    .readdirSync(CHECKS_DIR)
    .filter((f) => f.endsWith(".yml"))
    .sort()
    .map((f) => path.join(CHECKS_DIR, f));

  console.log(`Found ${checkFiles.length} check specification(s).`);

  // Checks grouped by collection name; each collection later gets a
  // dedicated caller workflow.
  const collections: Record<
    string,
    Array<{
      specification: Specification;
      checkName: string;
      inputs: Record<string, WorkflowInput>;
    }>
  > = {};

  for (const file of checkFiles) {
    const checkName = path.basename(file, ".yml");
    const specDocument = loadYaml(file);
    const checkSpecification = specDocument.toJS() as Specification;

    console.log(`Processing: ${checkName} — "${checkSpecification.name}"`);

    const workflowInputs: WorkflowInputs = {};

    // Build the (runner image x CodeQL version) matrix for this check.
    let matrix: Array<Record<string, any>> = [];
    for (const version of checkSpecification.versions ?? defaultTestVersions) {
      if (version === "latest") {
        throw new Error(
          'Did not recognise "version: latest". Did you mean "version: linked"?',
        );
      }
      const runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"];
      const operatingSystems = checkSpecification.operatingSystems ?? [
        "ubuntu",
      ];
      for (const operatingSystem of operatingSystems) {
        const runnerImagesForOs = runnerImages.filter((image) =>
          image.startsWith(operatingSystem),
        );
        for (const runnerImage of runnerImagesForOs) {
          matrix.push({
            os: runnerImage,
            version,
          });
        }
      }
    }

    // Defaults to "false" when the specification does not opt in.
    const useAllPlatformBundle = checkSpecification.useAllPlatformBundle
      ? checkSpecification.useAllPlatformBundle
      : "false";

    // Expand the matrix with an `analysis-kinds` dimension when requested.
    if (checkSpecification.analysisKinds) {
      const newMatrix: Array<Record<string, any>> = [];
      for (const matrixInclude of matrix) {
        for (const analysisKind of checkSpecification.analysisKinds) {
          newMatrix.push({
            ...matrixInclude,
            "analysis-kinds": analysisKind,
          });
        }
      }
      matrix = newMatrix;
    }

    // Construct the workflow steps needed for this check.
    const steps: any[] = [
      {
        name: "Check out repository",
        uses: "actions/checkout@v6",
      },
    ];

    const installNode = checkSpecification.installNode;
    if (installNode) {
      steps.push(
        {
          name: "Install Node.js",
          uses: "actions/setup-node@v6",
          with: {
            "node-version": "20.x",
            cache: "npm",
          },
        },
        {
          name: "Install dependencies",
          run: "npm ci",
        },
      );
    }

    steps.push({
      name: "Prepare test",
      id: "prepare-test",
      uses: "./.github/actions/prepare-test",
      with: {
        version: "${{ matrix.version }}",
        "use-all-platform-bundle": useAllPlatformBundle,
        // If the action is being run from a container, then do not setup kotlin.
        // This is because the kotlin binaries cannot be downloaded from the container.
        "setup-kotlin": "container" in checkSpecification ? "false" : "true",
      },
    });

    const installGo = checkSpecification.installGo;
    if (installGo) {
      const baseGoVersionExpr = ">=1.21.0";
      workflowInputs[KnownInputName.GoVersion] = {
        type: "string",
        description: "The version of Go to install",
        required: false,
        default: baseGoVersionExpr,
      };
      steps.push({
        name: "Install Go",
        uses: "actions/setup-go@v6",
        with: {
          "go-version":
            "${{ inputs.go-version || '" + baseGoVersionExpr + "' }}",
          // to avoid potentially misleading autobuilder results where we expect it to download
          // dependencies successfully, but they actually come from a warm cache
          cache: false,
        },
      });
    }

    const installJava = checkSpecification.installJava;
    if (installJava) {
      const baseJavaVersionExpr = "17";
      workflowInputs[KnownInputName.JavaVersion] = {
        type: "string",
        description: "The version of Java to install",
        required: false,
        default: baseJavaVersionExpr,
      };
      steps.push({
        name: "Install Java",
        uses: "actions/setup-java@v5",
        with: {
          "java-version":
            "${{ inputs.java-version || '" + baseJavaVersionExpr + "' }}",
          distribution: "temurin",
        },
      });
    }

    const installPython = checkSpecification.installPython;
    if (installPython) {
      const basePythonVersionExpr = "3.13";
      workflowInputs[KnownInputName.PythonVersion] = {
        type: "string",
        description: "The version of Python to install",
        required: false,
        default: basePythonVersionExpr,
      };
      steps.push({
        name: "Install Python",
        if: "matrix.version != 'nightly-latest'",
        uses: "actions/setup-python@v6",
        with: {
          "python-version":
            "${{ inputs.python-version || '" + basePythonVersionExpr + "' }}",
        },
      });
    }

    const installDotNet = checkSpecification.installDotNet;
    if (installDotNet) {
      const baseDotNetVersionExpr = "9.x";
      workflowInputs[KnownInputName.DotnetVersion] = {
        type: "string",
        description: "The version of .NET to install",
        required: false,
        default: baseDotNetVersionExpr,
      };
      steps.push({
        name: "Install .NET",
        uses: "actions/setup-dotnet@v5",
        with: {
          "dotnet-version":
            "${{ inputs.dotnet-version || '" + baseDotNetVersionExpr + "' }}",
        },
      });
    }

    const installYq = checkSpecification.installYq;
    if (installYq) {
      steps.push({
        name: "Install yq",
        if: "runner.os == 'Windows'",
        env: {
          YQ_PATH: "${{ runner.temp }}/yq",
          // This is essentially an arbitrary version of `yq`, which happened to be the one that
          // `choco` fetched when we moved away from using that here.
          // See https://github.com/github/codeql-action/pull/3423
          YQ_VERSION: "v4.50.1",
        },
        run:
          'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' +
          'echo "$YQ_PATH" >> "$GITHUB_PATH"',
      });
    }

    // Extract the sequence of steps from the YAML document to persist as much formatting as possible.
    const specSteps = specDocument.get("steps") as yaml.YAMLSeq;
    // A handful of workflow specifications use double quotes for values, while we generally use single quotes.
    // This replaces double quotes with single quotes for consistency.
    yaml.visit(specSteps, {
      Scalar(_key, node) {
        if (node.type === "QUOTE_DOUBLE") {
          node.type = "QUOTE_SINGLE";
        }
      },
    });
    // Add the generated steps in front of the ones from the specification.
    specSteps.items.unshift(...steps);

    const checkJob: Record<string, any> = {
      strategy: {
        "fail-fast": false,
        matrix: {
          include: matrix,
        },
      },
      name: checkSpecification.name,
      if: "github.triggering_actor != 'dependabot[bot]'",
      permissions: {
        contents: "read",
        "security-events": "read",
      },
      "timeout-minutes": 45,
      "runs-on": "${{ matrix.os }}",
      steps: specSteps,
    };
    // A specification-level `permissions` block overrides the default above.
    if (checkSpecification.permissions) {
      checkJob.permissions = checkSpecification.permissions;
    }

    for (const key of ["env", "container", "services"] as const) {
      if (checkSpecification[key] !== undefined) {
        checkJob[key] = checkSpecification[key];
      }
    }

    // Force test mode on unless the specification opted out explicitly.
    checkJob.env = checkJob.env ?? {};
    if (!("CODEQL_ACTION_TEST_MODE" in checkJob.env)) {
      checkJob.env.CODEQL_ACTION_TEST_MODE = true;
    }

    // If this check belongs to a named collection, record it.
    if (checkSpecification.collection) {
      const collectionName = checkSpecification.collection;
      if (!collections[collectionName]) {
        collections[collectionName] = [];
      }
      collections[collectionName].push({
        specification: checkSpecification,
        checkName,
        inputs: workflowInputs,
      });
    }

    // Workflow inputs are appended to the concurrency group name, since they
    // may result in different behaviour for a given workflow on the same ref.
    let extraGroupName = "";
    for (const inputName of Object.keys(workflowInputs)) {
      extraGroupName += "-${{inputs." + inputName + "}}";
    }

    // Force single quotes around the cron expression in the output.
    const cron = new yaml.Scalar("0 5 * * *");
    cron.type = yaml.Scalar.QUOTE_SINGLE;

    const workflow = {
      name: `PR Check - ${checkSpecification.name}`,
      env: {
        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}",
        GO111MODULE: "auto",
      },
      on: {
        push: {
          branches: ["main", "releases/v*"],
        },
        pull_request: {
          types: ["opened", "synchronize", "reopened", "ready_for_review"],
        },
        merge_group: {
          types: ["checks_requested"],
        },
        schedule: [{ cron }],
        workflow_dispatch: {
          inputs: workflowInputs,
        },
        workflow_call: {
          inputs: workflowInputs,
        },
      },
      defaults: {
        run: {
          shell: "bash",
        },
      },
      concurrency: {
        "cancel-in-progress":
          "${{ github.event_name == 'pull_request' || false }}",
        group: checkName + "-${{github.ref}}" + extraGroupName,
      },
      jobs: {
        [checkName]: checkJob,
      },
    };

    const outputPath = path.join(OUTPUT_DIR, `__${checkName}.yml`);
    writeYaml(outputPath, workflow);
  }

  // Write workflow files for collections.
  for (const collectionName of Object.keys(collections)) {
    const jobs: Record<string, any> = {};
    let combinedInputs: Record<string, WorkflowInput> = {};
    for (const check of collections[collectionName]) {
      const { checkName, specification, inputs: checkInputs } = check;
      const checkWith: Record<string, string> = {};
      // Merge this check's inputs into the collection-level inputs and
      // forward each one to the called workflow.
      combinedInputs = { ...combinedInputs, ...checkInputs };
      for (const inputName of Object.keys(checkInputs)) {
        checkWith[inputName] = "${{ inputs." + inputName + " }}";
      }
      jobs[checkName] = {
        name: specification.name,
        permissions: {
          contents: "read",
          "security-events": "read",
        },
        uses: `./.github/workflows/__${checkName}.yml`,
        with: checkWith,
      };
    }

    const collectionWorkflow = {
      name: `Manual Check - ${collectionName}`,
      env: {
        GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}",
        GO111MODULE: "auto",
      },
      on: {
        workflow_dispatch: {
          inputs: combinedInputs,
        },
      },
      jobs,
    };

    const outputPath = path.join(OUTPUT_DIR, `__${collectionName}.yml`);
    writeYaml(outputPath, collectionWorkflow);
  }

  console.log(
    `\nDone. Wrote ${checkFiles.length} workflow file(s) to ${OUTPUT_DIR}`,
  );
}

main();

View File

@@ -1,185 +0,0 @@
#!/usr/bin/env python3
"""
Sync-back script to automatically update action versions in source templates
from the generated workflow files after Dependabot updates.
This script scans the generated workflow files (.github/workflows/__*.yml) to find
all external action versions used, then updates:
1. Hardcoded action versions in pr-checks/sync.py
2. Action version references in template files in pr-checks/checks/
The script automatically detects all actions used in generated workflows and
preserves version comments (e.g., # v1.2.3) when syncing versions.
This ensures that when Dependabot updates action versions in generated workflows,
those changes are properly synced back to the source templates. Regular workflow
files are updated directly by Dependabot and don't need sync-back.
"""
import os
import re
import glob
import argparse
import sys
from pathlib import Path
from typing import Dict, List
def scan_generated_workflows(workflow_dir: str) -> Dict[str, str]:
    """
    Scan generated workflow files to extract the latest action versions.

    Reads every `__*.yml` file in `workflow_dir`, finds all `uses:` lines,
    and records the version string (including any trailing `# comment`)
    for each non-local action.

    Args:
        workflow_dir: Path to .github/workflows directory

    Returns:
        Dictionary mapping action names to their latest versions
        (including comments), with trailing whitespace removed.
    """
    # Captures: action_name@version_with_possible_comment
    uses_pattern = re.compile(r'uses:\s+([^/\s]+/[^@\s]+)@([^@\n]+)')
    versions: Dict[str, str] = {}
    for workflow_path in glob.glob(os.path.join(workflow_dir, "__*.yml")):
        with open(workflow_path, 'r') as handle:
            text = handle.read()
        for name, version in uses_pattern.findall(text):
            # Skip local actions referenced by relative path (./...).
            if name.startswith('./'):
                continue
            # Versions are assumed to be consistent across files (this should
            # be the case on a Dependabot update PR), so last-seen wins.
            versions[name] = version.rstrip()
    return versions
def update_sync_py(sync_py_path: str, action_versions: Dict[str, str]) -> bool:
    """
    Update hardcoded action versions in pr-checks/sync.py.

    Args:
        sync_py_path: Path to sync.py file
        action_versions: Dictionary of action names to versions; values may
            carry a trailing ``# comment``, which is stripped here because
            sync.py stores bare versions.

    Returns:
        True if file was modified, False otherwise

    Raises:
        FileNotFoundError: If ``sync_py_path`` does not exist.
    """
    if not os.path.exists(sync_py_path):
        raise FileNotFoundError(f"Could not find {sync_py_path}")

    with open(sync_py_path, 'r') as handle:
        updated = handle.read()
    before = updated

    for action_name, version_with_comment in action_versions.items():
        # Drop any trailing comment from the version string.
        if '#' in version_with_comment:
            version = version_with_comment.split('#')[0].strip()
        else:
            version = version_with_comment.strip()
        # Match entries of the form 'uses': 'owner/action@version'.
        # Note that this will break if we store an Action uses reference in a
        # variable - that's a risk we're happy to take since in that case the
        # PR checks will just fail.
        pattern = rf"('uses':\s*'){re.escape(action_name)}@(?:[^']+)(')"
        updated = re.sub(pattern, rf"\1{action_name}@{version}\2", updated)

    if updated == before:
        print(f"No changes needed in {sync_py_path}")
        return False

    with open(sync_py_path, 'w') as handle:
        handle.write(updated)
    print(f"Updated {sync_py_path}")
    return True
def update_template_files(checks_dir: str, action_versions: Dict[str, str]) -> List[str]:
    """
    Update action versions in template files in pr-checks/checks/.

    Args:
        checks_dir: Path to pr-checks/checks directory
        action_versions: Dictionary of action names to versions (may include
            comments, which are written through verbatim)

    Returns:
        List of files that were modified
    """
    modified_files: List[str] = []
    for file_path in glob.glob(os.path.join(checks_dir, "*.yml")):
        with open(file_path, 'r') as handle:
            text = handle.read()
        original = text

        for action_name, version_with_comment in action_versions.items():
            # Replace `uses: owner/action@<anything>` with the new version,
            # keeping any `# comment` that is part of the version string
            # (e.g. `@<sha> # v1.2.3`).
            pattern = rf"(uses:\s+{re.escape(action_name)})@(?:[^@\n]+)"
            text = re.sub(pattern, rf"\1@{version_with_comment}", text)

        if text != original:
            with open(file_path, 'w') as handle:
                handle.write(text)
            modified_files.append(file_path)
            print(f"Updated {file_path}")
    return modified_files
def main():
    """Scan generated workflows and sync action versions back to the source
    templates and sync.py. Returns a process exit code (0 on success, 1 when
    no action versions were found)."""
    parser = argparse.ArgumentParser(description="Sync action versions from generated workflows back to templates")
    parser.add_argument("--verbose", "-v", action="store_true", help="Enable verbose output")
    args = parser.parse_args()

    # Get the repository root (assuming script is in pr-checks/)
    script_dir = Path(__file__).parent
    repo_root = script_dir.parent
    workflow_dir = repo_root / ".github" / "workflows"
    checks_dir = script_dir / "checks"
    sync_py_path = script_dir / "sync.py"

    print("Scanning generated workflows for latest action versions...")
    action_versions = scan_generated_workflows(str(workflow_dir))

    if args.verbose:
        print("Found action versions:")
        for action, version in action_versions.items():
            print(f" {action}@{version}")

    if not action_versions:
        print("No action versions found in generated workflows")
        return 1

    # Update files
    print("\nUpdating source files...")
    modified_files = []

    # Update sync.py
    if update_sync_py(str(sync_py_path), action_versions):
        modified_files.append(str(sync_py_path))

    # Update template files
    template_modified = update_template_files(str(checks_dir), action_versions)
    modified_files.extend(template_modified)

    if modified_files:
        print(f"\nSync completed. Modified {len(modified_files)} files:")
        for file_path in modified_files:
            print(f" {file_path}")
    else:
        print("\nNo files needed updating - all action versions are already in sync")
    return 0


if __name__ == "__main__":
    sys.exit(main())

250
pr-checks/sync_back.test.ts Executable file
View File

@@ -0,0 +1,250 @@
#!/usr/bin/env npx tsx
/*
Tests for the sync_back.ts script
*/
import * as assert from "node:assert/strict";
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { afterEach, beforeEach, describe, it } from "node:test";
import {
scanGeneratedWorkflows,
updateSyncTs,
updateTemplateFiles,
} from "./sync_back";
// Shared per-test fixture state, (re)created by `beforeEach`.
let testDir: string;
let workflowDir: string;
let checksDir: string;
let syncTsPath: string;

beforeEach(() => {
  /** Set up temporary directories and files for testing */
  testDir = fs.mkdtempSync(path.join(os.tmpdir(), "sync-back-test-"));
  workflowDir = path.join(testDir, ".github", "workflows");
  checksDir = path.join(testDir, "pr-checks", "checks");
  fs.mkdirSync(workflowDir, { recursive: true });
  fs.mkdirSync(checksDir, { recursive: true });

  // Create sync.ts file path
  syncTsPath = path.join(testDir, "pr-checks", "sync.ts");
});

afterEach(() => {
  /** Clean up temporary directories */
  fs.rmSync(testDir, { recursive: true, force: true });
});
// Tests for extracting action versions from generated workflow files.
describe("scanGeneratedWorkflows", () => {
  it("basic workflow scanning", () => {
    /** Test basic workflow scanning functionality */
    const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v5
- uses: actions/setup-go@v6
`;
    fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);

    const result = scanGeneratedWorkflows(workflowDir);

    assert.equal(result["actions/checkout"], "v4");
    assert.equal(result["actions/setup-node"], "v5");
    assert.equal(result["actions/setup-go"], "v6");
  });

  it("scanning workflows with version comments", () => {
    /** Test scanning workflows with version comments */
    const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0
- uses: actions/setup-python@v6 # Latest Python
`;
    fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);

    const result = scanGeneratedWorkflows(workflowDir);

    assert.equal(result["actions/checkout"], "v4");
    // SHA-pinned versions keep their trailing `# vX.Y.Z` comment.
    assert.equal(
      result["ruby/setup-ruby"],
      "44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
    );
    assert.equal(result["actions/setup-python"], "v6 # Latest Python");
  });

  it("ignores local actions", () => {
    /** Test that local actions (starting with ./) are ignored */
    const workflowContent = `
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/local-action
- uses: ./another-local-action@v1
`;
    fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent);

    const result = scanGeneratedWorkflows(workflowDir);

    assert.equal(result["actions/checkout"], "v4");
    assert.equal("./.github/actions/local-action" in result, false);
    assert.equal("./another-local-action" in result, false);
  });
});
// Tests for rewriting hardcoded action versions inside sync.ts.
describe("updateSyncTs", () => {
  it("updates sync.ts file", () => {
    /** Test updating sync.ts file */
    const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v4",
with: { "node-version": "16" },
},
{
uses: "actions/setup-go@v5",
with: { "go-version": "1.19" },
},
];
`;
    fs.writeFileSync(syncTsPath, syncTsContent);

    const actionVersions = {
      "actions/setup-node": "v5",
      "actions/setup-go": "v6",
    };

    const result = updateSyncTs(syncTsPath, actionVersions);

    assert.equal(result, true);
    const updatedContent = fs.readFileSync(syncTsPath, "utf8");
    assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"'));
    assert.ok(updatedContent.includes('uses: "actions/setup-go@v6"'));
  });

  it("strips comments from versions", () => {
    /** Test updating sync.ts file when versions have comments */
    const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v4",
with: { "node-version": "16" },
},
];
`;
    fs.writeFileSync(syncTsPath, syncTsContent);

    const actionVersions = {
      "actions/setup-node": "v5 # Latest version",
    };

    const result = updateSyncTs(syncTsPath, actionVersions);

    assert.equal(result, true);
    const updatedContent = fs.readFileSync(syncTsPath, "utf8");
    // sync.ts should get the version without comment
    assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"'));
    assert.ok(!updatedContent.includes("# Latest version"));
  });

  it("returns false when no changes are needed", () => {
    /** Test that updateSyncTs returns false when no changes are needed */
    const syncTsContent = `
const steps = [
{
uses: "actions/setup-node@v5",
with: { "node-version": "16" },
},
];
`;
    fs.writeFileSync(syncTsPath, syncTsContent);

    const actionVersions = {
      "actions/setup-node": "v5",
    };

    const result = updateSyncTs(syncTsPath, actionVersions);

    assert.equal(result, false);
  });
});
describe("updateTemplateFiles", () => {
it("updates template files", () => {
/** Test updating template files */
const templateContent = `
name: Test Template
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v4
with:
node-version: 16
`;
const templatePath = path.join(checksDir, "test.yml");
fs.writeFileSync(templatePath, templateContent);
const actionVersions = {
"actions/checkout": "v4",
"actions/setup-node": "v5 # Latest",
};
const result = updateTemplateFiles(checksDir, actionVersions);
assert.equal(result.length, 1);
assert.ok(result.includes(templatePath));
const updatedContent = fs.readFileSync(templatePath, "utf8");
assert.ok(updatedContent.includes("uses: actions/checkout@v4"));
assert.ok(updatedContent.includes("uses: actions/setup-node@v5 # Latest"));
});
it("preserves version comments", () => {
/** Test that updating template files preserves version comments */
const templateContent = `
name: Test Template
steps:
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0
`;
const templatePath = path.join(checksDir, "test.yml");
fs.writeFileSync(templatePath, templateContent);
const actionVersions = {
"ruby/setup-ruby":
"55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
};
const result = updateTemplateFiles(checksDir, actionVersions);
assert.equal(result.length, 1);
const updatedContent = fs.readFileSync(templatePath, "utf8");
assert.ok(
updatedContent.includes(
"uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0",
),
);
});
});

220
pr-checks/sync_back.ts Executable file
View File

@@ -0,0 +1,220 @@
#!/usr/bin/env npx tsx
/*
Sync-back script to automatically update action versions in source templates
from the generated workflow files after Dependabot updates.
This script scans the generated workflow files (.github/workflows/__*.yml) to find
all external action versions used, then updates:
1. Hardcoded action versions in pr-checks/sync.ts
2. Action version references in template files in pr-checks/checks/
The script automatically detects all actions used in generated workflows and
preserves version comments (e.g., # v1.2.3) when syncing versions.
This ensures that when Dependabot updates action versions in generated workflows,
those changes are properly synced back to the source templates. Regular workflow
files are updated directly by Dependabot and don't need sync-back.
*/
import { parseArgs } from "node:util";
import * as fs from "fs";
import * as path from "path";
// Paths are resolved relative to this script (which lives in pr-checks/):
// the source templates are in pr-checks/checks, the generated workflows in
// .github/workflows, and the hardcoded versions in pr-checks/sync.ts.
const THIS_DIR = __dirname;
const CHECKS_DIR = path.join(THIS_DIR, "checks");
const WORKFLOW_DIR = path.join(THIS_DIR, "..", ".github", "workflows");
const SYNC_TS_PATH = path.join(THIS_DIR, "sync.ts");
/**
 * Scan generated workflow files to extract the latest action versions.
 *
 * Only files named `__*.yml` (the generated workflows) are read; regular
 * workflows are updated directly by Dependabot and need no sync-back.
 *
 * @param workflowDir - Path to .github/workflows directory
 * @returns Map from action names to their latest versions (including any
 *          trailing `# comment`, e.g. a pinned SHA annotated with its tag)
 */
export function scanGeneratedWorkflows(workflowDir: string): Record<string, string> {
  const versions: Record<string, string> = {};
  // Captures `owner/name` and the version ref (which may include a comment).
  const usesPattern = /uses:\s+([^/\s]+\/[^@\s]+)@([^@\n]+)/g;
  for (const fileName of fs.readdirSync(workflowDir)) {
    if (!(fileName.startsWith("__") && fileName.endsWith(".yml"))) {
      continue;
    }
    const contents = fs.readFileSync(path.join(workflowDir, fileName), "utf8");
    for (const match of contents.matchAll(usesPattern)) {
      const [, actionName, rawVersion] = match;
      // Skip local actions (path references starting with `./`); only
      // external actions are tracked by Dependabot.
      if (!actionName.startsWith("./")) {
        // Later occurrences overwrite earlier ones; versions should be
        // consistent across files on a Dependabot update PR anyway.
        versions[actionName] = rawVersion.trimEnd();
      }
    }
  }
  return versions;
}
/**
 * Update hardcoded action versions in pr-checks/sync.ts
 *
 * sync.ts stores bare versions, so any trailing `# comment` on a version is
 * dropped before substitution.
 *
 * @param syncTsPath - Path to sync.ts file
 * @param actionVersions - Map of action names to versions (may include comments)
 * @returns True if the file was modified, false otherwise
 * @throws Error if `syncTsPath` does not exist
 */
export function updateSyncTs(
  syncTsPath: string,
  actionVersions: Record<string, string>,
): boolean {
  if (!fs.existsSync(syncTsPath)) {
    throw new Error(`Could not find ${syncTsPath}`);
  }
  const before = fs.readFileSync(syncTsPath, "utf8");
  let after = before;
  for (const actionName of Object.keys(actionVersions)) {
    const versionWithComment = actionVersions[actionName];
    // Strip a trailing `# comment`, keeping only the bare version.
    const hashIndex = versionWithComment.indexOf("#");
    const bareVersion =
      hashIndex >= 0
        ? versionWithComment.slice(0, hashIndex).trim()
        : versionWithComment.trim();
    // Rewrite occurrences of `uses: "owner/name@<anything>"`. Note that this
    // will break if we store an Action uses reference in a variable - that's
    // a risk we're happy to take since in that case the PR checks will just
    // fail.
    const escapedName = actionName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    after = after.replace(
      new RegExp(`(uses:\\s*")${escapedName}@(?:[^"]+)(")`, "g"),
      `$1${actionName}@${bareVersion}$2`,
    );
  }
  if (after === before) {
    console.info(`No changes needed in ${syncTsPath}`);
    return false;
  }
  fs.writeFileSync(syncTsPath, after, "utf8");
  console.info(`Updated ${syncTsPath}`);
  return true;
}
/**
 * Update action versions in template files in pr-checks/checks/
 *
 * Unlike sync.ts, templates retain the full version string including any
 * trailing `# comment` (e.g. a pinned SHA annotated with its tag).
 *
 * @param checksDir - Path to pr-checks/checks directory
 * @param actionVersions - Map of action names to versions (may include comments)
 * @returns List of files that were modified
 */
export function updateTemplateFiles(
  checksDir: string,
  actionVersions: Record<string, string>,
): string[] {
  const changed: string[] = [];
  for (const entry of fs.readdirSync(checksDir)) {
    if (!entry.endsWith(".yml")) {
      continue;
    }
    const templatePath = path.join(checksDir, entry);
    const before = fs.readFileSync(templatePath, "utf8");
    let after = before;
    for (const [actionName, versionWithComment] of Object.entries(
      actionVersions,
    )) {
      // Rewrite `uses: owner/name@v4` or `uses: owner/name@sha # comment`
      // up to the end of the line, substituting the full version string.
      const escapedName = actionName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      after = after.replace(
        new RegExp(`(uses:\\s+${escapedName})@(?:[^@\n]+)`, "g"),
        `$1@${versionWithComment}`,
      );
    }
    if (after !== before) {
      fs.writeFileSync(templatePath, after, "utf8");
      changed.push(templatePath);
      console.info(`Updated ${templatePath}`);
    }
  }
  return changed;
}
/**
 * Entry point for the sync-back script.
 *
 * Scans the generated workflows for action versions, then rewrites
 * pr-checks/sync.ts and the templates in pr-checks/checks to match.
 *
 * @returns Process exit code: 0 on success, 1 if no versions were found.
 */
function main(): number {
  const parsed = parseArgs({
    options: {
      // -v / --verbose prints every action version that was discovered.
      verbose: {
        type: "boolean",
        short: "v",
        default: false,
      },
    },
    strict: true,
  });
  const isVerbose = parsed.values.verbose ?? false;

  console.info("Scanning generated workflows for latest action versions...");
  const actionVersions = scanGeneratedWorkflows(WORKFLOW_DIR);
  if (isVerbose) {
    console.info("Found action versions:");
    for (const [action, version] of Object.entries(actionVersions)) {
      console.info(` ${action}@${version}`);
    }
  }
  if (Object.keys(actionVersions).length === 0) {
    console.error("No action versions found in generated workflows");
    return 1;
  }

  // Apply the discovered versions to sync.ts and the check templates.
  console.info("\nUpdating source files...");
  const modifiedFiles: string[] = [];
  if (updateSyncTs(SYNC_TS_PATH, actionVersions)) {
    modifiedFiles.push(SYNC_TS_PATH);
  }
  modifiedFiles.push(...updateTemplateFiles(CHECKS_DIR, actionVersions));

  if (modifiedFiles.length === 0) {
    console.info(
      "\nNo files needed updating - all action versions are already in sync",
    );
  } else {
    console.info(`\nSync completed. Modified ${modifiedFiles.length} files:`);
    for (const filePath of modifiedFiles) {
      console.info(` ${filePath}`);
    }
  }
  return 0;
}

// Only call `main` if this script was run directly.
if (require.main === module) {
  process.exit(main());
}

View File

@@ -1,237 +0,0 @@
#!/usr/bin/env python3
"""
Tests for the sync_back.py script
"""
import os
import shutil
import tempfile
import unittest
import sync_back
class TestSyncBack(unittest.TestCase):
    """Unit tests for the sync_back helpers.

    Each test builds a throwaway directory tree mirroring the repository
    layout (.github/workflows and pr-checks/) and runs the sync-back
    functions against it.
    """

    def setUp(self):
        """Set up temporary directories and files for testing"""
        self.test_dir = tempfile.mkdtemp()
        self.workflow_dir = os.path.join(self.test_dir, ".github", "workflows")
        self.checks_dir = os.path.join(self.test_dir, "pr-checks", "checks")
        os.makedirs(self.workflow_dir)
        os.makedirs(self.checks_dir)
        # Create sync.py file
        # (the path only; individual tests write the contents they need)
        self.sync_py_path = os.path.join(self.test_dir, "pr-checks", "sync.py")

    def tearDown(self):
        """Clean up temporary directories"""
        shutil.rmtree(self.test_dir)

    def test_scan_generated_workflows_basic(self):
        """Test basic workflow scanning functionality"""
        # Create a test generated workflow file
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v5
- uses: actions/setup-go@v6
"""
        # The `__` prefix marks the file as a generated workflow.
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        self.assertEqual(result['actions/setup-node'], 'v5')
        self.assertEqual(result['actions/setup-go'], 'v6')

    def test_scan_generated_workflows_with_comments(self):
        """Test scanning workflows with version comments"""
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0
- uses: actions/setup-python@v6 # Latest Python
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        # Trailing `# comment` text is kept as part of the version string.
        self.assertEqual(result['ruby/setup-ruby'], '44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0')
        self.assertEqual(result['actions/setup-python'], 'v6 # Latest Python')

    def test_scan_generated_workflows_ignores_local_actions(self):
        """Test that local actions (starting with ./) are ignored"""
        workflow_content = """
name: Test Workflow
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/local-action
- uses: ./another-local-action@v1
"""
        with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f:
            f.write(workflow_content)
        result = sync_back.scan_generated_workflows(self.workflow_dir)
        self.assertEqual(result['actions/checkout'], 'v4')
        # Local path references must not appear in the results.
        self.assertNotIn('./.github/actions/local-action', result)
        self.assertNotIn('./another-local-action', result)

    def test_update_sync_py(self):
        """Test updating sync.py file"""
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v4',
'with': {'node-version': '16'}
},
{
'uses': 'actions/setup-go@v5',
'with': {'go-version': '1.19'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5',
            'actions/setup-go': 'v6'
        }
        # update_sync_py returns True when the file contents changed.
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertTrue(result)
        with open(self.sync_py_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("'uses': 'actions/setup-node@v5'", updated_content)
        self.assertIn("'uses': 'actions/setup-go@v6'", updated_content)

    def test_update_sync_py_with_comments(self):
        """Test updating sync.py file when versions have comments"""
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v4',
'with': {'node-version': '16'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5 # Latest version'
        }
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertTrue(result)
        with open(self.sync_py_path, 'r') as f:
            updated_content = f.read()
        # sync.py should get the version without comment
        self.assertIn("'uses': 'actions/setup-node@v5'", updated_content)
        self.assertNotIn("# Latest version", updated_content)

    def test_update_template_files(self):
        """Test updating template files"""
        template_content = """
name: Test Template
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v4
with:
node-version: 16
"""
        template_path = os.path.join(self.checks_dir, "test.yml")
        with open(template_path, 'w') as f:
            f.write(template_content)
        action_versions = {
            'actions/checkout': 'v4',
            'actions/setup-node': 'v5 # Latest'
        }
        result = sync_back.update_template_files(self.checks_dir, action_versions)
        # Exactly the one modified template should be reported back.
        self.assertEqual(len(result), 1)
        self.assertIn(template_path, result)
        with open(template_path, 'r') as f:
            updated_content = f.read()
        self.assertIn("uses: actions/checkout@v4", updated_content)
        # Unlike sync.py, templates keep the comment portion of the version.
        self.assertIn("uses: actions/setup-node@v5 # Latest", updated_content)

    def test_update_template_files_preserves_comments(self):
        """Test that updating template files preserves version comments"""
        template_content = """
name: Test Template
steps:
- uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0
"""
        template_path = os.path.join(self.checks_dir, "test.yml")
        with open(template_path, 'w') as f:
            f.write(template_content)
        action_versions = {
            'ruby/setup-ruby': '55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0'
        }
        result = sync_back.update_template_files(self.checks_dir, action_versions)
        self.assertEqual(len(result), 1)
        with open(template_path, 'r') as f:
            updated_content = f.read()
        # The pinned SHA and its trailing tag comment are replaced together.
        self.assertIn("uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", updated_content)

    def test_no_changes_needed(self):
        """Test that functions return False/empty when no changes are needed"""
        # Test sync.py with no changes needed
        sync_py_content = """
steps = [
{
'uses': 'actions/setup-node@v5',
'with': {'node-version': '16'}
}
]
"""
        with open(self.sync_py_path, 'w') as f:
            f.write(sync_py_content)
        action_versions = {
            'actions/setup-node': 'v5'
        }
        # The version already matches, so no write should occur.
        result = sync_back.update_sync_py(self.sync_py_path, action_versions)
        self.assertFalse(result)


# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()

View File

@@ -100,7 +100,7 @@ test("computeAutomationID()", async (t) => {
);
});
test("getPullRequestBranches() with pull request context", (t) => {
test.serial("getPullRequestBranches() with pull request context", (t) => {
withMockedContext(
{
pull_request: {
@@ -119,89 +119,104 @@ test("getPullRequestBranches() with pull request context", (t) => {
);
});
test("getPullRequestBranches() returns undefined with push context", (t) => {
withMockedContext(
{
push: {
ref: "refs/heads/main",
},
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
test("getPullRequestBranches() with Default Setup environment variables", (t) => {
withMockedContext({}, () => {
withMockedEnv(
test.serial(
"getPullRequestBranches() returns undefined with push context",
(t) => {
withMockedContext(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.deepEqual(getPullRequestBranches(), {
base: "main",
head: "refs/heads/feature-branch",
});
t.is(isAnalyzingPullRequest(), true);
},
);
});
});
test("getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: undefined,
push: {
ref: "refs/heads/main",
},
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
},
);
test("getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
test.serial(
"getPullRequestBranches() with Default Setup environment variables",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.deepEqual(getPullRequestBranches(), {
base: "main",
head: "refs/heads/feature-branch",
});
t.is(isAnalyzingPullRequest(), true);
},
);
});
},
);
test("getPullRequestBranches() returns undefined when no PR context", (t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
});
test.serial(
"getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: "refs/heads/feature-branch",
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test("initializeEnvironment", (t) => {
test.serial(
"getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: "main",
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test.serial(
"getPullRequestBranches() returns undefined when no PR context",
(t) => {
withMockedContext({}, () => {
withMockedEnv(
{
CODE_SCANNING_REF: undefined,
CODE_SCANNING_BASE_BRANCH: undefined,
},
() => {
t.is(getPullRequestBranches(), undefined);
t.is(isAnalyzingPullRequest(), false);
},
);
});
},
);
test.serial("initializeEnvironment", (t) => {
initializeEnvironment("1.2.3");
t.deepEqual(process.env[EnvVar.VERSION], "1.2.3");
});
test("fixCodeQualityCategory", (t) => {
test.serial("fixCodeQualityCategory", (t) => {
withMockedEnv(
{
GITHUB_EVENT_NAME: "dynamic",
@@ -249,14 +264,17 @@ test("fixCodeQualityCategory", (t) => {
);
});
test("isDynamicWorkflow() returns true if event name is `dynamic`", (t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
t.assert(isDynamicWorkflow());
process.env.GITHUB_EVENT_NAME = "push";
t.false(isDynamicWorkflow());
});
test.serial(
"isDynamicWorkflow() returns true if event name is `dynamic`",
(t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
t.assert(isDynamicWorkflow());
process.env.GITHUB_EVENT_NAME = "push";
t.false(isDynamicWorkflow());
},
);
test("isDefaultSetup() returns true when expected", (t) => {
test.serial("isDefaultSetup() returns true when expected", (t) => {
process.env.GITHUB_EVENT_NAME = "dynamic";
process.env[EnvVar.ANALYSIS_KEY] = "dynamic/github-code-scanning";
t.assert(isDefaultSetup());

View File

@@ -50,31 +50,40 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) =>
});
});
test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns("code-scanning,code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
});
test.serial(
"getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns("code-scanning,code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
},
);
test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("quality-queries").returns("code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
});
test.serial(
"getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("code-scanning");
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("quality-queries").returns("code-quality");
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.assert(result.includes(AnalysisKind.CodeScanning));
t.assert(result.includes(AnalysisKind.CodeQuality));
},
);
test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
});
test.serial(
"getAnalysisKinds - throws if `analysis-kinds` input is invalid",
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
},
);
// Test the compatibility matrix by looping through all analysis kinds.
const analysisKinds = Object.values(AnalysisKind);
@@ -86,25 +95,31 @@ for (let i = 0; i < analysisKinds.length; i++) {
if (analysisKind === otherAnalysis) continue;
if (compatibilityMatrix[analysisKind].has(otherAnalysis)) {
test(`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
});
test.serial(
`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`,
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
},
);
} else {
test(`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
});
test.serial(
`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`,
async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
},
);
}
}
}
@@ -122,44 +137,50 @@ test("Code Scanning configuration does not accept other SARIF extensions", (t) =
}
});
test("Risk Assessment configuration transforms SARIF upload payload", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
test.serial(
"Risk Assessment configuration transforms SARIF upload payload",
(t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
});
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
},
);
test("Risk Assessment configuration throws for negative assessment IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
});
test.serial(
"Risk Assessment configuration throws for negative assessment IDs",
(t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
},
);
test("Risk Assessment configuration throws for invalid IDs", (t) => {
test.serial("Risk Assessment configuration throws for invalid IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "foo";
t.throws(
() =>

View File

@@ -28,9 +28,7 @@ test("analyze action with RAM & threads from environment variables", async (t) =
// it a bit to 20s.
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(statusReport, "createStatusReportBase")
.resolves({} as statusReport.StatusReportBase);
@@ -54,7 +52,6 @@ test("analyze action with RAM & threads from environment variables", async (t) =
const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput");
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
setupActionsVars(tmpDir, tmpDir);
mockFeatureFlagApiEndpoint(200, {});
// When there are no action inputs for RAM and threads, the action uses

View File

@@ -26,9 +26,7 @@ setupTests(test);
test("analyze action with RAM & threads from action inputs", async (t) => {
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
process.env["GITHUB_API_URL"] = "https://api.github.com";
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(statusReport, "createStatusReportBase")
.resolves({} as statusReport.StatusReportBase);
@@ -51,7 +49,6 @@ test("analyze action with RAM & threads from action inputs", async (t) => {
optionalInputStub.withArgs("expect-error").returns("false");
sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion);
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
setupActionsVars(tmpDir, tmpDir);
mockFeatureFlagApiEndpoint(200, {});
process.env["CODEQL_THREADS"] = "1";

View File

@@ -32,7 +32,7 @@ setupTests(test);
* - Checks that the duration fields are populated for the correct language.
* - Checks that the QA telemetry status report fields are populated when the QA feature flag is enabled.
*/
test("status report fields", async (t) => {
test.serial("status report fields", async (t) => {
return await util.withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);

Some files were not shown because too many files have changed in this diff Show More