diff --git a/.github/workflows/__all-platform-bundle.yml b/.github/workflows/__all-platform-bundle.yml index 66700fd68..1be5ba9a4 100644 --- a/.github/workflows/__all-platform-bundle.yml +++ b/.github/workflows/__all-platform-bundle.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: all-platform-bundle: strategy: @@ -95,7 +94,7 @@ jobs: - id: init uses: ./../action/init with: - # Swift is not supported on Ubuntu so we manually exclude it from the list here + # Swift is not supported on Ubuntu so we manually exclude it from the list here languages: cpp,csharp,go,java,javascript,python,ruby tools: ${{ steps.prepare-test.outputs.tools-url }} - name: Build code diff --git a/.github/workflows/__analysis-kinds.yml b/.github/workflows/__analysis-kinds.yml index e59c1576b..1e43775ff 100644 --- a/.github/workflows/__analysis-kinds.yml +++ b/.github/workflows/__analysis-kinds.yml @@ -87,24 +87,24 @@ jobs: tools: ${{ steps.prepare-test.outputs.tools-url }} - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - post-processed-sarif-path: ${{ runner.temp }}/post-processed + post-processed-sarif-path: '${{ runner.temp }}/post-processed' - name: Upload SARIF files - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: | analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }} - path: ${{ runner.temp }}/results/*.sarif + path: '${{ runner.temp }}/results/*.sarif' retention-days: 7 - name: Upload post-processed SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: | post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }} 
- path: ${{ runner.temp }}/post-processed + path: '${{ runner.temp }}/post-processed' retention-days: 7 if-no-files-found: error @@ -112,7 +112,7 @@ jobs: if: contains(matrix.analysis-kinds, 'code-scanning') uses: actions/github-script@v8 env: - SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif + SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif' EXPECT_PRESENT: 'false' with: script: ${{ env.CHECK_SCRIPT }} @@ -120,7 +120,7 @@ jobs: if: contains(matrix.analysis-kinds, 'code-quality') uses: actions/github-script@v8 env: - SARIF_PATH: ${{ runner.temp }}/results/javascript.quality.sarif + SARIF_PATH: '${{ runner.temp }}/results/javascript.quality.sarif' EXPECT_PRESENT: 'true' with: script: ${{ env.CHECK_SCRIPT }} diff --git a/.github/workflows/__analyze-ref-input.yml b/.github/workflows/__analyze-ref-input.yml index d28bbeb6a..9b4ddcdb7 100644 --- a/.github/workflows/__analyze-ref-input.yml +++ b/.github/workflows/__analyze-ref-input.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: analyze-ref-input: strategy: @@ -94,11 +83,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' 
}} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -107,13 +91,12 @@ jobs: with: tools: ${{ steps.prepare-test.outputs.tools-url }} languages: cpp,csharp,java,javascript,python - config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ - github.sha }} + config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }} - name: Build code run: ./build.sh - uses: ./../action/analyze with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' env: CODEQL_ACTION_TEST_MODE: true diff --git a/.github/workflows/__autobuild-action.yml b/.github/workflows/__autobuild-action.yml index ce7fe4be7..ed5cf1937 100644 --- a/.github/workflows/__autobuild-action.yml +++ b/.github/workflows/__autobuild-action.yml @@ -82,7 +82,7 @@ jobs: tools: ${{ steps.prepare-test.outputs.tools-url }} - uses: ./../action/autobuild env: - # Explicitly disable the CLR tracer. + # Explicitly disable the CLR tracer. 
COR_ENABLE_PROFILING: '' COR_PROFILER: '' COR_PROFILER_PATH_64: '' diff --git a/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml b/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml index 6711dc727..4a411ad1b 100644 --- a/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml +++ b/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml @@ -42,8 +42,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}} + group: autobuild-direct-tracing-with-working-dir-${{github.ref}}-${{inputs.java-version}} jobs: autobuild-direct-tracing-with-working-dir: strategy: diff --git a/.github/workflows/__build-mode-autobuild.yml b/.github/workflows/__build-mode-autobuild.yml index 3d05b3963..2863793fd 100644 --- a/.github/workflows/__build-mode-autobuild.yml +++ b/.github/workflows/__build-mode-autobuild.yml @@ -97,7 +97,7 @@ jobs: id: init with: build-mode: autobuild - db-location: ${{ runner.temp }}/customDbLocation + db-location: '${{ runner.temp }}/customDbLocation' languages: java tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__build-mode-manual.yml b/.github/workflows/__build-mode-manual.yml index 356c1b1fc..b2723d64f 100644 --- a/.github/workflows/__build-mode-manual.yml +++ b/.github/workflows/__build-mode-manual.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: build-mode-manual: strategy: @@ -92,7 +91,7 @@ jobs: id: init with: build-mode: manual - db-location: ${{ runner.temp }}/customDbLocation + db-location: '${{ runner.temp }}/customDbLocation' languages: java tools: 
${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__build-mode-none.yml b/.github/workflows/__build-mode-none.yml index a570869ba..5611d2381 100644 --- a/.github/workflows/__build-mode-none.yml +++ b/.github/workflows/__build-mode-none.yml @@ -64,7 +64,7 @@ jobs: id: init with: build-mode: none - db-location: ${{ runner.temp }}/customDbLocation + db-location: '${{ runner.temp }}/customDbLocation' languages: java tools: ${{ steps.prepare-test.outputs.tools-url }} @@ -77,7 +77,7 @@ jobs: exit 1 fi - # The latest nightly supports omitting the autobuild Action when the build mode is specified. + # The latest nightly supports omitting the autobuild Action when the build mode is specified. - uses: ./../action/autobuild if: matrix.version != 'nightly-latest' diff --git a/.github/workflows/__build-mode-rollback.yml b/.github/workflows/__build-mode-rollback.yml index a213bd267..b4dd4041b 100644 --- a/.github/workflows/__build-mode-rollback.yml +++ b/.github/workflows/__build-mode-rollback.yml @@ -68,7 +68,7 @@ jobs: id: init with: build-mode: none - db-location: ${{ runner.temp }}/customDbLocation + db-location: '${{ runner.temp }}/customDbLocation' languages: java tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__bundle-from-nightly.yml b/.github/workflows/__bundle-from-nightly.yml index c052bff67..a7dc934a0 100644 --- a/.github/workflows/__bundle-from-nightly.yml +++ b/.github/workflows/__bundle-from-nightly.yml @@ -66,7 +66,7 @@ jobs: tools: ${{ steps.prepare-test.outputs.tools-url }} languages: javascript - name: Fail if the CodeQL version is not a nightly - if: "!contains(steps.init.outputs.codeql-version, '+')" + if: ${{ !contains(steps.init.outputs.codeql-version, '+') }} run: exit 1 env: CODEQL_ACTION_TEST_MODE: true diff --git a/.github/workflows/__bundle-zstd.yml b/.github/workflows/__bundle-zstd.yml index d1ddf108f..ede1e320c 100644 --- a/.github/workflows/__bundle-zstd.yml +++ 
b/.github/workflows/__bundle-zstd.yml @@ -82,7 +82,7 @@ jobs: output: ${{ runner.temp }}/results upload-database: false - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: ${{ matrix.os }}-zstd-bundle.sarif path: ${{ runner.temp }}/results/javascript.sarif diff --git a/.github/workflows/__cleanup-db-cluster-dir.yml b/.github/workflows/__cleanup-db-cluster-dir.yml index 6d794e28c..cc41fea77 100644 --- a/.github/workflows/__cleanup-db-cluster-dir.yml +++ b/.github/workflows/__cleanup-db-cluster-dir.yml @@ -67,7 +67,7 @@ jobs: id: init with: build-mode: none - db-location: ${{ runner.temp }}/customDbLocation + db-location: '${{ runner.temp }}/customDbLocation' languages: javascript tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__config-export.yml b/.github/workflows/__config-export.yml index 72d76b93f..24549dd18 100644 --- a/.github/workflows/__config-export.yml +++ b/.github/workflows/__config-export.yml @@ -67,18 +67,18 @@ jobs: tools: ${{ steps.prepare-test.outputs.tools-url }} - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json - path: ${{ runner.temp }}/results/javascript.sarif + path: '${{ runner.temp }}/results/javascript.sarif' retention-days: 7 - name: Check config properties appear in SARIF uses: actions/github-script@v8 env: - SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif + SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif' with: script: | const fs = require('fs'); diff --git a/.github/workflows/__diagnostics-export.yml b/.github/workflows/__diagnostics-export.yml index 29c92d9ec..bdf6b0aee 100644 --- a/.github/workflows/__diagnostics-export.yml +++ b/.github/workflows/__diagnostics-export.yml @@ -78,18 +78,18 
@@ jobs: --ready-for-status-page - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json - path: ${{ runner.temp }}/results/javascript.sarif + path: '${{ runner.temp }}/results/javascript.sarif' retention-days: 7 - name: Check diagnostics appear in SARIF uses: actions/github-script@v8 env: - SARIF_PATH: ${{ runner.temp }}/results/javascript.sarif + SARIF_PATH: '${{ runner.temp }}/results/javascript.sarif' with: script: | const fs = require('fs'); diff --git a/.github/workflows/__export-file-baseline-information.yml b/.github/workflows/__export-file-baseline-information.yml index 395317ad2..b376d77f0 100644 --- a/.github/workflows/__export-file-baseline-information.yml +++ b/.github/workflows/__export-file-baseline-information.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: export-file-baseline-information: strategy: @@ -101,12 +100,12 @@ jobs: run: ./build.sh - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json - path: ${{ runner.temp }}/results/javascript.sarif + path: '${{ runner.temp }}/results/javascript.sarif' retention-days: 7 - name: Check results run: | diff --git a/.github/workflows/__go-custom-queries.yml b/.github/workflows/__go-custom-queries.yml index cc2120e86..f7d5a99f3 
100644 --- a/.github/workflows/__go-custom-queries.yml +++ b/.github/workflows/__go-custom-queries.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: go-custom-queries: strategy: diff --git a/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml b/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml index 9c2f42ec4..11497389f 100644 --- a/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml +++ b/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml @@ -77,7 +77,7 @@ jobs: with: languages: go tools: ${{ steps.prepare-test.outputs.tools-url }} - # Deliberately change Go after the `init` step + # Deliberately change Go after the `init` step - uses: actions/setup-go@v6 with: go-version: '1.20' @@ -85,12 +85,12 @@ jobs: run: go build main.go - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Check diagnostic appears in SARIF uses: actions/github-script@v8 env: - SARIF_PATH: ${{ runner.temp }}/results/go.sarif + SARIF_PATH: '${{ runner.temp }}/results/go.sarif' with: script: | const fs = require('fs'); diff --git a/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml b/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml index 18645dcc3..7d83904e6 100644 --- a/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml +++ b/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml @@ -42,8 +42,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}} + 
group: go-indirect-tracing-workaround-no-file-program-${{github.ref}}-${{inputs.go-version}} jobs: go-indirect-tracing-workaround-no-file-program: strategy: @@ -87,12 +86,12 @@ jobs: run: go build main.go - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Check diagnostic appears in SARIF uses: actions/github-script@v8 env: - SARIF_PATH: ${{ runner.temp }}/results/go.sarif + SARIF_PATH: '${{ runner.temp }}/results/go.sarif' with: script: | const fs = require('fs'); diff --git a/.github/workflows/__init-with-registries.yml b/.github/workflows/__init-with-registries.yml index 81532e847..940784361 100644 --- a/.github/workflows/__init-with-registries.yml +++ b/.github/workflows/__init-with-registries.yml @@ -50,7 +50,6 @@ jobs: permissions: contents: read packages: read - timeout-minutes: 45 runs-on: ${{ matrix.os }} steps: @@ -66,7 +65,7 @@ jobs: - name: Init with registries uses: ./../action/init with: - db-location: ${{ runner.temp }}/customDbLocation + db-location: '${{ runner.temp }}/customDbLocation' tools: ${{ steps.prepare-test.outputs.tools-url }} config-file: ./.github/codeql/codeql-config-registries.yml languages: javascript diff --git a/.github/workflows/__job-run-uuid-sarif.yml b/.github/workflows/__job-run-uuid-sarif.yml index da32ec432..a10ccd1d1 100644 --- a/.github/workflows/__job-run-uuid-sarif.yml +++ b/.github/workflows/__job-run-uuid-sarif.yml @@ -65,12 +65,12 @@ jobs: tools: ${{ steps.prepare-test.outputs.tools-url }} - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json - path: ${{ runner.temp }}/results/javascript.sarif + path: '${{ runner.temp }}/results/javascript.sarif' retention-days: 7 - name: Check results run: | diff --git 
a/.github/workflows/__language-aliases.yml b/.github/workflows/__language-aliases.yml index afdc089f3..d64abe876 100644 --- a/.github/workflows/__language-aliases.yml +++ b/.github/workflows/__language-aliases.yml @@ -63,7 +63,7 @@ jobs: languages: C#,java-kotlin,swift,typescript tools: ${{ steps.prepare-test.outputs.tools-url }} - - name: Check languages + - name: 'Check languages' run: | expected_languages="csharp,java,swift,javascript" actual_languages=$(jq -r '.languages | join(",")' "$RUNNER_TEMP"/config) diff --git a/.github/workflows/__local-bundle.yml b/.github/workflows/__local-bundle.yml index bc3ab5ed6..2a83eede8 100644 --- a/.github/workflows/__local-bundle.yml +++ b/.github/workflows/__local-bundle.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: local-bundle: strategy: @@ -94,11 +83,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -109,7 +93,7 
@@ jobs: - id: init uses: ./../action/init with: - # Swift is not supported on Ubuntu so we manually exclude it from the list here + # Swift is not supported on Ubuntu so we manually exclude it from the list here languages: cpp,csharp,go,java,javascript,python,ruby tools: ./codeql-bundle-linux64.tar.zst - name: Build code diff --git a/.github/workflows/__multi-language-autodetect.yml b/.github/workflows/__multi-language-autodetect.yml index c0a573ffc..9cae8d362 100644 --- a/.github/workflows/__multi-language-autodetect.yml +++ b/.github/workflows/__multi-language-autodetect.yml @@ -62,8 +62,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} jobs: multi-language-autodetect: strategy: @@ -144,9 +143,8 @@ jobs: - uses: ./../action/init id: init with: - db-location: ${{ runner.temp }}/customDbLocation - languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby' - || '' }} + db-location: '${{ runner.temp }}/customDbLocation' + languages: ${{ runner.os == 'Linux' && 'cpp,csharp,go,java,javascript,python,ruby' || '' }} tools: ${{ steps.prepare-test.outputs.tools-url }} - name: Build code diff --git a/.github/workflows/__packaging-codescanning-config-inputs-js.yml b/.github/workflows/__packaging-codescanning-config-inputs-js.yml index 43b70163a..be29a9cb0 100644 --- a/.github/workflows/__packaging-codescanning-config-inputs-js.yml +++ b/.github/workflows/__packaging-codescanning-config-inputs-js.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: 
string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: packaging-codescanning-config-inputs-js: strategy: @@ -105,18 +94,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: - config-file: .github/codeql/codeql-config-packaging3.yml + config-file: '.github/codeql/codeql-config-packaging3.yml' packs: +codeql-testing/codeql-pack1@1.0.0 languages: javascript tools: ${{ steps.prepare-test.outputs.tools-url }} @@ -124,15 +108,14 @@ jobs: run: ./build.sh - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Check results uses: ./../action/.github/actions/check-sarif with: sarif-file: ${{ runner.temp }}/results/javascript.sarif - queries-run: - javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block + queries-run: 
javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block queries-not-run: foo,bar - name: Assert Results diff --git a/.github/workflows/__packaging-config-inputs-js.yml b/.github/workflows/__packaging-config-inputs-js.yml index 7ea2729c8..f97103009 100644 --- a/.github/workflows/__packaging-config-inputs-js.yml +++ b/.github/workflows/__packaging-config-inputs-js.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: packaging-config-inputs-js: strategy: @@ -101,7 +100,7 @@ jobs: dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: - config-file: .github/codeql/codeql-config-packaging3.yml + config-file: '.github/codeql/codeql-config-packaging3.yml' packs: +codeql-testing/codeql-pack1@1.0.0 languages: javascript tools: ${{ steps.prepare-test.outputs.tools-url }} @@ -109,15 +108,14 @@ jobs: run: ./build.sh - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Check results uses: ./../action/.github/actions/check-sarif with: sarif-file: ${{ runner.temp }}/results/javascript.sarif - queries-run: - javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block + queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block queries-not-run: foo,bar - name: Assert Results diff --git a/.github/workflows/__packaging-config-js.yml b/.github/workflows/__packaging-config-js.yml index 7c921cecc..99bd171f9 100644 --- 
a/.github/workflows/__packaging-config-js.yml +++ b/.github/workflows/__packaging-config-js.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: packaging-config-js: strategy: @@ -101,22 +100,21 @@ jobs: dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: - config-file: .github/codeql/codeql-config-packaging.yml + config-file: '.github/codeql/codeql-config-packaging.yml' languages: javascript tools: ${{ steps.prepare-test.outputs.tools-url }} - name: Build code run: ./build.sh - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Check results uses: ./../action/.github/actions/check-sarif with: sarif-file: ${{ runner.temp }}/results/javascript.sarif - queries-run: - javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block + queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block queries-not-run: foo,bar - name: Assert Results diff --git a/.github/workflows/__packaging-inputs-js.yml b/.github/workflows/__packaging-inputs-js.yml index 224b06305..e5cd0182e 100644 --- a/.github/workflows/__packaging-inputs-js.yml +++ b/.github/workflows/__packaging-inputs-js.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: packaging-inputs-js: 
strategy: @@ -101,7 +100,7 @@ jobs: dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: - config-file: .github/codeql/codeql-config-packaging2.yml + config-file: '.github/codeql/codeql-config-packaging2.yml' languages: javascript packs: codeql-testing/codeql-pack1@1.0.0, codeql-testing/codeql-pack2, codeql-testing/codeql-pack3:other-query.ql tools: ${{ steps.prepare-test.outputs.tools-url }} @@ -109,14 +108,13 @@ jobs: run: ./build.sh - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' - name: Check results uses: ./../action/.github/actions/check-sarif with: sarif-file: ${{ runner.temp }}/results/javascript.sarif - queries-run: - javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block + queries-run: javascript/example/empty-or-one-block,javascript/example/empty-or-one-block,javascript/example/other-query-block,javascript/example/two-block queries-not-run: foo,bar - name: Assert Results diff --git a/.github/workflows/__remote-config.yml b/.github/workflows/__remote-config.yml index a026117a7..856b38aee 100644 --- a/.github/workflows/__remote-config.yml +++ b/.github/workflows/__remote-config.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - 
group: - remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: remote-config: strategy: @@ -96,11 +85,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -109,8 +93,7 @@ jobs: with: tools: ${{ steps.prepare-test.outputs.tools-url }} languages: cpp,csharp,java,javascript,python - config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ - github.sha }} + config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }} - name: Build code run: ./build.sh - uses: ./../action/analyze diff --git a/.github/workflows/__resolve-environment-action.yml b/.github/workflows/__resolve-environment-action.yml index 3acee8d64..8b3391064 100644 --- a/.github/workflows/__resolve-environment-action.yml +++ b/.github/workflows/__resolve-environment-action.yml @@ -84,8 +84,7 @@ jobs: language: javascript-typescript - name: Fail if JavaScript/TypeScript configuration present - if: - fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript + if: fromJSON(steps.resolve-environment-js.outputs.environment).configuration.javascript run: exit 1 env: CODEQL_ACTION_TEST_MODE: true diff --git a/.github/workflows/__split-workflow.yml b/.github/workflows/__split-workflow.yml index 9e1cad8e0..58e547f36 100644 --- a/.github/workflows/__split-workflow.yml +++ b/.github/workflows/__split-workflow.yml @@ -99,7 +99,7 @@ jobs: dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: - config-file: .github/codeql/codeql-config-packaging3.yml + config-file: 
'.github/codeql/codeql-config-packaging3.yml' packs: +codeql-testing/codeql-pack1@1.0.0 languages: javascript tools: ${{ steps.prepare-test.outputs.tools-url }} @@ -108,7 +108,7 @@ jobs: - uses: ./../action/analyze with: skip-queries: true - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Assert No Results @@ -119,7 +119,7 @@ jobs: fi - uses: ./../action/analyze with: - output: ${{ runner.temp }}/results + output: '${{ runner.temp }}/results' upload-database: false - name: Assert Results run: | diff --git a/.github/workflows/__start-proxy.yml b/.github/workflows/__start-proxy.yml index 438a99405..e1a0f833e 100644 --- a/.github/workflows/__start-proxy.yml +++ b/.github/workflows/__start-proxy.yml @@ -71,8 +71,7 @@ jobs: id: proxy uses: ./../action/start-proxy with: - registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json" - }]' + registry_secrets: '[{ "type": "nuget_feed", "url": "https://api.nuget.org/v3/index.json" }]' - name: Print proxy outputs run: | @@ -81,8 +80,7 @@ jobs: echo "${{ steps.proxy.outputs.proxy_urls }}" - name: Fail if proxy outputs are not set - if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port) - || (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls) + if: (!steps.proxy.outputs.proxy_host) || (!steps.proxy.outputs.proxy_port) || (!steps.proxy.outputs.proxy_ca_certificate) || (!steps.proxy.outputs.proxy_urls) run: exit 1 env: CODEQL_ACTION_TEST_MODE: true diff --git a/.github/workflows/__submit-sarif-failure.yml b/.github/workflows/__submit-sarif-failure.yml index 93553d18d..2fdfeddfb 100644 --- a/.github/workflows/__submit-sarif-failure.yml +++ b/.github/workflows/__submit-sarif-failure.yml @@ -49,8 +49,7 @@ jobs: if: github.triggering_actor != 'dependabot[bot]' permissions: contents: read - security-events: write # needed to upload the SARIF file - + security-events: write timeout-minutes: 45 runs-on: 
${{ matrix.os }} steps: @@ -69,26 +68,20 @@ jobs: languages: javascript tools: ${{ steps.prepare-test.outputs.tools-url }} - name: Fail - # We want this job to pass if the Action correctly uploads the SARIF file for - # the failed run. - # Setting this step to continue on error means that it is marked as completing - # successfully, so will not fail the job. + # We want this job to pass if the Action correctly uploads the SARIF file for + # the failed run. + # Setting this step to continue on error means that it is marked as completing + # successfully, so will not fail the job. continue-on-error: true run: exit 1 - uses: ./analyze - # In a real workflow, this step wouldn't run. Since we used `continue-on-error` - # above, we manually disable it with an `if` condition. + # In a real workflow, this step wouldn't run. Since we used `continue-on-error` + # above, we manually disable it with an `if` condition. if: false with: - category: /test-codeql-version:${{ matrix.version }} + category: '/test-codeql-version:${{ matrix.version }}' env: - # Internal-only environment variable used to indicate that the post-init Action - # should expect to upload a SARIF file for the failed run. CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true - # Make sure the uploading SARIF files feature is enabled. CODEQL_ACTION_UPLOAD_FAILED_SARIF: true - # Upload the failed SARIF file as an integration test of the API endpoint. CODEQL_ACTION_TEST_MODE: false - # Mark telemetry for this workflow so it can be treated separately. 
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks - diff --git a/.github/workflows/__swift-custom-build.yml b/.github/workflows/__swift-custom-build.yml index bc3e5d71f..7749f1b81 100644 --- a/.github/workflows/__swift-custom-build.yml +++ b/.github/workflows/__swift-custom-build.yml @@ -52,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: swift-custom-build: strategy: diff --git a/.github/workflows/__unset-environment.yml b/.github/workflows/__unset-environment.yml index b1918fe26..32d953522 100644 --- a/.github/workflows/__unset-environment.yml +++ b/.github/workflows/__unset-environment.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: unset-environment: strategy: @@ -96,11 +85,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: 
actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -109,7 +93,7 @@ jobs: id: init with: db-location: ${{ runner.temp }}/customDbLocation - # Swift is not supported on Ubuntu so we manually exclude it from the list here + # Swift is not supported on Ubuntu so we manually exclude it from the list here languages: cpp,csharp,go,java,javascript,python,ruby tools: ${{ steps.prepare-test.outputs.tools-url }} - name: Build code diff --git a/.github/workflows/__upload-ref-sha-input.yml b/.github/workflows/__upload-ref-sha-input.yml index ad242dd7c..afa8371a0 100644 --- a/.github/workflows/__upload-ref-sha-input.yml +++ b/.github/workflows/__upload-ref-sha-input.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: upload-ref-sha-input: strategy: @@ -94,11 +83,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: 
Install .NET uses: actions/setup-dotnet@v5 with: @@ -107,19 +91,18 @@ jobs: with: tools: ${{ steps.prepare-test.outputs.tools-url }} languages: cpp,csharp,java,javascript,python - config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ - github.sha }} + config-file: ${{ github.repository }}/tests/multi-language-repo/.github/codeql/custom-queries.yml@${{ github.sha }} - name: Build code run: ./build.sh - # Generate some SARIF we can upload with the upload-sarif step + # Generate some SARIF we can upload with the upload-sarif step - uses: ./../action/analyze with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' upload: never - uses: ./../action/upload-sarif with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' env: CODEQL_ACTION_TEST_MODE: true diff --git a/.github/workflows/__upload-sarif.yml b/.github/workflows/__upload-sarif.yml index 494731fa4..b0afc5b2d 100644 --- a/.github/workflows/__upload-sarif.yml +++ b/.github/workflows/__upload-sarif.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - 
upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: upload-sarif: strategy: @@ -101,11 +90,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -117,11 +101,11 @@ jobs: analysis-kinds: ${{ matrix.analysis-kinds }} - name: Build code run: ./build.sh - # Generate some SARIF we can upload with the upload-sarif step + # Generate some SARIF we can upload with the upload-sarif step - uses: ./../action/analyze with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' upload: never output: ${{ runner.temp }}/results @@ -130,15 +114,15 @@ jobs: uses: ./../action/upload-sarif id: upload-sarif with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' sarif_file: ${{ runner.temp }}/results category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/ - - name: Fail for missing output from `upload-sarif` step for `code-scanning` + - name: 'Fail for missing output from `upload-sarif` step for `code-scanning`' if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning) run: exit 1 - - name: Fail for missing output from `upload-sarif` step for `code-quality` + - name: 'Fail for missing output from `upload-sarif` step for `code-quality`' if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality) 
run: exit 1 @@ -147,28 +131,26 @@ jobs: id: upload-single-sarif-code-scanning if: contains(matrix.analysis-kinds, 'code-scanning') with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' sarif_file: ${{ runner.temp }}/results/javascript.sarif category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/ - - name: Fail for missing output from `upload-single-sarif-code-scanning` step - if: contains(matrix.analysis-kinds, 'code-scanning') && - !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning) + - name: 'Fail for missing output from `upload-single-sarif-code-scanning` step' + if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning) run: exit 1 - name: Upload single SARIF file for Code Quality uses: ./../action/upload-sarif id: upload-single-sarif-code-quality if: contains(matrix.analysis-kinds, 'code-quality') with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' sarif_file: ${{ runner.temp }}/results/javascript.quality.sarif category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/ - - name: Fail for missing output from `upload-single-sarif-code-quality` step - if: contains(matrix.analysis-kinds, 'code-quality') && - !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality) + - name: 'Fail for missing output from `upload-single-sarif-code-quality` step' + if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality) run: exit 1 - name: Change SARIF 
file extension @@ -179,12 +161,12 @@ jobs: id: upload-single-non-sarif if: contains(matrix.analysis-kinds, 'code-scanning') with: - ref: refs/heads/main - sha: 5e235361806c361d4d3f8859e3c897658025a9a2 + ref: 'refs/heads/main' + sha: '5e235361806c361d4d3f8859e3c897658025a9a2' sarif_file: ${{ runner.temp }}/results/javascript.sarif.json category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/ - - name: Fail for missing output from `upload-single-non-sarif` step + - name: 'Fail for missing output from `upload-single-non-sarif` step' if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning) run: exit 1 env: diff --git a/.github/workflows/__with-checkout-path.yml b/.github/workflows/__with-checkout-path.yml index c976b4e9b..0e8419186 100644 --- a/.github/workflows/__with-checkout-path.yml +++ b/.github/workflows/__with-checkout-path.yml @@ -30,11 +30,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -47,11 +42,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' dotnet-version: type: string description: The version of .NET to install @@ -62,8 +52,7 @@ defaults: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: - with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} jobs: with-checkout-path: strategy: @@ -80,6 +69,7 @@ 
jobs: timeout-minutes: 45 runs-on: ${{ matrix.os }} steps: + # This ensures we don't accidentally use the original checkout for any part of the test. - name: Check out repository uses: actions/checkout@v6 - name: Prepare test @@ -94,11 +84,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -109,8 +94,8 @@ jobs: # Actions does not support deleting the current working directory, so we # delete the contents of the directory instead. rm -rf ./* .github .git - # Check out the actions repo again, but at a different location. - # choose an arbitrary SHA so that we can later test that the commit_oid is not from main + # Check out the actions repo again, but at a different location. + # choose an arbitrary SHA so that we can later test that the commit_oid is not from main - uses: actions/checkout@v6 with: ref: 474bbf07f9247ffe1856c6a0f94aeeb10e7afee6 @@ -119,7 +104,7 @@ jobs: - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} - # it's enough to test one compiled language and one interpreted language + # it's enough to test one compiled language and one interpreted language languages: csharp,javascript source-root: x/y/z/some-path/tests/multi-language-repo diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 154c21c7b..0b32bc20e 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -31,34 +31,29 @@ jobs: permissions: contents: read + # We currently need `security-events: read` to access feature flags. 
+ security-events: read steps: - uses: actions/checkout@v6 - - name: Init with default CodeQL bundle from the VM image - id: init-default - uses: ./init - with: - languages: javascript - - name: Remove empty database - # allows us to run init a second time - run: | - rm -rf "$RUNNER_TEMP/codeql_databases" - - name: Init with latest CodeQL bundle - id: init-latest - uses: ./init + - name: Set up default CodeQL bundle + id: setup-default + uses: ./setup-codeql + - name: Set up linked CodeQL bundle + id: setup-linked + uses: ./setup-codeql with: tools: linked - languages: javascript - - name: Compare default and latest CodeQL bundle versions + - name: Compare default and linked CodeQL bundle versions id: compare env: - CODEQL_DEFAULT: ${{ steps.init-default.outputs.codeql-path }} - CODEQL_LATEST: ${{ steps.init-latest.outputs.codeql-path }} + CODEQL_DEFAULT: ${{ steps.setup-default.outputs.codeql-path }} + CODEQL_LINKED: ${{ steps.setup-linked.outputs.codeql-path }} run: | CODEQL_VERSION_DEFAULT="$("$CODEQL_DEFAULT" version --format terse)" - CODEQL_VERSION_LATEST="$("$CODEQL_LATEST" version --format terse)" + CODEQL_VERSION_LINKED="$("$CODEQL_LINKED" version --format terse)" echo "Default CodeQL bundle version is $CODEQL_VERSION_DEFAULT" - echo "Latest CodeQL bundle version is $CODEQL_VERSION_LATEST" + echo "Linked CodeQL bundle version is $CODEQL_VERSION_LINKED" # If we're running on a pull request, run with both bundles, even if `tools: linked` would # be the same as `tools: null`. This allows us to make the job for each of the bundles a @@ -66,7 +61,7 @@ jobs: # # If we're running on push or schedule, then we can skip running with `tools: linked` when it would be # the same as running with `tools: null`. 
- if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LATEST" ]]; then + if [[ "$GITHUB_EVENT_NAME" != "pull_request" && "$GITHUB_EVENT_NAME" != "merge_group" && "$CODEQL_VERSION_DEFAULT" == "$CODEQL_VERSION_LINKED" ]]; then VERSIONS_JSON='[null]' else VERSIONS_JSON='[null, "linked"]' @@ -110,7 +105,7 @@ jobs: uses: ./analyze with: category: "/language:javascript" - upload: ${{ (matrix.os == 'ubuntu-24.04' && !matrix.tools && 'always') || 'never' }} + upload: ${{ (matrix.os == 'ubuntu-24.04' && !matrix.tools && github.event_name != 'merge_group' && 'always' ) || 'never' }} analyze-other: if: github.triggering_actor != 'dependabot[bot]' @@ -145,3 +140,4 @@ jobs: uses: ./analyze with: category: "/language:${{ matrix.language }}" + upload: ${{ (github.event_name != 'merge_group' && 'always') || 'never' }} diff --git a/.github/workflows/codescanning-config-cli.yml b/.github/workflows/codescanning-config-cli.yml index ca3b554a9..0c4829339 100644 --- a/.github/workflows/codescanning-config-cli.yml +++ b/.github/workflows/codescanning-config-cli.yml @@ -11,6 +11,8 @@ env: CODEQL_ACTION_OVERLAY_ANALYSIS: true CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT: false CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT: true + CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_CHECK: false + CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS: true on: push: @@ -80,7 +82,7 @@ jobs: # On PRs, overlay analysis may change the config that is passed to the CLI. # Therefore, we have two variants of the following test, one for PRs and one for other events. 
- name: Empty file (non-PR) - if: github.event_name != 'pull_request' && github.event_name != 'merge_group' + if: github.event_name != 'pull_request' uses: ./../action/.github/actions/check-codescanning-config with: expected-config-file-contents: "{}" @@ -88,7 +90,7 @@ jobs: tools: ${{ steps.prepare-test.outputs.tools-url }} - name: Empty file (PR) - if: github.event_name == 'pull_request' || github.event_name == 'merge_group' + if: github.event_name == 'pull_request' uses: ./../action/.github/actions/check-codescanning-config with: expected-config-file-contents: | diff --git a/.github/workflows/debug-artifacts-failure-safe.yml b/.github/workflows/debug-artifacts-failure-safe.yml index 4d0433535..fdb028ca5 100644 --- a/.github/workflows/debug-artifacts-failure-safe.yml +++ b/.github/workflows/debug-artifacts-failure-safe.yml @@ -41,6 +41,8 @@ jobs: CODEQL_ACTION_TEST_MODE: true permissions: contents: read + # We currently need `security-events: read` to access feature flags. + security-events: read timeout-minutes: 45 runs-on: ubuntu-latest steps: @@ -87,7 +89,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download all artifacts - uses: actions/download-artifact@v7 + uses: actions/download-artifact@v8 - name: Check expected artifacts exist run: | LANGUAGES="cpp csharp go java javascript python" diff --git a/.github/workflows/debug-artifacts-safe.yml b/.github/workflows/debug-artifacts-safe.yml index 7886d44c7..dddc7ba44 100644 --- a/.github/workflows/debug-artifacts-safe.yml +++ b/.github/workflows/debug-artifacts-safe.yml @@ -40,6 +40,8 @@ jobs: timeout-minutes: 45 permissions: contents: read + # We currently need `security-events: read` to access feature flags. 
+ security-events: read runs-on: ubuntu-latest steps: - name: Check out repository @@ -81,7 +83,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download all artifacts - uses: actions/download-artifact@v7 + uses: actions/download-artifact@v8 - name: Check expected artifacts exist run: | VERSIONS="stable-v2.20.3 default linked nightly-latest" diff --git a/.github/workflows/pr-checks.yml b/.github/workflows/pr-checks.yml index 1c78da10f..77a544cfa 100644 --- a/.github/workflows/pr-checks.yml +++ b/.github/workflows/pr-checks.yml @@ -42,11 +42,6 @@ jobs: node-version: ${{ matrix.node-version }} cache: 'npm' - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: 3.11 - - name: Install dependencies run: | # Use the system Bash shell to ensure we can run commands like `npm ci` @@ -68,7 +63,7 @@ jobs: - name: Run pr-checks tests if: always() working-directory: pr-checks - run: python -m unittest discover + run: npm ci && npx tsx --test - name: Lint if: always() && matrix.os != 'windows-latest' diff --git a/.github/workflows/python312-windows.yml b/.github/workflows/python312-windows.yml index 79602d056..880ecd578 100644 --- a/.github/workflows/python312-windows.yml +++ b/.github/workflows/python312-windows.yml @@ -26,6 +26,8 @@ jobs: timeout-minutes: 45 permissions: contents: read + # We currently need `security-events: read` to access feature flags. 
+ security-events: read runs-on: windows-latest steps: diff --git a/.github/workflows/rebuild.yml b/.github/workflows/rebuild.yml index 2e88b35d5..095c0726f 100644 --- a/.github/workflows/rebuild.yml +++ b/.github/workflows/rebuild.yml @@ -73,24 +73,17 @@ jobs: npm run lint -- --fix npm run build - - name: Set up Python - uses: actions/setup-python@v6 - with: - python-version: 3.11 - - name: Sync back version updates to generated workflows # Only sync back versions on Dependabot update PRs if: startsWith(env.HEAD_REF, 'dependabot/') working-directory: pr-checks run: | - python3 sync_back.py -v + npm ci + npx tsx sync_back.ts --verbose - name: Generate workflows working-directory: pr-checks - run: | - python -m pip install --upgrade pip - pip install ruamel.yaml==0.17.31 - python3 sync.py + run: ./sync.sh - name: "Merge in progress: Finish merge and push" if: steps.merge.outputs.merge-in-progress == 'true' diff --git a/.github/workflows/script/update-required-checks.sh b/.github/workflows/script/update-required-checks.sh index 8a930cae7..f6a4c4f5c 100755 --- a/.github/workflows/script/update-required-checks.sh +++ b/.github/workflows/script/update-required-checks.sh @@ -29,7 +29,7 @@ fi echo "Getting checks for $GITHUB_SHA" # Ignore any checks with "https://", CodeQL, LGTM, Update, and ESLint checks. -CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") or . == "Agent" or . == "Cleanup artifacts" or . == "Prepare" or . 
== "Upload results" | not)] | unique | sort')" +CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs.[] | select(.conclusion != "skipped") | .name | select(contains("https://") or . == "CodeQL" or . == "Dependabot" or . == "check-expected-release-files" or contains("Update") or contains("ESLint") or contains("update") or contains("test-setup-python-scripts") or . == "Agent" or . == "Cleanup artifacts" or . == "Prepare" or . == "Upload results" or . == "Label PR with size" | not)] | unique | sort')" echo "$CHECKS" | jq diff --git a/.github/workflows/script/verify-pr-checks.sh b/.github/workflows/script/verify-pr-checks.sh index 6aa1381e2..5be2c599e 100755 --- a/.github/workflows/script/verify-pr-checks.sh +++ b/.github/workflows/script/verify-pr-checks.sh @@ -19,7 +19,7 @@ if [ ! -z "$(git status --porcelain)" ]; then # If we get a fail here then the PR needs attention git diff git status - >&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && python3 sync.py' to update" + >&2 echo "Failed: PR checks are not up to date. Run 'cd pr-checks && ./sync.sh' to update" echo "### Generated workflows diff" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY diff --git a/CHANGELOG.md b/CHANGELOG.md index 78f8b94b4..6d6c8a15e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,22 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th ## [UNRELEASED] -No user facing changes. +- Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. 
[#3557](https://github.com/github/codeql-action/pull/3557) +- The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. [#3559](https://github.com/github/codeql-action/pull/3559) + +## 4.32.6 - 05 Mar 2026 + +- Update default CodeQL bundle version to [2.24.3](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.3). [#3548](https://github.com/github/codeql-action/pull/3548) + +## 4.32.5 - 02 Mar 2026 + +- Repositories owned by an organization can now set up the `github-codeql-disable-overlay` custom repository property to disable [improved incremental analysis for CodeQL](https://github.com/github/roadmap/issues/1158). First, create a custom repository property with the name `github-codeql-disable-overlay` and the type "True/false" in the organization's settings. Then in the repository's settings, set this property to `true` to disable improved incremental analysis. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). This feature is not yet available on GitHub Enterprise Server. [#3507](https://github.com/github/codeql-action/pull/3507) +- Added an experimental change so that when [improved incremental analysis](https://github.com/github/roadmap/issues/1158) fails on a runner — potentially due to insufficient disk space — the failure is recorded in the Actions cache so that subsequent runs will automatically skip improved incremental analysis until something changes (e.g. a larger runner is provisioned or a new CodeQL version is released). 
We expect to roll this change out to everyone in March. [#3487](https://github.com/github/codeql-action/pull/3487) +- The minimum memory check for improved incremental analysis is now skipped for CodeQL 2.24.3 and later, which has reduced peak RAM usage. [#3515](https://github.com/github/codeql-action/pull/3515) +- Reduced log levels for best-effort private package registry connection check failures to reduce noise from workflow annotations. [#3516](https://github.com/github/codeql-action/pull/3516) +- Added an experimental change which lowers the minimum disk space requirement for [improved incremental analysis](https://github.com/github/roadmap/issues/1158), enabling it to run on standard GitHub Actions runners. We expect to roll this change out to everyone in March. [#3498](https://github.com/github/codeql-action/pull/3498) +- Added an experimental change which allows the `start-proxy` action to resolve the CodeQL CLI version from feature flags instead of using the linked CLI bundle version. We expect to roll this change out to everyone in March. [#3512](https://github.com/github/codeql-action/pull/3512) +- The previously experimental changes from versions 4.32.3, 4.32.4, 3.32.3 and 3.32.4 are now enabled by default. [#3503](https://github.com/github/codeql-action/pull/3503), [#3504](https://github.com/github/codeql-action/pull/3504) ## 4.32.4 - 20 Feb 2026 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 13614cb01..26e06e30d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -92,7 +92,7 @@ We typically deprecate a version of CodeQL when the GitHub Enterprise Server (GH 1. Remove support for the old version of CodeQL. - Bump `CODEQL_MINIMUM_VERSION` in `src/codeql.ts` to the new minimum version of CodeQL. - Remove any code that is only needed to support the old version of CodeQL. This is often behind a version guard, so look for instances of version numbers between the old minimum version and the new minimum version in the codebase. 
A good place to start is the list of version numbers in `src/codeql.ts`. - - Update the default set of CodeQL test versions in `pr-checks/sync.py`. + - Update the default set of CodeQL test versions in `pr-checks/sync.ts`. - Remove the old minimum version of CodeQL. - Add the latest patch release for any new CodeQL minor version series that have shipped in GHES. - Run the script to update the generated PR checks. diff --git a/README.md b/README.md index da34a9110..35b50c6a3 100644 --- a/README.md +++ b/README.md @@ -72,10 +72,12 @@ We typically release new minor versions of the CodeQL Action and Bundle when a n | Minimum CodeQL Action | Minimum CodeQL Bundle Version | GitHub Environment | Notes | |-----------------------|-------------------------------|--------------------|-------| -| `v3.28.21` | `2.21.3` | Enterprise Server 3.18 | | -| `v3.28.12` | `2.20.7` | Enterprise Server 3.17 | | -| `v3.28.6` | `2.20.3` | Enterprise Server 3.16 | | -| `v3.28.6` | `2.20.3` | Enterprise Server 3.15 | | +| `v4.31.10` | `2.23.9` | Enterprise Server 3.20 | | +| `v3.29.11` | `2.22.4` | Enterprise Server 3.19 | | +| `v3.28.21` | `2.21.3` | Enterprise Server 3.18 | | +| `v3.28.12` | `2.20.7` | Enterprise Server 3.17 | | +| `v3.28.6` | `2.20.3` | Enterprise Server 3.16 | | +| `v3.28.6` | `2.20.3` | Enterprise Server 3.15 | | | `v3.28.6` | `2.20.3` | Enterprise Server 3.14 | | See the full list of GHES release and deprecation dates at [GitHub Enterprise Server releases](https://docs.github.com/en/enterprise-server/admin/all-releases#releases-of-github-enterprise-server). 
diff --git a/eslint.config.mjs b/eslint.config.mjs index 3e1ea8a6a..6ac800276 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -21,6 +21,7 @@ export default [ "build.mjs", "eslint.config.mjs", ".github/**/*", + "pr-checks/**/*", ], }, // eslint recommended config diff --git a/init/action.yml b/init/action.yml index 57d5a9940..6c36f79bc 100644 --- a/init/action.yml +++ b/init/action.yml @@ -159,6 +159,11 @@ inputs: description: >- Explicitly enable or disable caching of project build dependencies. required: false + check-run-id: + description: >- + [Internal] The ID of the check run, as provided by the Actions runtime environment. Do not set this value manually. + default: ${{ job.check_run_id }} + required: false outputs: codeql-path: description: The path of the CodeQL binary used for analysis diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 976b62311..454c2d9fb 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path7.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _2, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _2(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _2 = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _2), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? 
"." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -103376,6 +103498,7 @@ var require_minimatch2 = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; @@ -103432,51 +103555,146 @@ var require_minimatch2 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + } + _matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + let firstgs = -1; + for (let i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + let lastgs = -1; + for (let i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + const head = pattern.slice(patternIndex, firstgs); + const body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + const tail = partial ? 
[] : pattern.slice(lastgs + 1); + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { return false; } - var hit; + fileIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + const tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + if (!this._matchOne(file, tail, partial, tailStart - 1, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i = fileIndex; i < file.length - fileTailMatch; i++) { + const f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let idx = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[idx--] + b[0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch + ); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + _matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = 
file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + } + _matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + const p = pattern[pi]; + const f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === GLOBSTAR) return false; + let hit; if (typeof p === "string") { hit = f === p; this.debug("string match", p, f, hit); @@ -116797,12 +117015,60 @@ var require_unescape = __commonJS({ var require_ast = __commonJS({ "node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js"(exports2) { "use strict"; + var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.AST = void 0; var brace_expressions_js_1 = require_brace_expressions(); var unescape_js_1 = require_unescape(); var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]); var isExtglobType = (c) => types.has(c); + var isExtglobAST = (c) => isExtglobType(c.type); + var adoptionMap = /* @__PURE__ */ new Map([ + ["!", ["@"]], + ["?", ["?", "@"]], + ["@", ["@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@"]] + ]); + var adoptionWithSpaceMap = /* @__PURE__ */ new Map([ + ["!", ["?"]], + ["@", ["?"]], + ["+", ["?", "*"]] + ]); + 
var adoptionAnyMap = /* @__PURE__ */ new Map([ + ["!", ["?", "@"]], + ["?", ["?", "@"]], + ["@", ["?", "@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@", "?", "*"]] + ]); + var usurpMap = /* @__PURE__ */ new Map([ + ["!", /* @__PURE__ */ new Map([["!", "@"]])], + [ + "?", + /* @__PURE__ */ new Map([ + ["*", "*"], + ["+", "*"] + ]) + ], + [ + "@", + /* @__PURE__ */ new Map([ + ["!", "!"], + ["?", "?"], + ["@", "@"], + ["*", "*"], + ["+", "+"] + ]) + ], + [ + "+", + /* @__PURE__ */ new Map([ + ["?", "*"], + ["*", "*"] + ]) + ] + ]); var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))"; var startNoDot = "(?!\\.)"; var addPatternStart = /* @__PURE__ */ new Set(["[", "."]); @@ -116812,7 +117078,8 @@ var require_ast = __commonJS({ var qmark = "[^/]"; var star = qmark + "*?"; var starNoEmpty = qmark + "+?"; - var AST = class _AST { + var ID = 0; + var AST = class { type; #root; #hasMagic; @@ -116827,6 +117094,22 @@ var require_ast = __commonJS({ // set to true if it's an extglob with no children // (which really means one child of '') #emptyExt = false; + id = ++ID; + get depth() { + return (this.#parent?.depth ?? 
-1) + 1; + } + [/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")]() { + return { + "@@type": "AST", + id: this.id, + type: this.type, + root: this.#root.id, + parent: this.#parent?.id, + depth: this.depth, + partsLength: this.#parts.length, + parts: this.#parts + }; + } constructor(type2, parent, options = {}) { this.type = type2; if (type2) @@ -116892,7 +117175,7 @@ var require_ast = __commonJS({ for (const p of parts) { if (p === "") continue; - if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) { + if (typeof p !== "string" && !(p instanceof _a && p.#parent === this)) { throw new Error("invalid part: " + p); } this.#parts.push(p); @@ -116917,7 +117200,7 @@ var require_ast = __commonJS({ const p = this.#parent; for (let i = 0; i < this.#parentIndex; i++) { const pp = p.#parts[i]; - if (!(pp instanceof _AST && pp.type === "!")) { + if (!(pp instanceof _a && pp.type === "!")) { return false; } } @@ -116942,13 +117225,14 @@ var require_ast = __commonJS({ this.push(part.clone(this)); } clone(parent) { - const c = new _AST(this.type, parent); + const c = new _a(this.type, parent); for (const p of this.#parts) { c.copyIn(p); } return c; } - static #parseAST(str2, ast, pos, opt) { + static #parseAST(str2, ast, pos, opt, extDepth) { + const maxDepth = opt.maxExtglobRecursion ?? 
2; let escaping = false; let inBrace = false; let braceStart = -1; @@ -116980,11 +117264,12 @@ var require_ast = __commonJS({ acc2 += c; continue; } - if (!opt.noext && isExtglobType(c) && str2.charAt(i2) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i2) === "(" && extDepth <= maxDepth; + if (doRecurse) { ast.push(acc2); acc2 = ""; - const ext = new _AST(c, ast); - i2 = _AST.#parseAST(str2, ext, i2, opt); + const ext = new _a(c, ast); + i2 = _a.#parseAST(str2, ext, i2, opt, extDepth + 1); ast.push(ext); continue; } @@ -116994,7 +117279,7 @@ var require_ast = __commonJS({ return i2; } let i = pos + 1; - let part = new _AST(null, ast); + let part = new _a(null, ast); const parts = []; let acc = ""; while (i < str2.length) { @@ -117021,19 +117306,22 @@ var require_ast = __commonJS({ acc += c; continue; } - if (isExtglobType(c) && str2.charAt(i) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i) === "(" && /* c8 ignore start - the maxDepth is sufficient here */ + (extDepth <= maxDepth || ast && ast.#canAdoptType(c)); + if (doRecurse) { + const depthAdd = ast && ast.#canAdoptType(c) ? 
0 : 1; part.push(acc); acc = ""; - const ext = new _AST(c, part); + const ext = new _a(c, part); part.push(ext); - i = _AST.#parseAST(str2, ext, i, opt); + i = _a.#parseAST(str2, ext, i, opt, extDepth + depthAdd); continue; } if (c === "|") { part.push(acc); acc = ""; parts.push(part); - part = new _AST(null, ast); + part = new _a(null, ast); continue; } if (c === ")") { @@ -117052,9 +117340,71 @@ var require_ast = __commonJS({ ast.#parts = [str2.substring(pos - 1)]; return i; } + #canAdoptWithSpace(child) { + return this.#canAdopt(child, adoptionWithSpaceMap); + } + #canAdopt(child, map2 = adoptionMap) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canAdoptType(gc.type, map2); + } + #canAdoptType(c, map2 = adoptionAnyMap) { + return !!map2.get(this.type)?.includes(c); + } + #adoptWithSpace(child, index) { + const gc = child.#parts[0]; + const blank = new _a(null, gc, this.options); + blank.#parts.push(""); + gc.push(blank); + this.#adopt(child, index); + } + #adopt(child, index) { + const gc = child.#parts[0]; + this.#parts.splice(index, 1, ...gc.#parts); + for (const p of gc.#parts) { + if (typeof p === "object") + p.#parent = this; + } + this.#toString = void 0; + } + #canUsurpType(c) { + const m = usurpMap.get(this.type); + return !!m?.has(c); + } + #canUsurp(child) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null || this.#parts.length !== 1) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canUsurpType(gc.type); + } + #usurp(child) { + const m = usurpMap.get(this.type); + const gc = child.#parts[0]; + const nt = m?.get(gc.type); + if (!nt) + return false; + this.#parts = gc.#parts; + 
for (const p of this.#parts) { + if (typeof p === "object") { + p.#parent = this; + } + } + this.type = nt; + this.#toString = void 0; + this.#emptyExt = false; + } static fromGlob(pattern, options = {}) { - const ast = new _AST(null, void 0, options); - _AST.#parseAST(pattern, ast, 0, options); + const ast = new _a(null, void 0, options); + _a.#parseAST(pattern, ast, 0, options, 0); return ast; } // returns the regular expression if there's magic, or the unescaped @@ -117148,12 +117498,14 @@ var require_ast = __commonJS({ // or start or whatever) and prepend ^ or / at the Regexp construction. toRegExpSource(allowDot) { const dot = allowDot ?? !!this.#options.dot; - if (this.#root === this) + if (this.#root === this) { + this.#flatten(); this.#fillNegs(); - if (!this.type) { + } + if (!isExtglobAST(this)) { const noEmpty = this.isStart() && this.isEnd() && !this.#parts.some((s) => typeof s !== "string"); const src = this.#parts.map((p) => { - const [re, _2, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); + const [re, _2, hasMagic, uflag] = typeof p === "string" ? _a.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); this.#hasMagic = this.#hasMagic || hasMagic; this.#uflag = this.#uflag || uflag; return re; @@ -117192,9 +117544,10 @@ var require_ast = __commonJS({ let body = this.#partsToRegExp(dot); if (this.isStart() && this.isEnd() && !body && this.type !== "!") { const s = this.toString(); - this.#parts = [s]; - this.type = null; - this.#hasMagic = void 0; + const me = this; + me.#parts = [s]; + me.type = null; + me.#hasMagic = void 0; return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; } let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? 
"" : this.#partsToRegExp(true); @@ -117221,6 +117574,38 @@ var require_ast = __commonJS({ this.#uflag ]; } + #flatten() { + if (!isExtglobAST(this)) { + for (const p of this.#parts) { + if (typeof p === "object") { + p.#flatten(); + } + } + } else { + let iterations = 0; + let done = false; + do { + done = true; + for (let i = 0; i < this.#parts.length; i++) { + const c = this.#parts[i]; + if (typeof c === "object") { + c.#flatten(); + if (this.#canAdopt(c)) { + done = false; + this.#adopt(c, i); + } else if (this.#canAdoptWithSpace(c)) { + done = false; + this.#adoptWithSpace(c, i); + } else if (this.#canUsurp(c)) { + done = false; + this.#usurp(c); + } + } + } + } while (!done && ++iterations < 10); + } + this.#toString = void 0; + } #partsToRegExp(dot) { return this.#parts.map((p) => { if (typeof p === "string") { @@ -117282,6 +117667,7 @@ var require_ast = __commonJS({ } }; exports2.AST = AST; + _a = AST; } }); @@ -117466,11 +117852,13 @@ var require_commonjs20 = __commonJS({ isWindows; platform; windowsNoMagicRoot; + maxGlobstarRecursion; regexp; constructor(pattern, options = {}) { (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); options = options || {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200; this.pattern = pattern; this.platform = options.platform || defaultPlatform; this.isWindows = this.platform === "win32"; @@ -117807,7 +118195,8 @@ var require_commonjs20 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial = false) { - const options = this.options; + let fileStartIndex = 0; + let patternStartIndex = 0; if (this.isWindows) { const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]); const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" 
&& /^[a-z]:$/i.test(file[3]); @@ -117822,11 +118211,8 @@ var require_commonjs20 = __commonJS({ ]; if (fd.toLowerCase() === pd.toLowerCase()) { pattern[pdi] = fd; - if (pdi > fdi) { - pattern = pattern.slice(pdi); - } else if (fdi > pdi) { - file = file.slice(fdi); - } + patternStartIndex = pdi; + fileStartIndex = fdi; } } } @@ -117834,49 +118220,123 @@ var require_commonjs20 = __commonJS({ if (optimizationLevel >= 2) { file = this.levelTwoFileOptimize(file); } - this.debug("matchOne", this, { file, pattern }); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) { + if (pattern.includes(exports2.GLOBSTAR)) { + return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex); + } + return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex); + } + #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + const firstgs = pattern.indexOf(exports2.GLOBSTAR, patternIndex); + const lastgs = pattern.lastIndexOf(exports2.GLOBSTAR); + const [head, body, tail] = partial ? [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1), + [] + ] : [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1, lastgs), + pattern.slice(lastgs + 1) + ]; + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this.#matchOne(fileHead, head, partial, 0, 0)) { return false; } - if (p === exports2.GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") - return false; - } - return true; + fileIndex += head.length; + patternIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) + return false; + let tailStart = file.length - tail.length; + if (this.#matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } + tailStart--; + if (!this.#matchOne(file, tail, partial, tailStart, 0)) { + return false; } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) { - return true; - } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i2 = fileIndex; i2 < file.length - fileTailMatch; i2++) { + const f = String(file[i2]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === exports2.GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let i = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length); + } + return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; + } + fileIndex++; + } + return partial || null; + } + #matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi; + let pi; + let pl; + let fl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + let p = pattern[pi]; + let f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === exports2.GLOBSTAR) { return false; } let hit; @@ -160640,7 +161100,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -161767,6 +162227,11 @@ var featureConfig = { // cannot be found when interpreting results. minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -161778,11 +162243,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 9e0920d52..78ce035ed 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -204,7 +204,7 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto3 = 
__importStar2(require("crypto")); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var os5 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -212,10 +212,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs17.existsSync(filePath)) { + if (!fs18.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs17.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { + fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, { encoding: "utf8" }); } @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var path16 = __importStar2(require("path")); - _a = fs17.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + _a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs17.promises.readlink(fsPath); + const result = yield fs18.promises.readlink(fsPath); if 
(exports2.IS_WINDOWS && !result.endsWith("\\")) { return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs17.constants.O_RDONLY; + exports2.READONLY = fs18.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = 
__commonJS({ pattern = pattern.split(path16.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? 
[] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, 
partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -50305,7 +50404,7 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core15 = __importStar2(require_core()); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path16 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); @@ -50359,7 +50458,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core15.debug(`Search path '${searchPath}'`); try { - yield __await2(fs17.promises.lstat(searchPath)); + yield __await2(fs18.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -50393,7 +50492,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs17.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50428,7 +50527,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield 
fs17.promises.stat(item.path); + stats = yield fs18.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -50440,10 +50539,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs17.promises.lstat(item.path); + stats = yield fs18.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs17.promises.realpath(item.path); + const realPath = yield fs18.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -50552,7 +50651,7 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles2; var crypto3 = __importStar2(require("crypto")); var core15 = __importStar2(require_core()); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var path16 = __importStar2(require("path")); @@ -50575,13 +50674,13 @@ var require_internal_hash_files = __commonJS({ writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs17.statSync(file).isDirectory()) { + if (fs18.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto3.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs17.createReadStream(file), hash2); + yield pipeline(fs18.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -51956,7 +52055,7 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var path16 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); @@ 
-51985,7 +52084,7 @@ var require_cacheUtils = __commonJS({ }); } function getArchiveFileSizeInBytes(filePath) { - return fs17.statSync(filePath).size; + return fs18.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -52023,7 +52122,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs17.unlink)(filePath); + return util.promisify(fs18.unlink)(filePath); }); } function getVersion(app_1) { @@ -52065,7 +52164,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs17.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" 
=== t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" === t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." 
: "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + 
n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" !== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? 
m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" == t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? 
i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { 
const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? 
null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? 
this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += 
t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, 
"MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid 
attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? 
(e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { 
regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? 
this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + 
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -92199,7 +92321,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); @@ -92310,7 +92432,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs17.createWriteStream(archivePath); + const writeStream = fs18.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -92335,7 +92457,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs17.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs18.promises.open(archivePath, "w"); const httpClient = new 
http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -92451,7 +92573,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs17.openSync(archivePath, "w"); + const fd = fs18.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -92469,12 +92591,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs17.writeFileSync(fd, result); + fs18.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs17.closeSync(fd); + fs18.closeSync(fd); } } }); @@ -92796,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({ var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -92931,7 +93053,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs17.openSync(archivePath, "r"); + const fd = fs18.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -92945,7 +93067,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += 
maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs17.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, { fd, start, end, @@ -92956,7 +93078,7 @@ Other caches with similar key:`); } }))); } finally { - fs17.closeSync(fd); + fs18.closeSync(fd); } return; }); @@ -98912,7 +99034,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os5 = require("os"); var cp = require("child_process"); - var fs17 = require("fs"); + var fs18 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { const platFilter = os5.platform(); @@ -98974,10 +99096,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs17.existsSync(lsbReleaseFile)) { - contents = fs17.readFileSync(lsbReleaseFile).toString(); - } else if (fs17.existsSync(osReleaseFile)) { - contents = fs17.readFileSync(osReleaseFile).toString(); + if (fs18.existsSync(lsbReleaseFile)) { + contents = fs18.readFileSync(lsbReleaseFile).toString(); + } else if (fs18.existsSync(osReleaseFile)) { + contents = fs18.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99186,7 +99308,7 @@ var require_tool_cache = __commonJS({ var core15 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); - var fs17 = __importStar2(require("fs")); + var fs18 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os5 = __importStar2(require("os")); var path16 = __importStar2(require("path")); @@ -99232,7 +99354,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs17.existsSync(dest)) { + if (fs18.existsSync(dest)) { throw new Error(`Destination file path ${dest} 
already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99256,7 +99378,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs17.createWriteStream(dest)); + yield pipeline(readStream, fs18.createWriteStream(dest)); core15.debug("download complete"); succeeded = true; return dest; @@ -99468,11 +99590,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os5.arch(); core15.debug(`Caching tool ${tool} ${version} ${arch2}`); core15.debug(`source dir: ${sourceDir}`); - if (!fs17.statSync(sourceDir).isDirectory()) { + if (!fs18.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs17.readdirSync(sourceDir)) { + for (const itemName of fs18.readdirSync(sourceDir)) { const s = path16.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } @@ -99486,7 +99608,7 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os5.arch(); core15.debug(`Caching tool ${tool} ${version} ${arch2}`); core15.debug(`source file: ${sourceFile}`); - if (!fs17.statSync(sourceFile).isFile()) { + if (!fs18.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); @@ -99515,7 +99637,7 @@ var require_tool_cache = __commonJS({ versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2); core15.debug(`checking cache: ${cachePath}`); - if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) { + if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -99528,12 +99650,12 @@ var require_tool_cache = __commonJS({ const versions = []; arch2 = arch2 
|| os5.arch(); const toolPath = path16.join(_getCacheDirectory(), toolName); - if (fs17.existsSync(toolPath)) { - const children = fs17.readdirSync(toolPath); + if (fs18.existsSync(toolPath)) { + const children = fs18.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { const fullPath = path16.join(toolPath, child, arch2 || ""); - if (fs17.existsSync(fullPath) && fs17.existsSync(`${fullPath}.complete`)) { + if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99604,7 +99726,7 @@ var require_tool_cache = __commonJS({ function _completeToolPath(tool, version, arch2) { const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs17.writeFileSync(markerPath, ""); + fs18.writeFileSync(markerPath, ""); core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -103111,7 +103233,7 @@ __export(analyze_action_exports, { runPromise: () => runPromise }); module.exports = __toCommonJS(analyze_action_exports); -var fs16 = __toESM(require("fs")); +var fs17 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); var import_perf_hooks3 = require("perf_hooks"); var core14 = __toESM(require_core()); @@ -103140,21 +103262,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs17 = options.fs || await import("node:fs/promises"); + const fs18 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = 
returnType.strict ? await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -105795,17 +105917,6 @@ function getExtraOptionsEnvParam() { ); } } -function getToolNames(sarif) { - const toolNames = {}; - for (const run2 of sarif.runs || []) { - const tool = run2.tool || {}; - const driver = tool.driver || {}; - if (typeof driver.name === "string" && driver.name.length > 0) { - toolNames[driver.name] = true; - } - } - return Object.keys(toolNames); -} function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) { const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1); const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0); @@ -107239,8 +107350,8 @@ var path5 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs3 = __toESM(require("fs")); @@ -107865,6 +107976,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -107876,11 +107992,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", @@ -111114,7 +111225,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error } // src/upload-lib.ts -var fs15 = __toESM(require("fs")); +var fs16 = __toESM(require("fs")); var path14 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); @@ -112195,12 +112306,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) { } return uri; } -async function addFingerprints(sarif, sourceRoot, logger) { +async function addFingerprints(sarifLog, sourceRoot, logger) { logger.info( `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` ); const callbacksByFile = {}; - for (const run2 of sarif.runs || []) { + for (const run2 of sarifLog.runs || []) { const artifacts = run2.artifacts || []; for (const result of run2.results || []) { const primaryLocation = (result.locations || [])[0]; @@ -112240,7 +112351,7 @@ async function addFingerprints(sarif, sourceRoot, logger) { }; await hash(teeCallback, filepath); } - return sarif; + return sarifLog; } // src/init.ts @@ -112275,36 +112386,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe }; } -// src/upload-lib.ts -var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; -var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; +// src/sarif/index.ts +var fs15 = __toESM(require("fs")); +var InvalidSarifUploadError = class extends Error { +}; +function getToolNames(sarifFile) { + const toolNames = {}; + for (const run2 of sarifFile.runs || []) { + const tool = run2.tool || {}; + const driver = tool.driver || {}; + if (typeof driver.name === "string" && driver.name.length > 0) { + toolNames[driver.name] = true; + } + } + return Object.keys(toolNames); +} +function readSarifFile(sarifFilePath) { + return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8")); +} function combineSarifFiles(sarifFiles, logger) { logger.info(`Loading SARIF file(s)`); - const combinedSarif = { - version: null, - runs: [] - }; + const runs = []; + let version = void 0; for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); - const sarifObject = JSON.parse( - fs15.readFileSync(sarifFile, "utf8") - ); - if (combinedSarif.version === null) { - combinedSarif.version = sarifObject.version; - } else if (combinedSarif.version !== 
sarifObject.version) { + const sarifLog = readSarifFile(sarifFile); + if (version === void 0) { + version = sarifLog.version; + } else if (version !== sarifLog.version) { throw new InvalidSarifUploadError( - `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}` + `Different SARIF versions encountered: ${version} and ${sarifLog.version}` ); } - combinedSarif.runs.push(...sarifObject.runs); + runs.push(...sarifLog?.runs || []); } - return combinedSarif; + if (version === void 0) { + version = "2.1.0"; + } + return { version, runs }; } -function areAllRunsProducedByCodeQL(sarifObjects) { - return sarifObjects.every((sarifObject) => { - return sarifObject.runs?.every( - (run2) => run2.tool?.driver?.name === "CodeQL" - ); +function areAllRunsProducedByCodeQL(sarifLogs) { + return sarifLogs.every((sarifLog) => { + return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL"); }); } function createRunKey(run2) { @@ -112317,10 +112440,13 @@ function createRunKey(run2) { automationId: run2.automationDetails?.id }; } -function areAllRunsUnique(sarifObjects) { +function areAllRunsUnique(sarifLogs) { const keys = /* @__PURE__ */ new Set(); - for (const sarifObject of sarifObjects) { - for (const run2 of sarifObject.runs) { + for (const sarifLog of sarifLogs) { + if (sarifLog.runs === void 0) { + continue; + } + for (const run2 of sarifLog.runs) { const key = JSON.stringify(createRunKey(run2)); if (keys.has(key)) { return false; @@ -112330,6 +112456,10 @@ function areAllRunsUnique(sarifObjects) { } return true; } + +// src/upload-lib.ts +var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; +var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) { if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && 
satisfiesGHESVersion(githubVersion.version, "<3.14", true)) { return false; @@ -112358,9 +112488,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); - const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs15.readFileSync(sarifFile, "utf8")); - }); + const sarifObjects = sarifFiles.map(readSarifFile); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; if (!areAllRunsProducedByCodeQL(sarifObjects)) { @@ -112413,27 +112541,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = initCodeQLResult.codeql; } const baseTempDir = path14.resolve(tempDir, "combined-sarif"); - fs15.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs15.mkdtempSync(path14.resolve(baseTempDir, "output-")); + fs16.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs16.mkdtempSync(path14.resolve(baseTempDir, "output-")); const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs15.readFileSync(outputFile, "utf8")); + return readSarifFile(outputFile); } -function populateRunAutomationDetails(sarif, category, analysis_key, environment) { +function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { - for (const run2 of sarif.runs || []) { + for (const run2 of 
sarifFile.runs || []) { if (run2.automationDetails === void 0) { run2.automationDetails = { id: automationID }; } } - return sarif; + return sarifFile; } - return sarif; + return sarifFile; } function getAutomationID2(category, analysis_key, environment) { if (category !== void 0) { @@ -112456,7 +112584,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs15.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs16.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -112490,7 +112618,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs15.readdirSync(dir, { withFileTypes: true }); + const entries = fs16.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { sarifFiles.push(path14.resolve(dir, entry.name)); @@ -112503,7 +112631,7 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } async function getGroupedSarifFilePaths(logger, sarifPath) { - const stats = fs15.statSync(sarifPath, { throwIfNoEntry: false }); + const stats = fs16.statSync(sarifPath, { throwIfNoEntry: false }); if (stats === void 0) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } @@ -112550,9 +112678,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) { } return results; } -function countResultsInSarif(sarif) { +function countResultsInSarif(sarifLog) { let numResults = 0; - const parsedSarif = JSON.parse(sarif); + const parsedSarif = JSON.parse(sarifLog); if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. 
Missing 'runs' array."); } @@ -112566,26 +112694,26 @@ function countResultsInSarif(sarif) { } return numResults; } -function readSarifFile(sarifFilePath) { +function readSarifFileOrThrow(sarifFilePath) { try { - return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8")); + return readSarifFile(sarifFilePath); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}` ); } } -function validateSarifFileSchema(sarif, sarifFilePath, logger) { - if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments. +function validateSarifFileSchema(sarifLog, sarifFilePath, logger) { + if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments. !getTestingEnvironment()) { logger.debug( `Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.` ); - return; + return true; } logger.info(`Validating ${sarifFilePath}`); const schema2 = require_sarif_schema_2_1_0(); - const result = new jsonschema2.Validator().validate(sarif, schema2); + const result = new jsonschema2.Validator().validate(sarifLog, schema2); const warningAttributes = ["uri-reference", "uri"]; const errors = (result.errors ?? 
[]).filter( (err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument)) @@ -112612,6 +112740,7 @@ ${sarifErrors.join( )}` ); } + return true; } function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) { const payloadObj = { @@ -112637,7 +112766,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs15.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs16.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -112648,14 +112777,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); - let sarif; + let sarifLog; category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { - const parsedSarif = readSarifFile(sarifPath); + const parsedSarif = readSarifFileOrThrow(sarifPath); validateSarifFileSchema(parsedSarif, sarifPath, logger); } - sarif = await combineSarifFilesUsingCLI( + sarifLog = await combineSarifFilesUsingCLI( sarifPaths, gitHubVersion, features, @@ -112663,21 +112792,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, ); } else { const sarifPath = sarifPaths[0]; - sarif = readSarifFile(sarifPath); - validateSarifFileSchema(sarif, sarifPath, logger); - await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion); + sarifLog = readSarifFileOrThrow(sarifPath); + 
validateSarifFileSchema(sarifLog, sarifPath, logger); + await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion); } - sarif = filterAlertsByDiffRange(logger, sarif); - sarif = await addFingerprints(sarif, checkoutPath, logger); + sarifLog = filterAlertsByDiffRange(logger, sarifLog); + sarifLog = await addFingerprints(sarifLog, checkoutPath, logger); const analysisKey = await getAnalysisKey(); const environment = getRequiredInput("matrix"); - sarif = populateRunAutomationDetails( - sarif, + sarifLog = populateRunAutomationDetails( + sarifLog, category, analysisKey, environment ); - return { sarif, analysisKey, environment }; + return { sarif: sarifLog, analysisKey, environment }; } async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) { const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */); @@ -112694,12 +112823,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc } async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { logger.startGroup(`Uploading ${uploadTarget.name} results`); - const sarif = postProcessingResults.sarif; - const toolNames = getToolNames(sarif); + const sarifLog = postProcessingResults.sarif; + const toolNames = getToolNames(sarifLog); logger.debug(`Validating that each SARIF run has a unique category`); - validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); + validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); - const sarifPayload = JSON.stringify(sarif); + const sarifPayload = JSON.stringify(sarifLog); logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; @@ -112741,9 +112870,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post }; } function 
dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { - if (!fs15.existsSync(outputDir)) { - fs15.mkdirSync(outputDir, { recursive: true }); - } else if (!fs15.lstatSync(outputDir).isDirectory()) { + if (!fs16.existsSync(outputDir)) { + fs16.mkdirSync(outputDir, { recursive: true }); + } else if (!fs16.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); @@ -112753,7 +112882,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { `upload${uploadTarget.sarifExtension}` ); logger.info(`Writing processed SARIF file to ${outputFile}`); - fs15.writeFileSync(outputFile, sarifPayload); + fs16.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3; @@ -112851,9 +112980,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger assertNever(status); } } -function validateUniqueCategory(sarif, sentinelPrefix) { +function validateUniqueCategory(sarifLog, sentinelPrefix) { const categories = {}; - for (const run2 of sarif.runs) { + for (const run2 of sarifLog.runs || []) { const id = run2?.automationDetails?.id; const tool = run2.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; @@ -112872,15 +113001,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) { function sanitize(str2) { return (str2 ?? 
"_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase(); } -var InvalidSarifUploadError = class extends Error { -}; -function filterAlertsByDiffRange(logger, sarif) { +function filterAlertsByDiffRange(logger, sarifLog) { const diffRanges = readDiffRangesJsonFile(logger); if (!diffRanges?.length) { - return sarif; + return sarifLog; + } + if (sarifLog.runs === void 0) { + return sarifLog; } const checkoutPath = getRequiredInput("checkout_path"); - for (const run2 of sarif.runs) { + for (const run2 of sarifLog.runs) { if (run2.results) { run2.results = run2.results.filter((result) => { const locations = [ @@ -112901,7 +113031,7 @@ function filterAlertsByDiffRange(logger, sarif) { }); } } - return sarif; + return sarifLog; } // src/upload-sarif.ts @@ -112986,7 +113116,7 @@ function doesGoExtractionOutputExist(config) { "go" /* go */ ); const trapDirectory = import_path4.default.join(golangDbDirectory, "trap", "go" /* go */); - return fs16.existsSync(trapDirectory) && fs16.readdirSync(trapDirectory).some( + return fs17.existsSync(trapDirectory) && fs17.readdirSync(trapDirectory).some( (fileName) => [ ".trap", ".trap.gz", diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index 01add7fd5..acd1b250e 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path7.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." 
+ a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -102870,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -103698,8 +103820,8 @@ var path3 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs2 = __toESM(require("fs")); @@ -104154,6 +104276,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -104165,11 +104292,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/lib/defaults.json b/lib/defaults.json index 94988f4cf..9b6ec84bd 100644 --- a/lib/defaults.json +++ b/lib/defaults.json @@ -1,6 +1,6 @@ { - "bundleVersion": "codeql-bundle-v2.24.2", - "cliVersion": "2.24.2", - "priorBundleVersion": "codeql-bundle-v2.24.1", - "priorCliVersion": "2.24.1" + "bundleVersion": "codeql-bundle-v2.24.3", + "cliVersion": "2.24.3", + "priorBundleVersion": "codeql-bundle-v2.24.2", + "priorCliVersion": "2.24.2" } diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 8c223911c..b18a94abb 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -204,7 +204,7 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto2 = __importStar2(require("crypto")); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var os4 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -212,10 +212,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } 
- if (!fs19.existsSync(filePath)) { + if (!fs20.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { + fs20.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { encoding: "utf8" }); } @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var path19 = __importStar2(require("path")); - _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + _a = fs20.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs19.promises.readlink(fsPath); + const result = yield fs20.promises.readlink(fsPath); if (exports2.IS_WINDOWS && !result.endsWith("\\")) { return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs19.constants.O_RDONLY; + exports2.READONLY = fs20.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -45986,7 +45986,7 @@ var require_package = 
__commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path19.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -50305,7 +50404,7 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core17 = __importStar2(require_core()); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path19 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); @@ -50359,7 +50458,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core17.debug(`Search path '${searchPath}'`); try { - yield __await2(fs19.promises.lstat(searchPath)); + yield __await2(fs20.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -50393,7 +50492,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path19.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs20.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path19.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50428,7 +50527,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield 
fs19.promises.stat(item.path); + stats = yield fs20.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -50440,10 +50539,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs19.promises.lstat(item.path); + stats = yield fs20.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs19.promises.realpath(item.path); + const realPath = yield fs20.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -50552,7 +50651,7 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles2; var crypto2 = __importStar2(require("crypto")); var core17 = __importStar2(require_core()); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var path19 = __importStar2(require("path")); @@ -50575,13 +50674,13 @@ var require_internal_hash_files = __commonJS({ writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs19.statSync(file).isDirectory()) { + if (fs20.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs19.createReadStream(file), hash2); + yield pipeline(fs20.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -51956,7 +52055,7 @@ var require_cacheUtils = __commonJS({ var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var path19 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); @@ 
-51985,7 +52084,7 @@ var require_cacheUtils = __commonJS({ }); } function getArchiveFileSizeInBytes(filePath) { - return fs19.statSync(filePath).size; + return fs20.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -52023,7 +52122,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs19.unlink)(filePath); + return util.promisify(fs20.unlink)(filePath); }); } function getVersion(app_1) { @@ -52065,7 +52164,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs20.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" 
=== t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" === t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." 
: "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + 
n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" !== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? 
m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" == t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? 
i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { 
const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? 
null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? 
this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += 
t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, 
"MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid 
attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? 
(e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _2, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: 
{ regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? 
this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _2(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _2 = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _2), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + 
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -92199,7 +92321,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); @@ -92310,7 +92432,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs19.createWriteStream(archivePath); + const writeStream = fs20.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -92335,7 +92457,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs19.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs20.promises.open(archivePath, "w"); const httpClient = new 
http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -92451,7 +92573,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs19.openSync(archivePath, "w"); + const fd = fs20.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -92469,12 +92591,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs19.writeFileSync(fd, result); + fs20.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs19.closeSync(fd); + fs20.closeSync(fd); } } }); @@ -92796,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({ var core17 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -92931,7 +93053,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs19.openSync(archivePath, "r"); + const fd = fs20.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -92945,7 +93067,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += 
maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs20.createReadStream(archivePath, { fd, start, end, @@ -92956,7 +93078,7 @@ Other caches with similar key:`); } }))); } finally { - fs19.closeSync(fd); + fs20.closeSync(fd); } return; }); @@ -98912,7 +99034,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os4 = require("os"); var cp = require("child_process"); - var fs19 = require("fs"); + var fs20 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { const platFilter = os4.platform(); @@ -98974,10 +99096,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs19.existsSync(lsbReleaseFile)) { - contents = fs19.readFileSync(lsbReleaseFile).toString(); - } else if (fs19.existsSync(osReleaseFile)) { - contents = fs19.readFileSync(osReleaseFile).toString(); + if (fs20.existsSync(lsbReleaseFile)) { + contents = fs20.readFileSync(lsbReleaseFile).toString(); + } else if (fs20.existsSync(osReleaseFile)) { + contents = fs20.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99186,7 +99308,7 @@ var require_tool_cache = __commonJS({ var core17 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os4 = __importStar2(require("os")); var path19 = __importStar2(require("path")); @@ -99232,7 +99354,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs19.existsSync(dest)) { + if (fs20.existsSync(dest)) { throw new Error(`Destination file path ${dest} 
already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99256,7 +99378,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs19.createWriteStream(dest)); + yield pipeline(readStream, fs20.createWriteStream(dest)); core17.debug("download complete"); succeeded = true; return dest; @@ -99468,11 +99590,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os4.arch(); core17.debug(`Caching tool ${tool} ${version} ${arch2}`); core17.debug(`source dir: ${sourceDir}`); - if (!fs19.statSync(sourceDir).isDirectory()) { + if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs19.readdirSync(sourceDir)) { + for (const itemName of fs20.readdirSync(sourceDir)) { const s = path19.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } @@ -99486,7 +99608,7 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os4.arch(); core17.debug(`Caching tool ${tool} ${version} ${arch2}`); core17.debug(`source file: ${sourceFile}`); - if (!fs19.statSync(sourceFile).isFile()) { + if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); @@ -99515,7 +99637,7 @@ var require_tool_cache = __commonJS({ versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path19.join(_getCacheDirectory(), toolName, versionSpec, arch2); core17.debug(`checking cache: ${cachePath}`); - if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) { + if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -99528,12 +99650,12 @@ var require_tool_cache = __commonJS({ const versions = []; arch2 = arch2 
|| os4.arch(); const toolPath = path19.join(_getCacheDirectory(), toolName); - if (fs19.existsSync(toolPath)) { - const children = fs19.readdirSync(toolPath); + if (fs20.existsSync(toolPath)) { + const children = fs20.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { const fullPath = path19.join(toolPath, child, arch2 || ""); - if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) { + if (fs20.existsSync(fullPath) && fs20.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99604,7 +99726,7 @@ var require_tool_cache = __commonJS({ function _completeToolPath(tool, version, arch2) { const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs19.writeFileSync(markerPath, ""); + fs20.writeFileSync(markerPath, ""); core17.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -102935,15 +103057,15 @@ var require_upload_zip_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadZipSpecification = exports2.validateRootDirectory = void 0; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var core_1 = require_core(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); function validateRootDirectory(rootDirectory) { - if (!fs19.existsSync(rootDirectory)) { + if (!fs20.existsSync(rootDirectory)) { throw new Error(`The provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs19.statSync(rootDirectory).isDirectory()) { + if (!fs20.statSync(rootDirectory).isDirectory()) { throw new Error(`The provided rootDirectory ${rootDirectory} is not a valid directory`); } (0, core_1.info)(`Root directory input is valid!`); @@ -102954,7 +103076,7 @@ var require_upload_zip_specification = __commonJS({ rootDirectory = (0, 
path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of filesToZip) { - const stats = fs19.lstatSync(file, { throwIfNoEntry: false }); + const stats = fs20.lstatSync(file, { throwIfNoEntry: false }); if (!stats) { throw new Error(`File ${file} does not exist`); } @@ -103376,6 +103498,7 @@ var require_minimatch2 = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; @@ -103432,51 +103555,146 @@ var require_minimatch2 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + } + _matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + let firstgs = -1; + for (let i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + let lastgs = -1; + for (let i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + const head = pattern.slice(patternIndex, firstgs); + const body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + const tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { return false; } - var hit; + fileIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + const tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + if (!this._matchOne(file, tail, partial, tailStart - 1, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i = fileIndex; i < file.length - fileTailMatch; i++) { + const f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let idx = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[idx--] + b[0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch + ); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + _matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + } + _matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + const p = pattern[pi]; + const f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === GLOBSTAR) return false; + let hit; if (typeof p === "string") { hit = f === p; this.debug("string match", p, f, hit); @@ -103841,13 +104059,13 @@ var require_minimatch2 = __commonJS({ var require_readdir_glob = __commonJS({ "node_modules/readdir-glob/index.js"(exports2, module2) { module2.exports = readdirGlob; - var fs19 = require("fs"); + var fs20 = require("fs"); var { EventEmitter } = require("events"); var { Minimatch } = require_minimatch2(); var { resolve: resolve8 } = require("path"); function readdir(dir, strict) { return new Promise((resolve9, reject) => { - fs19.readdir(dir, { withFileTypes: true }, (err, files) => { + fs20.readdir(dir, { withFileTypes: true }, (err, files) => { if (err) { switch (err.code) { case "ENOTDIR": @@ -103880,7 +104098,7 @@ var require_readdir_glob = __commonJS({ } function stat(file, followSymlinks) { return new Promise((resolve9, reject) => { - const statFunc = followSymlinks ? fs19.stat : fs19.lstat; + const statFunc = followSymlinks ? 
fs20.stat : fs20.lstat; statFunc(file, (err, stats) => { if (err) { switch (err.code) { @@ -105945,54 +106163,54 @@ var require_polyfills = __commonJS({ } var chdir; module2.exports = patch; - function patch(fs19) { + function patch(fs20) { if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs19); + patchLchmod(fs20); } - if (!fs19.lutimes) { - patchLutimes(fs19); + if (!fs20.lutimes) { + patchLutimes(fs20); } - fs19.chown = chownFix(fs19.chown); - fs19.fchown = chownFix(fs19.fchown); - fs19.lchown = chownFix(fs19.lchown); - fs19.chmod = chmodFix(fs19.chmod); - fs19.fchmod = chmodFix(fs19.fchmod); - fs19.lchmod = chmodFix(fs19.lchmod); - fs19.chownSync = chownFixSync(fs19.chownSync); - fs19.fchownSync = chownFixSync(fs19.fchownSync); - fs19.lchownSync = chownFixSync(fs19.lchownSync); - fs19.chmodSync = chmodFixSync(fs19.chmodSync); - fs19.fchmodSync = chmodFixSync(fs19.fchmodSync); - fs19.lchmodSync = chmodFixSync(fs19.lchmodSync); - fs19.stat = statFix(fs19.stat); - fs19.fstat = statFix(fs19.fstat); - fs19.lstat = statFix(fs19.lstat); - fs19.statSync = statFixSync(fs19.statSync); - fs19.fstatSync = statFixSync(fs19.fstatSync); - fs19.lstatSync = statFixSync(fs19.lstatSync); - if (fs19.chmod && !fs19.lchmod) { - fs19.lchmod = function(path19, mode, cb) { + fs20.chown = chownFix(fs20.chown); + fs20.fchown = chownFix(fs20.fchown); + fs20.lchown = chownFix(fs20.lchown); + fs20.chmod = chmodFix(fs20.chmod); + fs20.fchmod = chmodFix(fs20.fchmod); + fs20.lchmod = chmodFix(fs20.lchmod); + fs20.chownSync = chownFixSync(fs20.chownSync); + fs20.fchownSync = chownFixSync(fs20.fchownSync); + fs20.lchownSync = chownFixSync(fs20.lchownSync); + fs20.chmodSync = chmodFixSync(fs20.chmodSync); + fs20.fchmodSync = chmodFixSync(fs20.fchmodSync); + fs20.lchmodSync = chmodFixSync(fs20.lchmodSync); + fs20.stat = statFix(fs20.stat); + fs20.fstat = statFix(fs20.fstat); + fs20.lstat = statFix(fs20.lstat); + fs20.statSync = 
statFixSync(fs20.statSync); + fs20.fstatSync = statFixSync(fs20.fstatSync); + fs20.lstatSync = statFixSync(fs20.lstatSync); + if (fs20.chmod && !fs20.lchmod) { + fs20.lchmod = function(path19, mode, cb) { if (cb) process.nextTick(cb); }; - fs19.lchmodSync = function() { + fs20.lchmodSync = function() { }; } - if (fs19.chown && !fs19.lchown) { - fs19.lchown = function(path19, uid, gid, cb) { + if (fs20.chown && !fs20.lchown) { + fs20.lchown = function(path19, uid, gid, cb) { if (cb) process.nextTick(cb); }; - fs19.lchownSync = function() { + fs20.lchownSync = function() { }; } if (platform === "win32") { - fs19.rename = typeof fs19.rename !== "function" ? fs19.rename : (function(fs$rename) { + fs20.rename = typeof fs20.rename !== "function" ? fs20.rename : (function(fs$rename) { function rename(from, to, cb) { var start = Date.now(); var backoff = 0; fs$rename(from, to, function CB(er) { if (er && (er.code === "EACCES" || er.code === "EPERM") && Date.now() - start < 6e4) { setTimeout(function() { - fs19.stat(to, function(stater, st) { + fs20.stat(to, function(stater, st) { if (stater && stater.code === "ENOENT") fs$rename(from, to, CB); else @@ -106008,9 +106226,9 @@ var require_polyfills = __commonJS({ } if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename); return rename; - })(fs19.rename); + })(fs20.rename); } - fs19.read = typeof fs19.read !== "function" ? fs19.read : (function(fs$read) { + fs20.read = typeof fs20.read !== "function" ? 
fs20.read : (function(fs$read) { function read(fd, buffer, offset, length, position, callback_) { var callback; if (callback_ && typeof callback_ === "function") { @@ -106018,22 +106236,22 @@ var require_polyfills = __commonJS({ callback = function(er, _2, __) { if (er && er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; - return fs$read.call(fs19, fd, buffer, offset, length, position, callback); + return fs$read.call(fs20, fd, buffer, offset, length, position, callback); } callback_.apply(this, arguments); }; } - return fs$read.call(fs19, fd, buffer, offset, length, position, callback); + return fs$read.call(fs20, fd, buffer, offset, length, position, callback); } if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read); return read; - })(fs19.read); - fs19.readSync = typeof fs19.readSync !== "function" ? fs19.readSync : /* @__PURE__ */ (function(fs$readSync) { + })(fs20.read); + fs20.readSync = typeof fs20.readSync !== "function" ? fs20.readSync : /* @__PURE__ */ (function(fs$readSync) { return function(fd, buffer, offset, length, position) { var eagCounter = 0; while (true) { try { - return fs$readSync.call(fs19, fd, buffer, offset, length, position); + return fs$readSync.call(fs20, fd, buffer, offset, length, position); } catch (er) { if (er.code === "EAGAIN" && eagCounter < 10) { eagCounter++; @@ -106043,10 +106261,10 @@ var require_polyfills = __commonJS({ } } }; - })(fs19.readSync); - function patchLchmod(fs20) { - fs20.lchmod = function(path19, mode, callback) { - fs20.open( + })(fs20.readSync); + function patchLchmod(fs21) { + fs21.lchmod = function(path19, mode, callback) { + fs21.open( path19, constants.O_WRONLY | constants.O_SYMLINK, mode, @@ -106055,80 +106273,80 @@ var require_polyfills = __commonJS({ if (callback) callback(err); return; } - fs20.fchmod(fd, mode, function(err2) { - fs20.close(fd, function(err22) { + fs21.fchmod(fd, mode, function(err2) { + fs21.close(fd, function(err22) { if (callback) callback(err2 || err22); }); }); } 
); }; - fs20.lchmodSync = function(path19, mode) { - var fd = fs20.openSync(path19, constants.O_WRONLY | constants.O_SYMLINK, mode); + fs21.lchmodSync = function(path19, mode) { + var fd = fs21.openSync(path19, constants.O_WRONLY | constants.O_SYMLINK, mode); var threw = true; var ret; try { - ret = fs20.fchmodSync(fd, mode); + ret = fs21.fchmodSync(fd, mode); threw = false; } finally { if (threw) { try { - fs20.closeSync(fd); + fs21.closeSync(fd); } catch (er) { } } else { - fs20.closeSync(fd); + fs21.closeSync(fd); } } return ret; }; } - function patchLutimes(fs20) { - if (constants.hasOwnProperty("O_SYMLINK") && fs20.futimes) { - fs20.lutimes = function(path19, at, mt, cb) { - fs20.open(path19, constants.O_SYMLINK, function(er, fd) { + function patchLutimes(fs21) { + if (constants.hasOwnProperty("O_SYMLINK") && fs21.futimes) { + fs21.lutimes = function(path19, at, mt, cb) { + fs21.open(path19, constants.O_SYMLINK, function(er, fd) { if (er) { if (cb) cb(er); return; } - fs20.futimes(fd, at, mt, function(er2) { - fs20.close(fd, function(er22) { + fs21.futimes(fd, at, mt, function(er2) { + fs21.close(fd, function(er22) { if (cb) cb(er2 || er22); }); }); }); }; - fs20.lutimesSync = function(path19, at, mt) { - var fd = fs20.openSync(path19, constants.O_SYMLINK); + fs21.lutimesSync = function(path19, at, mt) { + var fd = fs21.openSync(path19, constants.O_SYMLINK); var ret; var threw = true; try { - ret = fs20.futimesSync(fd, at, mt); + ret = fs21.futimesSync(fd, at, mt); threw = false; } finally { if (threw) { try { - fs20.closeSync(fd); + fs21.closeSync(fd); } catch (er) { } } else { - fs20.closeSync(fd); + fs21.closeSync(fd); } } return ret; }; - } else if (fs20.futimes) { - fs20.lutimes = function(_a, _b, _c, cb) { + } else if (fs21.futimes) { + fs21.lutimes = function(_a, _b, _c, cb) { if (cb) process.nextTick(cb); }; - fs20.lutimesSync = function() { + fs21.lutimesSync = function() { }; } } function chmodFix(orig) { if (!orig) return orig; return 
function(target, mode, cb) { - return orig.call(fs19, target, mode, function(er) { + return orig.call(fs20, target, mode, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -106138,7 +106356,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, mode) { try { - return orig.call(fs19, target, mode); + return orig.call(fs20, target, mode); } catch (er) { if (!chownErOk(er)) throw er; } @@ -106147,7 +106365,7 @@ var require_polyfills = __commonJS({ function chownFix(orig) { if (!orig) return orig; return function(target, uid, gid, cb) { - return orig.call(fs19, target, uid, gid, function(er) { + return orig.call(fs20, target, uid, gid, function(er) { if (chownErOk(er)) er = null; if (cb) cb.apply(this, arguments); }); @@ -106157,7 +106375,7 @@ var require_polyfills = __commonJS({ if (!orig) return orig; return function(target, uid, gid) { try { - return orig.call(fs19, target, uid, gid); + return orig.call(fs20, target, uid, gid); } catch (er) { if (!chownErOk(er)) throw er; } @@ -106177,13 +106395,13 @@ var require_polyfills = __commonJS({ } if (cb) cb.apply(this, arguments); } - return options ? orig.call(fs19, target, options, callback) : orig.call(fs19, target, callback); + return options ? orig.call(fs20, target, options, callback) : orig.call(fs20, target, callback); }; } function statFixSync(orig) { if (!orig) return orig; return function(target, options) { - var stats = options ? orig.call(fs19, target, options) : orig.call(fs19, target); + var stats = options ? 
orig.call(fs20, target, options) : orig.call(fs20, target); if (stats) { if (stats.uid < 0) stats.uid += 4294967296; if (stats.gid < 0) stats.gid += 4294967296; @@ -106212,7 +106430,7 @@ var require_legacy_streams = __commonJS({ "node_modules/graceful-fs/legacy-streams.js"(exports2, module2) { var Stream = require("stream").Stream; module2.exports = legacy; - function legacy(fs19) { + function legacy(fs20) { return { ReadStream, WriteStream @@ -106255,7 +106473,7 @@ var require_legacy_streams = __commonJS({ }); return; } - fs19.open(this.path, this.flags, this.mode, function(err, fd) { + fs20.open(this.path, this.flags, this.mode, function(err, fd) { if (err) { self2.emit("error", err); self2.readable = false; @@ -106294,7 +106512,7 @@ var require_legacy_streams = __commonJS({ this.busy = false; this._queue = []; if (this.fd === null) { - this._open = fs19.open; + this._open = fs20.open; this._queue.push([this._open, this.path, this.flags, this.mode, void 0]); this.flush(); } @@ -106329,7 +106547,7 @@ var require_clone = __commonJS({ // node_modules/graceful-fs/graceful-fs.js var require_graceful_fs = __commonJS({ "node_modules/graceful-fs/graceful-fs.js"(exports2, module2) { - var fs19 = require("fs"); + var fs20 = require("fs"); var polyfills = require_polyfills(); var legacy = require_legacy_streams(); var clone = require_clone(); @@ -106361,12 +106579,12 @@ var require_graceful_fs = __commonJS({ m = "GFS4: " + m.split(/\n/).join("\nGFS4: "); console.error(m); }; - if (!fs19[gracefulQueue]) { + if (!fs20[gracefulQueue]) { queue = global[gracefulQueue] || []; - publishQueue(fs19, queue); - fs19.close = (function(fs$close) { + publishQueue(fs20, queue); + fs20.close = (function(fs$close) { function close(fd, cb) { - return fs$close.call(fs19, fd, function(err) { + return fs$close.call(fs20, fd, function(err) { if (!err) { resetQueue(); } @@ -106378,40 +106596,40 @@ var require_graceful_fs = __commonJS({ value: fs$close }); return close; - })(fs19.close); - 
fs19.closeSync = (function(fs$closeSync) { + })(fs20.close); + fs20.closeSync = (function(fs$closeSync) { function closeSync(fd) { - fs$closeSync.apply(fs19, arguments); + fs$closeSync.apply(fs20, arguments); resetQueue(); } Object.defineProperty(closeSync, previousSymbol, { value: fs$closeSync }); return closeSync; - })(fs19.closeSync); + })(fs20.closeSync); if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) { process.on("exit", function() { - debug5(fs19[gracefulQueue]); - require("assert").equal(fs19[gracefulQueue].length, 0); + debug5(fs20[gracefulQueue]); + require("assert").equal(fs20[gracefulQueue].length, 0); }); } } var queue; if (!global[gracefulQueue]) { - publishQueue(global, fs19[gracefulQueue]); + publishQueue(global, fs20[gracefulQueue]); } - module2.exports = patch(clone(fs19)); - if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs19.__patched) { - module2.exports = patch(fs19); - fs19.__patched = true; + module2.exports = patch(clone(fs20)); + if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs20.__patched) { + module2.exports = patch(fs20); + fs20.__patched = true; } - function patch(fs20) { - polyfills(fs20); - fs20.gracefulify = patch; - fs20.createReadStream = createReadStream2; - fs20.createWriteStream = createWriteStream3; - var fs$readFile = fs20.readFile; - fs20.readFile = readFile; + function patch(fs21) { + polyfills(fs21); + fs21.gracefulify = patch; + fs21.createReadStream = createReadStream2; + fs21.createWriteStream = createWriteStream3; + var fs$readFile = fs21.readFile; + fs21.readFile = readFile; function readFile(path19, options, cb) { if (typeof options === "function") cb = options, options = null; @@ -106427,8 +106645,8 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$writeFile = fs20.writeFile; - fs20.writeFile = writeFile; + var fs$writeFile = fs21.writeFile; + fs21.writeFile = writeFile; function writeFile(path19, data, options, cb) { if (typeof options === "function") cb = options, options = null; @@ -106444,9 
+106662,9 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$appendFile = fs20.appendFile; + var fs$appendFile = fs21.appendFile; if (fs$appendFile) - fs20.appendFile = appendFile; + fs21.appendFile = appendFile; function appendFile(path19, data, options, cb) { if (typeof options === "function") cb = options, options = null; @@ -106462,9 +106680,9 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$copyFile = fs20.copyFile; + var fs$copyFile = fs21.copyFile; if (fs$copyFile) - fs20.copyFile = copyFile2; + fs21.copyFile = copyFile2; function copyFile2(src, dest, flags, cb) { if (typeof flags === "function") { cb = flags; @@ -106482,8 +106700,8 @@ var require_graceful_fs = __commonJS({ }); } } - var fs$readdir = fs20.readdir; - fs20.readdir = readdir; + var fs$readdir = fs21.readdir; + fs21.readdir = readdir; var noReaddirOptionVersions = /^v[0-5]\./; function readdir(path19, options, cb) { if (typeof options === "function") @@ -106524,21 +106742,21 @@ var require_graceful_fs = __commonJS({ } } if (process.version.substr(0, 4) === "v0.8") { - var legStreams = legacy(fs20); + var legStreams = legacy(fs21); ReadStream = legStreams.ReadStream; WriteStream = legStreams.WriteStream; } - var fs$ReadStream = fs20.ReadStream; + var fs$ReadStream = fs21.ReadStream; if (fs$ReadStream) { ReadStream.prototype = Object.create(fs$ReadStream.prototype); ReadStream.prototype.open = ReadStream$open; } - var fs$WriteStream = fs20.WriteStream; + var fs$WriteStream = fs21.WriteStream; if (fs$WriteStream) { WriteStream.prototype = Object.create(fs$WriteStream.prototype); WriteStream.prototype.open = WriteStream$open; } - Object.defineProperty(fs20, "ReadStream", { + Object.defineProperty(fs21, "ReadStream", { get: function() { return ReadStream; }, @@ -106548,7 +106766,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(fs20, "WriteStream", { + Object.defineProperty(fs21, "WriteStream", { get: function() { return 
WriteStream; }, @@ -106559,7 +106777,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileReadStream = ReadStream; - Object.defineProperty(fs20, "FileReadStream", { + Object.defineProperty(fs21, "FileReadStream", { get: function() { return FileReadStream; }, @@ -106570,7 +106788,7 @@ var require_graceful_fs = __commonJS({ configurable: true }); var FileWriteStream = WriteStream; - Object.defineProperty(fs20, "FileWriteStream", { + Object.defineProperty(fs21, "FileWriteStream", { get: function() { return FileWriteStream; }, @@ -106619,13 +106837,13 @@ var require_graceful_fs = __commonJS({ }); } function createReadStream2(path19, options) { - return new fs20.ReadStream(path19, options); + return new fs21.ReadStream(path19, options); } function createWriteStream3(path19, options) { - return new fs20.WriteStream(path19, options); + return new fs21.WriteStream(path19, options); } - var fs$open = fs20.open; - fs20.open = open; + var fs$open = fs21.open; + fs21.open = open; function open(path19, flags, mode, cb) { if (typeof mode === "function") cb = mode, mode = null; @@ -106641,20 +106859,20 @@ var require_graceful_fs = __commonJS({ }); } } - return fs20; + return fs21; } function enqueue(elem) { debug5("ENQUEUE", elem[0].name, elem[1]); - fs19[gracefulQueue].push(elem); + fs20[gracefulQueue].push(elem); retry2(); } var retryTimer; function resetQueue() { var now = Date.now(); - for (var i = 0; i < fs19[gracefulQueue].length; ++i) { - if (fs19[gracefulQueue][i].length > 2) { - fs19[gracefulQueue][i][3] = now; - fs19[gracefulQueue][i][4] = now; + for (var i = 0; i < fs20[gracefulQueue].length; ++i) { + if (fs20[gracefulQueue][i].length > 2) { + fs20[gracefulQueue][i][3] = now; + fs20[gracefulQueue][i][4] = now; } } retry2(); @@ -106662,9 +106880,9 @@ var require_graceful_fs = __commonJS({ function retry2() { clearTimeout(retryTimer); retryTimer = void 0; - if (fs19[gracefulQueue].length === 0) + if (fs20[gracefulQueue].length === 0) return; - var 
elem = fs19[gracefulQueue].shift(); + var elem = fs20[gracefulQueue].shift(); var fn = elem[0]; var args = elem[1]; var err = elem[2]; @@ -106686,7 +106904,7 @@ var require_graceful_fs = __commonJS({ debug5("RETRY", fn.name, args); fn.apply(null, args.concat([startTime])); } else { - fs19[gracefulQueue].push(elem); + fs20[gracefulQueue].push(elem); } } if (retryTimer === void 0) { @@ -116797,12 +117015,60 @@ var require_unescape = __commonJS({ var require_ast = __commonJS({ "node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js"(exports2) { "use strict"; + var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.AST = void 0; var brace_expressions_js_1 = require_brace_expressions(); var unescape_js_1 = require_unescape(); var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]); var isExtglobType = (c) => types.has(c); + var isExtglobAST = (c) => isExtglobType(c.type); + var adoptionMap = /* @__PURE__ */ new Map([ + ["!", ["@"]], + ["?", ["?", "@"]], + ["@", ["@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@"]] + ]); + var adoptionWithSpaceMap = /* @__PURE__ */ new Map([ + ["!", ["?"]], + ["@", ["?"]], + ["+", ["?", "*"]] + ]); + var adoptionAnyMap = /* @__PURE__ */ new Map([ + ["!", ["?", "@"]], + ["?", ["?", "@"]], + ["@", ["?", "@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@", "?", "*"]] + ]); + var usurpMap = /* @__PURE__ */ new Map([ + ["!", /* @__PURE__ */ new Map([["!", "@"]])], + [ + "?", + /* @__PURE__ */ new Map([ + ["*", "*"], + ["+", "*"] + ]) + ], + [ + "@", + /* @__PURE__ */ new Map([ + ["!", "!"], + ["?", "?"], + ["@", "@"], + ["*", "*"], + ["+", "+"] + ]) + ], + [ + "+", + /* @__PURE__ */ new Map([ + ["?", "*"], + ["*", "*"] + ]) + ] + ]); var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))"; var startNoDot = "(?!\\.)"; var addPatternStart = /* @__PURE__ */ new Set(["[", "."]); @@ -116812,7 +117078,8 @@ var require_ast = __commonJS({ var qmark = "[^/]"; var star = qmark + "*?"; var starNoEmpty = qmark 
+ "+?"; - var AST = class _AST { + var ID = 0; + var AST = class { type; #root; #hasMagic; @@ -116827,6 +117094,22 @@ var require_ast = __commonJS({ // set to true if it's an extglob with no children // (which really means one child of '') #emptyExt = false; + id = ++ID; + get depth() { + return (this.#parent?.depth ?? -1) + 1; + } + [/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")]() { + return { + "@@type": "AST", + id: this.id, + type: this.type, + root: this.#root.id, + parent: this.#parent?.id, + depth: this.depth, + partsLength: this.#parts.length, + parts: this.#parts + }; + } constructor(type2, parent, options = {}) { this.type = type2; if (type2) @@ -116892,7 +117175,7 @@ var require_ast = __commonJS({ for (const p of parts) { if (p === "") continue; - if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) { + if (typeof p !== "string" && !(p instanceof _a && p.#parent === this)) { throw new Error("invalid part: " + p); } this.#parts.push(p); @@ -116917,7 +117200,7 @@ var require_ast = __commonJS({ const p = this.#parent; for (let i = 0; i < this.#parentIndex; i++) { const pp = p.#parts[i]; - if (!(pp instanceof _AST && pp.type === "!")) { + if (!(pp instanceof _a && pp.type === "!")) { return false; } } @@ -116942,13 +117225,14 @@ var require_ast = __commonJS({ this.push(part.clone(this)); } clone(parent) { - const c = new _AST(this.type, parent); + const c = new _a(this.type, parent); for (const p of this.#parts) { c.copyIn(p); } return c; } - static #parseAST(str2, ast, pos, opt) { + static #parseAST(str2, ast, pos, opt, extDepth) { + const maxDepth = opt.maxExtglobRecursion ?? 
2; let escaping = false; let inBrace = false; let braceStart = -1; @@ -116980,11 +117264,12 @@ var require_ast = __commonJS({ acc2 += c; continue; } - if (!opt.noext && isExtglobType(c) && str2.charAt(i2) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i2) === "(" && extDepth <= maxDepth; + if (doRecurse) { ast.push(acc2); acc2 = ""; - const ext = new _AST(c, ast); - i2 = _AST.#parseAST(str2, ext, i2, opt); + const ext = new _a(c, ast); + i2 = _a.#parseAST(str2, ext, i2, opt, extDepth + 1); ast.push(ext); continue; } @@ -116994,7 +117279,7 @@ var require_ast = __commonJS({ return i2; } let i = pos + 1; - let part = new _AST(null, ast); + let part = new _a(null, ast); const parts = []; let acc = ""; while (i < str2.length) { @@ -117021,19 +117306,22 @@ var require_ast = __commonJS({ acc += c; continue; } - if (isExtglobType(c) && str2.charAt(i) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i) === "(" && /* c8 ignore start - the maxDepth is sufficient here */ + (extDepth <= maxDepth || ast && ast.#canAdoptType(c)); + if (doRecurse) { + const depthAdd = ast && ast.#canAdoptType(c) ? 
0 : 1; part.push(acc); acc = ""; - const ext = new _AST(c, part); + const ext = new _a(c, part); part.push(ext); - i = _AST.#parseAST(str2, ext, i, opt); + i = _a.#parseAST(str2, ext, i, opt, extDepth + depthAdd); continue; } if (c === "|") { part.push(acc); acc = ""; parts.push(part); - part = new _AST(null, ast); + part = new _a(null, ast); continue; } if (c === ")") { @@ -117052,9 +117340,71 @@ var require_ast = __commonJS({ ast.#parts = [str2.substring(pos - 1)]; return i; } + #canAdoptWithSpace(child) { + return this.#canAdopt(child, adoptionWithSpaceMap); + } + #canAdopt(child, map2 = adoptionMap) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canAdoptType(gc.type, map2); + } + #canAdoptType(c, map2 = adoptionAnyMap) { + return !!map2.get(this.type)?.includes(c); + } + #adoptWithSpace(child, index) { + const gc = child.#parts[0]; + const blank = new _a(null, gc, this.options); + blank.#parts.push(""); + gc.push(blank); + this.#adopt(child, index); + } + #adopt(child, index) { + const gc = child.#parts[0]; + this.#parts.splice(index, 1, ...gc.#parts); + for (const p of gc.#parts) { + if (typeof p === "object") + p.#parent = this; + } + this.#toString = void 0; + } + #canUsurpType(c) { + const m = usurpMap.get(this.type); + return !!m?.has(c); + } + #canUsurp(child) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null || this.#parts.length !== 1) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canUsurpType(gc.type); + } + #usurp(child) { + const m = usurpMap.get(this.type); + const gc = child.#parts[0]; + const nt = m?.get(gc.type); + if (!nt) + return false; + this.#parts = gc.#parts; + 
for (const p of this.#parts) { + if (typeof p === "object") { + p.#parent = this; + } + } + this.type = nt; + this.#toString = void 0; + this.#emptyExt = false; + } static fromGlob(pattern, options = {}) { - const ast = new _AST(null, void 0, options); - _AST.#parseAST(pattern, ast, 0, options); + const ast = new _a(null, void 0, options); + _a.#parseAST(pattern, ast, 0, options, 0); return ast; } // returns the regular expression if there's magic, or the unescaped @@ -117148,12 +117498,14 @@ var require_ast = __commonJS({ // or start or whatever) and prepend ^ or / at the Regexp construction. toRegExpSource(allowDot) { const dot = allowDot ?? !!this.#options.dot; - if (this.#root === this) + if (this.#root === this) { + this.#flatten(); this.#fillNegs(); - if (!this.type) { + } + if (!isExtglobAST(this)) { const noEmpty = this.isStart() && this.isEnd() && !this.#parts.some((s) => typeof s !== "string"); const src = this.#parts.map((p) => { - const [re, _2, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); + const [re, _2, hasMagic, uflag] = typeof p === "string" ? _a.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); this.#hasMagic = this.#hasMagic || hasMagic; this.#uflag = this.#uflag || uflag; return re; @@ -117192,9 +117544,10 @@ var require_ast = __commonJS({ let body = this.#partsToRegExp(dot); if (this.isStart() && this.isEnd() && !body && this.type !== "!") { const s = this.toString(); - this.#parts = [s]; - this.type = null; - this.#hasMagic = void 0; + const me = this; + me.#parts = [s]; + me.type = null; + me.#hasMagic = void 0; return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; } let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? 
"" : this.#partsToRegExp(true); @@ -117221,6 +117574,38 @@ var require_ast = __commonJS({ this.#uflag ]; } + #flatten() { + if (!isExtglobAST(this)) { + for (const p of this.#parts) { + if (typeof p === "object") { + p.#flatten(); + } + } + } else { + let iterations = 0; + let done = false; + do { + done = true; + for (let i = 0; i < this.#parts.length; i++) { + const c = this.#parts[i]; + if (typeof c === "object") { + c.#flatten(); + if (this.#canAdopt(c)) { + done = false; + this.#adopt(c, i); + } else if (this.#canAdoptWithSpace(c)) { + done = false; + this.#adoptWithSpace(c, i); + } else if (this.#canUsurp(c)) { + done = false; + this.#usurp(c); + } + } + } + } while (!done && ++iterations < 10); + } + this.#toString = void 0; + } #partsToRegExp(dot) { return this.#parts.map((p) => { if (typeof p === "string") { @@ -117282,6 +117667,7 @@ var require_ast = __commonJS({ } }; exports2.AST = AST; + _a = AST; } }); @@ -117466,11 +117852,13 @@ var require_commonjs20 = __commonJS({ isWindows; platform; windowsNoMagicRoot; + maxGlobstarRecursion; regexp; constructor(pattern, options = {}) { (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); options = options || {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200; this.pattern = pattern; this.platform = options.platform || defaultPlatform; this.isWindows = this.platform === "win32"; @@ -117807,7 +118195,8 @@ var require_commonjs20 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial = false) { - const options = this.options; + let fileStartIndex = 0; + let patternStartIndex = 0; if (this.isWindows) { const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]); const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" 
&& /^[a-z]:$/i.test(file[3]); @@ -117822,11 +118211,8 @@ var require_commonjs20 = __commonJS({ ]; if (fd.toLowerCase() === pd.toLowerCase()) { pattern[pdi] = fd; - if (pdi > fdi) { - pattern = pattern.slice(pdi); - } else if (fdi > pdi) { - file = file.slice(fdi); - } + patternStartIndex = pdi; + fileStartIndex = fdi; } } } @@ -117834,49 +118220,123 @@ var require_commonjs20 = __commonJS({ if (optimizationLevel >= 2) { file = this.levelTwoFileOptimize(file); } - this.debug("matchOne", this, { file, pattern }); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) { + if (pattern.includes(exports2.GLOBSTAR)) { + return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex); + } + return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex); + } + #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + const firstgs = pattern.indexOf(exports2.GLOBSTAR, patternIndex); + const lastgs = pattern.lastIndexOf(exports2.GLOBSTAR); + const [head, body, tail] = partial ? [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1), + [] + ] : [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1, lastgs), + pattern.slice(lastgs + 1) + ]; + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this.#matchOne(fileHead, head, partial, 0, 0)) { return false; } - if (p === exports2.GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") - return false; - } - return true; + fileIndex += head.length; + patternIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) + return false; + let tailStart = file.length - tail.length; + if (this.#matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } + tailStart--; + if (!this.#matchOne(file, tail, partial, tailStart, 0)) { + return false; } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) { - return true; - } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i2 = fileIndex; i2 < file.length - fileTailMatch; i2++) { + const f = String(file[i2]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === exports2.GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let i = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length); + } + return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; + } + fileIndex++; + } + return partial || null; + } + #matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi; + let pi; + let pl; + let fl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + let p = pattern[pi]; + let f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === exports2.GLOBSTAR) { return false; } let hit; @@ -121517,8 +121977,8 @@ var require_commonjs23 = __commonJS({ * * @internal */ - constructor(cwd = process.cwd(), pathImpl, sep5, { nocase, childrenCacheSize = 16 * 1024, fs: fs19 = defaultFS } = {}) { - this.#fs = fsFromOption(fs19); + constructor(cwd = process.cwd(), pathImpl, sep5, { nocase, childrenCacheSize = 16 * 1024, fs: fs20 = defaultFS } = {}) { + this.#fs = fsFromOption(fs20); if (cwd instanceof URL || cwd.startsWith("file://")) { cwd = (0, node_url_1.fileURLToPath)(cwd); } @@ -122077,8 +122537,8 @@ var require_commonjs23 = __commonJS({ /** * @internal */ - newRoot(fs19) { - return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs19 }); + newRoot(fs20) { + return new PathWin32(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs20 }); } /** * Return true if the provided path string is an absolute path @@ -122107,8 +122567,8 @@ var require_commonjs23 = __commonJS({ /** * @internal */ - newRoot(fs19) { - return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs19 }); + newRoot(fs20) { + return new PathPosix(this.rootPath, IFDIR, void 0, this.roots, this.nocase, this.childrenCache(), { fs: fs20 }); } /** * Return true if the provided path string is an absolute path @@ -123293,7 +123753,7 @@ var require_commonjs24 = __commonJS({ // node_modules/archiver-utils/file.js var require_file4 = __commonJS({ 
"node_modules/archiver-utils/file.js"(exports2, module2) { - var fs19 = require_graceful_fs(); + var fs20 = require_graceful_fs(); var path19 = require("path"); var flatten = require_flatten(); var difference = require_difference(); @@ -123320,7 +123780,7 @@ var require_file4 = __commonJS({ }; file.exists = function() { var filepath = path19.join.apply(path19, arguments); - return fs19.existsSync(filepath); + return fs20.existsSync(filepath); }; file.expand = function(...args) { var options = isPlainObject3(args[0]) ? args.shift() : {}; @@ -123338,7 +123798,7 @@ var require_file4 = __commonJS({ if (typeof options.filter === "function") { return options.filter(filepath); } else { - return fs19.statSync(filepath)[options.filter](); + return fs20.statSync(filepath)[options.filter](); } } catch (e) { return false; @@ -123446,7 +123906,7 @@ var require_file4 = __commonJS({ // node_modules/archiver-utils/index.js var require_archiver_utils = __commonJS({ "node_modules/archiver-utils/index.js"(exports2, module2) { - var fs19 = require_graceful_fs(); + var fs20 = require_graceful_fs(); var path19 = require("path"); var isStream = require_is_stream(); var lazystream = require_lazystream(); @@ -123495,7 +123955,7 @@ var require_archiver_utils = __commonJS({ }; utils.lazyReadStream = function(filepath) { return new lazystream.Readable(function() { - return fs19.createReadStream(filepath); + return fs20.createReadStream(filepath); }); }; utils.normalizeInputSource = function(source) { @@ -123523,7 +123983,7 @@ var require_archiver_utils = __commonJS({ callback = base; base = dirpath; } - fs19.readdir(dirpath, function(err, list) { + fs20.readdir(dirpath, function(err, list) { var i = 0; var file; var filepath; @@ -123536,7 +123996,7 @@ var require_archiver_utils = __commonJS({ return callback(null, results); } filepath = path19.join(dirpath, file); - fs19.stat(filepath, function(err2, stats) { + fs20.stat(filepath, function(err2, stats) { results.push({ path: filepath, 
relative: path19.relative(base, filepath).replace(/\\/g, "/"), @@ -123598,7 +124058,7 @@ var require_error3 = __commonJS({ // node_modules/archiver/lib/core.js var require_core2 = __commonJS({ "node_modules/archiver/lib/core.js"(exports2, module2) { - var fs19 = require("fs"); + var fs20 = require("fs"); var glob2 = require_readdir_glob(); var async = require_async(); var path19 = require("path"); @@ -123662,7 +124122,7 @@ var require_core2 = __commonJS({ data.sourcePath = filepath; task.data = data; this._entriesCount++; - if (data.stats && data.stats instanceof fs19.Stats) { + if (data.stats && data.stats instanceof fs20.Stats) { task = this._updateQueueTaskWithStats(task, data.stats); if (task) { if (data.stats.size) { @@ -123833,7 +124293,7 @@ var require_core2 = __commonJS({ callback(); return; } - fs19.lstat(task.filepath, function(err, stats) { + fs20.lstat(task.filepath, function(err, stats) { if (this._state.aborted) { setImmediate(callback); return; @@ -123876,7 +124336,7 @@ var require_core2 = __commonJS({ task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else if (stats.isSymbolicLink() && this._moduleSupports("symlink")) { - var linkPath = fs19.readlinkSync(task.filepath); + var linkPath = fs20.readlinkSync(task.filepath); var dirName = path19.dirname(task.filepath); task.data.type = "symlink"; task.data.linkname = path19.relative(dirName, path19.resolve(dirName, linkPath)); @@ -151535,7 +151995,7 @@ var require_parser_stream = __commonJS({ var require_mkdirp = __commonJS({ "node_modules/mkdirp/index.js"(exports2, module2) { var path19 = require("path"); - var fs19 = require("fs"); + var fs20 = require("fs"); var _0777 = parseInt("0777", 8); module2.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; function mkdirP(p, opts, f, made) { @@ -151546,7 +152006,7 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs19; + var xfs = opts.fs || fs20; if (mode === void 0) { mode = _0777; } 
@@ -151585,7 +152045,7 @@ var require_mkdirp = __commonJS({ opts = { mode: opts }; } var mode = opts.mode; - var xfs = opts.fs || fs19; + var xfs = opts.fs || fs20; if (mode === void 0) { mode = _0777; } @@ -151622,7 +152082,7 @@ var require_mkdirp = __commonJS({ // node_modules/unzip-stream/lib/extract.js var require_extract2 = __commonJS({ "node_modules/unzip-stream/lib/extract.js"(exports2, module2) { - var fs19 = require("fs"); + var fs20 = require("fs"); var path19 = require("path"); var util = require("util"); var mkdirp = require_mkdirp(); @@ -151669,7 +152129,7 @@ var require_extract2 = __commonJS({ var directory = entry.isDirectory ? destPath : path19.dirname(destPath); this.unfinishedEntries++; var writeFileFn = function() { - var pipedStream = fs19.createWriteStream(destPath); + var pipedStream = fs20.createWriteStream(destPath); pipedStream.on("close", function() { self2.unfinishedEntries--; self2._notifyAwaiter(); @@ -152867,7 +153327,7 @@ var require_file_command2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; var crypto2 = __importStar2(require("crypto")); - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var os4 = __importStar2(require("os")); var utils_1 = require_utils12(); function issueFileCommand(command, message) { @@ -152875,10 +153335,10 @@ var require_file_command2 = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs19.existsSync(filePath)) { + if (!fs20.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { + fs20.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os4.EOL}`, { encoding: "utf8" }); } @@ -154201,12 +154661,12 @@ var require_io_util2 = __commonJS({ var _a; Object.defineProperty(exports2, 
"__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var path19 = __importStar2(require("path")); - _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + _a = fs20.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs19.constants.O_RDONLY; + exports2.READONLY = fs20.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -155595,25 +156055,25 @@ var require_upload_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadSpecification = void 0; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var core_1 = require_core3(); var path_1 = require("path"); var path_and_artifact_name_validation_1 = 
require_path_and_artifact_name_validation2(); function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { const specifications = []; - if (!fs19.existsSync(rootDirectory)) { + if (!fs20.existsSync(rootDirectory)) { throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs19.statSync(rootDirectory).isDirectory()) { + if (!fs20.statSync(rootDirectory).isDirectory()) { throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); } rootDirectory = (0, path_1.normalize)(rootDirectory); rootDirectory = (0, path_1.resolve)(rootDirectory); for (let file of artifactFiles) { - if (!fs19.existsSync(file)) { + if (!fs20.existsSync(file)) { throw new Error(`File ${file} does not exist`); } - if (!fs19.statSync(file).isDirectory()) { + if (!fs20.statSync(file).isDirectory()) { file = (0, path_1.normalize)(file); file = (0, path_1.resolve)(file); if (!file.startsWith(rootDirectory)) { @@ -155638,11 +156098,11 @@ var require_upload_specification = __commonJS({ // node_modules/tmp/lib/tmp.js var require_tmp = __commonJS({ "node_modules/tmp/lib/tmp.js"(exports2, module2) { - var fs19 = require("fs"); + var fs20 = require("fs"); var os4 = require("os"); var path19 = require("path"); var crypto2 = require("crypto"); - var _c = { fs: fs19.constants, os: os4.constants }; + var _c = { fs: fs20.constants, os: os4.constants }; var RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; var TEMPLATE_PATTERN = /XXXXXX/; var DEFAULT_TRIES = 3; @@ -155654,13 +156114,13 @@ var require_tmp = __commonJS({ var FILE_MODE = 384; var EXIT = "exit"; var _removeObjects = []; - var FN_RMDIR_SYNC = fs19.rmdirSync.bind(fs19); + var FN_RMDIR_SYNC = fs20.rmdirSync.bind(fs20); var _gracefulCleanup = false; function rimraf(dirPath, callback) { - return fs19.rm(dirPath, { recursive: true }, callback); + return fs20.rm(dirPath, { recursive: true }, callback); } function FN_RIMRAF_SYNC(dirPath) { - return 
fs19.rmSync(dirPath, { recursive: true }); + return fs20.rmSync(dirPath, { recursive: true }); } function tmpName(options, callback) { const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; @@ -155670,7 +156130,7 @@ var require_tmp = __commonJS({ (function _getUniqueName() { try { const name = _generateTmpName(sanitizedOptions); - fs19.stat(name, function(err2) { + fs20.stat(name, function(err2) { if (!err2) { if (tries-- > 0) return _getUniqueName(); return cb(new Error("Could not get a unique tmp filename, max tries reached " + name)); @@ -155690,7 +156150,7 @@ var require_tmp = __commonJS({ do { const name = _generateTmpName(sanitizedOptions); try { - fs19.statSync(name); + fs20.statSync(name); } catch (e) { return name; } @@ -155701,10 +156161,10 @@ var require_tmp = __commonJS({ const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs19.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err2, fd) { + fs20.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err2, fd) { if (err2) return cb(err2); if (opts.discardDescriptor) { - return fs19.close(fd, function _discardCallback(possibleErr) { + return fs20.close(fd, function _discardCallback(possibleErr) { return cb(possibleErr, name, void 0, _prepareTmpFileRemoveCallback(name, -1, opts, false)); }); } else { @@ -155718,9 +156178,9 @@ var require_tmp = __commonJS({ const args = _parseArguments(options), opts = args[0]; const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; const name = tmpNameSync(opts); - let fd = fs19.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + let fd = fs20.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); if (opts.discardDescriptor) { - fs19.closeSync(fd); + fs20.closeSync(fd); fd = void 0; } return { @@ -155733,7 +156193,7 @@ var require_tmp = __commonJS({ const args = 
_parseArguments(options, callback), opts = args[0], cb = args[1]; tmpName(opts, function _tmpNameCreated(err, name) { if (err) return cb(err); - fs19.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { + fs20.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err2) { if (err2) return cb(err2); cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); }); @@ -155742,7 +156202,7 @@ var require_tmp = __commonJS({ function dirSync(options) { const args = _parseArguments(options), opts = args[0]; const name = tmpNameSync(opts); - fs19.mkdirSync(name, opts.mode || DIR_MODE); + fs20.mkdirSync(name, opts.mode || DIR_MODE); return { name, removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) @@ -155756,20 +156216,20 @@ var require_tmp = __commonJS({ next(); }; if (0 <= fdPath[0]) - fs19.close(fdPath[0], function() { - fs19.unlink(fdPath[1], _handler); + fs20.close(fdPath[0], function() { + fs20.unlink(fdPath[1], _handler); }); - else fs19.unlink(fdPath[1], _handler); + else fs20.unlink(fdPath[1], _handler); } function _removeFileSync(fdPath) { let rethrownException = null; try { - if (0 <= fdPath[0]) fs19.closeSync(fdPath[0]); + if (0 <= fdPath[0]) fs20.closeSync(fdPath[0]); } catch (e) { if (!_isEBADF(e) && !_isENOENT(e)) throw e; } finally { try { - fs19.unlinkSync(fdPath[1]); + fs20.unlinkSync(fdPath[1]); } catch (e) { if (!_isENOENT(e)) rethrownException = e; } @@ -155785,7 +156245,7 @@ var require_tmp = __commonJS({ return sync ? removeCallbackSync : removeCallback; } function _prepareTmpDirRemoveCallback(name, opts, sync) { - const removeFunction = opts.unsafeCleanup ? rimraf : fs19.rmdir.bind(fs19); + const removeFunction = opts.unsafeCleanup ? rimraf : fs20.rmdir.bind(fs20); const removeFunctionSync = opts.unsafeCleanup ? 
FN_RIMRAF_SYNC : FN_RMDIR_SYNC; const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); @@ -155848,24 +156308,24 @@ var require_tmp = __commonJS({ } function _resolvePath(name, tmpDir, cb) { const pathToResolve = path19.isAbsolute(name) ? name : path19.join(tmpDir, name); - fs19.stat(pathToResolve, function(err) { + fs20.stat(pathToResolve, function(err) { if (err) { - fs19.realpath(path19.dirname(pathToResolve), function(err2, parentDir) { + fs20.realpath(path19.dirname(pathToResolve), function(err2, parentDir) { if (err2) return cb(err2); cb(null, path19.join(parentDir, path19.basename(pathToResolve))); }); } else { - fs19.realpath(path19, cb); + fs20.realpath(path19, cb); } }); } function _resolvePathSync(name, tmpDir) { const pathToResolve = path19.isAbsolute(name) ? name : path19.join(tmpDir, name); try { - fs19.statSync(pathToResolve); - return fs19.realpathSync(pathToResolve); + fs20.statSync(pathToResolve); + return fs20.realpathSync(pathToResolve); } catch (_err) { - const parentDir = fs19.realpathSync(path19.dirname(pathToResolve)); + const parentDir = fs20.realpathSync(path19.dirname(pathToResolve)); return path19.join(parentDir, path19.basename(pathToResolve)); } } @@ -155970,10 +156430,10 @@ var require_tmp = __commonJS({ _gracefulCleanup = true; } function _getTmpDir(options, cb) { - return fs19.realpath(options && options.tmpdir || os4.tmpdir(), cb); + return fs20.realpath(options && options.tmpdir || os4.tmpdir(), cb); } function _getTmpDirSync(options) { - return fs19.realpathSync(options && options.tmpdir || os4.tmpdir()); + return fs20.realpathSync(options && options.tmpdir || os4.tmpdir()); } process.addListener(EXIT, _garbageCollector); Object.defineProperty(module2.exports, "tmpdir", { @@ -156827,10 +157287,10 @@ var require_upload_gzip = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); 
exports2.createGZipFileInBuffer = exports2.createGZipFileOnDisk = void 0; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var zlib3 = __importStar2(require("zlib")); var util_1 = require("util"); - var stat = (0, util_1.promisify)(fs19.stat); + var stat = (0, util_1.promisify)(fs20.stat); var gzipExemptFileExtensions = [ ".gz", ".gzip", @@ -156863,9 +157323,9 @@ var require_upload_gzip = __commonJS({ } } return new Promise((resolve8, reject) => { - const inputStream = fs19.createReadStream(originalFilePath); + const inputStream = fs20.createReadStream(originalFilePath); const gzip = zlib3.createGzip(); - const outputStream = fs19.createWriteStream(tempFilePath); + const outputStream = fs20.createWriteStream(tempFilePath); inputStream.pipe(gzip).pipe(outputStream); outputStream.on("finish", () => __awaiter2(this, void 0, void 0, function* () { const size = (yield stat(tempFilePath)).size; @@ -156883,7 +157343,7 @@ var require_upload_gzip = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { return new Promise((resolve8) => __awaiter2(this, void 0, void 0, function* () { var _a, e_1, _b, _c; - const inputStream = fs19.createReadStream(originalFilePath); + const inputStream = fs20.createReadStream(originalFilePath); const gzip = zlib3.createGzip(); inputStream.pipe(gzip); const chunks = []; @@ -157092,7 +157552,7 @@ var require_upload_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var core17 = __importStar2(require_core3()); var tmp = __importStar2(require_tmp_promise()); var stream2 = __importStar2(require("stream")); @@ -157106,7 +157566,7 @@ var require_upload_http_client = __commonJS({ var http_manager_1 = require_http_manager(); var upload_gzip_1 = require_upload_gzip(); var requestUtils_1 = require_requestUtils2(); - var stat = (0, 
util_1.promisify)(fs19.stat); + var stat = (0, util_1.promisify)(fs20.stat); var UploadHttpClient = class { constructor() { this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), "@actions/artifact-upload"); @@ -157243,7 +157703,7 @@ var require_upload_http_client = __commonJS({ let openUploadStream; if (totalFileSize < buffer.byteLength) { core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); - openUploadStream = () => fs19.createReadStream(parameters.file); + openUploadStream = () => fs20.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { @@ -157289,7 +157749,7 @@ var require_upload_http_client = __commonJS({ failedChunkSizes += chunkSize; continue; } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs19.createReadStream(uploadFilePath, { + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs20.createReadStream(uploadFilePath, { start: startChunkIndex, end: endChunkIndex, autoClose: false @@ -157484,7 +157944,7 @@ var require_download_http_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; - var fs19 = __importStar2(require("fs")); + var fs20 = __importStar2(require("fs")); var core17 = __importStar2(require_core3()); var zlib3 = __importStar2(require("zlib")); var utils_1 = require_utils13(); @@ -157575,7 +158035,7 @@ var require_download_http_client = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { let retryCount = 0; const retryLimit = (0, config_variables_1.getRetryLimit)(); - let destinationStream = fs19.createWriteStream(downloadPath); + let destinationStream = fs20.createWriteStream(downloadPath); const headers = (0, utils_1.getDownloadHeaders)("application/json", true, true); const makeDownloadRequest 
= () => __awaiter2(this, void 0, void 0, function* () { const client = this.downloadHttpManager.getClient(httpClientIndex); @@ -157617,7 +158077,7 @@ var require_download_http_client = __commonJS({ } }); yield (0, utils_1.rmFile)(fileDownloadPath); - destinationStream = fs19.createWriteStream(fileDownloadPath); + destinationStream = fs20.createWriteStream(fileDownloadPath); }); while (retryCount <= retryLimit) { let response; @@ -160901,21 +161361,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs19 = options.fs || await import("node:fs/promises"); + const fs20 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs20.lstat(itemPath, { bigint: true }) : await fs20.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? 
await fs20.readdir(itemPath) : await fs20.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -163538,7 +163998,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -163561,17 +164021,6 @@ function getExtraOptionsEnvParam() { ); } } -function getToolNames(sarif) { - const toolNames = {}; - for (const run2 of sarif.runs || []) { - const tool = run2.tool || {}; - const driver = tool.driver || {}; - if (typeof driver.name === "string" && driver.name.length > 0) { - toolNames[driver.name] = true; - } - } - return Object.keys(toolNames); -} function getCodeQLDatabasePath(config, language) { return path.resolve(config.dbLocation, language); } @@ -164797,8 +165246,8 @@ var path5 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs3 = __toESM(require("fs")); @@ -165292,6 +165741,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -165303,11 +165757,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", @@ -165670,6 +166119,18 @@ function getStatusFilePath(languages) { STATUS_FILE_NAME ); } +function createOverlayStatus(attributes, checkRunId) { + const job = { + workflowRunId: getWorkflowRunID(), + workflowRunAttempt: getWorkflowRunAttempt(), + name: getRequiredEnvParam("GITHUB_JOB"), + checkRunId + }; + return { + ...attributes, + job + }; +} async function saveOverlayStatus(codeql, languages, diskUsage, status, logger) { const cacheKey = await getCacheKey(codeql, languages, diskUsage); const statusFile = getStatusFilePath(languages); @@ -167881,12 +168342,12 @@ async function createDatabaseBundleCli(codeql, config, language) { } // src/init-action-post-helper.ts -var fs18 = __toESM(require("fs")); +var fs19 = __toESM(require("fs")); var import_path3 = __toESM(require("path")); var github2 = __toESM(require_github()); // src/upload-lib.ts -var fs16 = __toESM(require("fs")); +var fs17 = __toESM(require("fs")); var path16 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); @@ -168967,12 +169428,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) { } return uri; } -async function 
addFingerprints(sarif, sourceRoot, logger) { +async function addFingerprints(sarifLog, sourceRoot, logger) { logger.info( `Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` ); const callbacksByFile = {}; - for (const run2 of sarif.runs || []) { + for (const run2 of sarifLog.runs || []) { const artifacts = run2.artifacts || []; for (const result of run2.results || []) { const primaryLocation = (result.locations || [])[0]; @@ -169012,7 +169473,7 @@ async function addFingerprints(sarif, sourceRoot, logger) { }; await hash(teeCallback, filepath); } - return sarif; + return sarifLog; } // src/init.ts @@ -169047,36 +169508,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe }; } -// src/upload-lib.ts -var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; -var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; +// src/sarif/index.ts +var fs16 = __toESM(require("fs")); +var InvalidSarifUploadError = class extends Error { +}; +function getToolNames(sarifFile) { + const toolNames = {}; + for (const run2 of sarifFile.runs || []) { + const tool = run2.tool || {}; + const driver = tool.driver || {}; + if (typeof driver.name === "string" && driver.name.length > 0) { + toolNames[driver.name] = true; + } + } + return Object.keys(toolNames); +} +function readSarifFile(sarifFilePath) { + return JSON.parse(fs16.readFileSync(sarifFilePath, "utf8")); +} function combineSarifFiles(sarifFiles, logger) { logger.info(`Loading SARIF file(s)`); - const combinedSarif = { - version: null, - runs: [] - }; + const runs = []; + let version = void 0; for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); - const sarifObject = JSON.parse( - 
fs16.readFileSync(sarifFile, "utf8") - ); - if (combinedSarif.version === null) { - combinedSarif.version = sarifObject.version; - } else if (combinedSarif.version !== sarifObject.version) { + const sarifLog = readSarifFile(sarifFile); + if (version === void 0) { + version = sarifLog.version; + } else if (version !== sarifLog.version) { throw new InvalidSarifUploadError( - `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}` + `Different SARIF versions encountered: ${version} and ${sarifLog.version}` ); } - combinedSarif.runs.push(...sarifObject.runs); + runs.push(...sarifLog?.runs || []); } - return combinedSarif; + if (version === void 0) { + version = "2.1.0"; + } + return { version, runs }; } -function areAllRunsProducedByCodeQL(sarifObjects) { - return sarifObjects.every((sarifObject) => { - return sarifObject.runs?.every( - (run2) => run2.tool?.driver?.name === "CodeQL" - ); +function areAllRunsProducedByCodeQL(sarifLogs) { + return sarifLogs.every((sarifLog) => { + return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL"); }); } function createRunKey(run2) { @@ -169089,10 +169562,13 @@ function createRunKey(run2) { automationId: run2.automationDetails?.id }; } -function areAllRunsUnique(sarifObjects) { +function areAllRunsUnique(sarifLogs) { const keys = /* @__PURE__ */ new Set(); - for (const sarifObject of sarifObjects) { - for (const run2 of sarifObject.runs) { + for (const sarifLog of sarifLogs) { + if (sarifLog.runs === void 0) { + continue; + } + for (const run2 of sarifLog.runs) { const key = JSON.stringify(createRunKey(run2)); if (keys.has(key)) { return false; @@ -169102,6 +169578,10 @@ function areAllRunsUnique(sarifObjects) { } return true; } + +// src/upload-lib.ts +var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; +var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; async function 
shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) { if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) { return false; @@ -169130,9 +169610,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); - const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs16.readFileSync(sarifFile, "utf8")); - }); + const sarifObjects = sarifFiles.map(readSarifFile); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; if (!areAllRunsProducedByCodeQL(sarifObjects)) { @@ -169185,27 +169663,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = initCodeQLResult.codeql; } const baseTempDir = path16.resolve(tempDir, "combined-sarif"); - fs16.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs16.mkdtempSync(path16.resolve(baseTempDir, "output-")); + fs17.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs17.mkdtempSync(path16.resolve(baseTempDir, "output-")); const outputFile = path16.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs16.readFileSync(outputFile, "utf8")); + return readSarifFile(outputFile); } -function populateRunAutomationDetails(sarif, category, analysis_key, environment) { +function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) { const automationID = 
getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { - for (const run2 of sarif.runs || []) { + for (const run2 of sarifFile.runs || []) { if (run2.automationDetails === void 0) { run2.automationDetails = { id: automationID }; } } - return sarif; + return sarifFile; } - return sarif; + return sarifFile; } function getAutomationID2(category, analysis_key, environment) { if (category !== void 0) { @@ -169228,7 +169706,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs16.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs17.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -169262,7 +169740,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs16.readdirSync(dir, { withFileTypes: true }); + const entries = fs17.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { sarifFiles.push(path16.resolve(dir, entry.name)); @@ -169275,11 +169753,11 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } function getSarifFilePaths(sarifPath, isSarif) { - if (!fs16.existsSync(sarifPath)) { + if (!fs17.existsSync(sarifPath)) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } let sarifFiles; - if (fs16.lstatSync(sarifPath).isDirectory()) { + if (fs17.lstatSync(sarifPath).isDirectory()) { sarifFiles = findSarifFilesInDir(sarifPath, isSarif); if (sarifFiles.length === 0) { throw new ConfigurationError( @@ -169291,9 +169769,9 @@ function getSarifFilePaths(sarifPath, isSarif) { } return sarifFiles; } -function 
countResultsInSarif(sarif) { +function countResultsInSarif(sarifLog) { let numResults = 0; - const parsedSarif = JSON.parse(sarif); + const parsedSarif = JSON.parse(sarifLog); if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array."); } @@ -169307,26 +169785,26 @@ function countResultsInSarif(sarif) { } return numResults; } -function readSarifFile(sarifFilePath) { +function readSarifFileOrThrow(sarifFilePath) { try { - return JSON.parse(fs16.readFileSync(sarifFilePath, "utf8")); + return readSarifFile(sarifFilePath); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}` ); } } -function validateSarifFileSchema(sarif, sarifFilePath, logger) { - if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments. +function validateSarifFileSchema(sarifLog, sarifFilePath, logger) { + if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments. !getTestingEnvironment()) { logger.debug( `Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.` ); - return; + return true; } logger.info(`Validating ${sarifFilePath}`); const schema2 = require_sarif_schema_2_1_0(); - const result = new jsonschema2.Validator().validate(sarif, schema2); + const result = new jsonschema2.Validator().validate(sarifLog, schema2); const warningAttributes = ["uri-reference", "uri"]; const errors = (result.errors ?? 
[]).filter( (err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument)) @@ -169353,6 +169831,7 @@ ${sarifErrors.join( )}` ); } + return true; } function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) { const payloadObj = { @@ -169378,7 +169857,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs16.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs17.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -169389,14 +169868,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); - let sarif; + let sarifLog; category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { - const parsedSarif = readSarifFile(sarifPath); + const parsedSarif = readSarifFileOrThrow(sarifPath); validateSarifFileSchema(parsedSarif, sarifPath, logger); } - sarif = await combineSarifFilesUsingCLI( + sarifLog = await combineSarifFilesUsingCLI( sarifPaths, gitHubVersion, features, @@ -169404,21 +169883,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, ); } else { const sarifPath = sarifPaths[0]; - sarif = readSarifFile(sarifPath); - validateSarifFileSchema(sarif, sarifPath, logger); - await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion); + sarifLog = readSarifFileOrThrow(sarifPath); + 
validateSarifFileSchema(sarifLog, sarifPath, logger); + await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion); } - sarif = filterAlertsByDiffRange(logger, sarif); - sarif = await addFingerprints(sarif, checkoutPath, logger); + sarifLog = filterAlertsByDiffRange(logger, sarifLog); + sarifLog = await addFingerprints(sarifLog, checkoutPath, logger); const analysisKey = await getAnalysisKey(); const environment = getRequiredInput("matrix"); - sarif = populateRunAutomationDetails( - sarif, + sarifLog = populateRunAutomationDetails( + sarifLog, category, analysisKey, environment ); - return { sarif, analysisKey, environment }; + return { sarif: sarifLog, analysisKey, environment }; } async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) { const sarifPaths = getSarifFilePaths( @@ -169452,12 +169931,12 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features } async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { logger.startGroup(`Uploading ${uploadTarget.name} results`); - const sarif = postProcessingResults.sarif; - const toolNames = getToolNames(sarif); + const sarifLog = postProcessingResults.sarif; + const toolNames = getToolNames(sarifLog); logger.debug(`Validating that each SARIF run has a unique category`); - validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); + validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); - const sarifPayload = JSON.stringify(sarif); + const sarifPayload = JSON.stringify(sarifLog); logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; @@ -169594,9 +170073,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger assertNever(status); } } -function validateUniqueCategory(sarif, 
sentinelPrefix) { +function validateUniqueCategory(sarifLog, sentinelPrefix) { const categories = {}; - for (const run2 of sarif.runs) { + for (const run2 of sarifLog.runs || []) { const id = run2?.automationDetails?.id; const tool = run2.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; @@ -169615,15 +170094,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) { function sanitize(str2) { return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase(); } -var InvalidSarifUploadError = class extends Error { -}; -function filterAlertsByDiffRange(logger, sarif) { +function filterAlertsByDiffRange(logger, sarifLog) { const diffRanges = readDiffRangesJsonFile(logger); if (!diffRanges?.length) { - return sarif; + return sarifLog; + } + if (sarifLog.runs === void 0) { + return sarifLog; } const checkoutPath = getRequiredInput("checkout_path"); - for (const run2 of sarif.runs) { + for (const run2 of sarifLog.runs) { if (run2.results) { run2.results = run2.results.filter((result) => { const locations = [ @@ -169644,11 +170124,11 @@ function filterAlertsByDiffRange(logger, sarif) { }); } } - return sarif; + return sarifLog; } // src/workflow.ts -var fs17 = __toESM(require("fs")); +var fs18 = __toESM(require("fs")); var path17 = __toESM(require("path")); var import_zlib2 = __toESM(require("zlib")); var core14 = __toESM(require_core()); @@ -169677,7 +170157,7 @@ async function getWorkflow(logger) { ); } const workflowPath = await getWorkflowAbsolutePath(logger); - return load(fs17.readFileSync(workflowPath, "utf-8")); + return load(fs18.readFileSync(workflowPath, "utf-8")); } async function getWorkflowAbsolutePath(logger) { const relativePath = await getWorkflowRelativePath(); @@ -169685,7 +170165,7 @@ async function getWorkflowAbsolutePath(logger) { getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath ); - if (fs17.existsSync(absolutePath)) { + if (fs18.existsSync(absolutePath)) { logger.debug( `Derived the following absolute path for 
the currently executing workflow: ${absolutePath}.` ); @@ -169965,7 +170445,7 @@ async function uploadFailureInfo(uploadAllAvailableDebugArtifacts, printDebugLog } if (isSelfHostedRunner()) { try { - fs18.rmSync(config.dbLocation, { + fs19.rmSync(config.dbLocation, { recursive: true, force: true, maxRetries: 3 @@ -169989,10 +170469,15 @@ async function recordOverlayStatus(codeql, config, features, logger) { if (config.overlayDatabaseMode !== "overlay-base" /* OverlayBase */ || process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true" || !await features.getValue("overlay_analysis_status_save" /* OverlayAnalysisStatusSave */)) { return; } - const overlayStatus = { - attemptedToBuildOverlayBaseDatabase: true, - builtOverlayBaseDatabase: false - }; + const checkRunIdInput = getOptionalInput("check-run-id"); + const checkRunId = checkRunIdInput !== void 0 ? parseInt(checkRunIdInput, 10) : void 0; + const overlayStatus = createOverlayStatus( + { + attemptedToBuildOverlayBaseDatabase: true, + builtOverlayBaseDatabase: false + }, + checkRunId !== void 0 && checkRunId >= 0 ? checkRunId : void 0 + ); const diskUsage = await checkDiskUsage(logger); if (diskUsage === void 0) { logger.warning( @@ -170007,10 +170492,10 @@ async function recordOverlayStatus(codeql, config, features, logger) { overlayStatus, logger ); - const blurb = "This job attempted to run with improved incremental analysis but it did not complete successfully. This may have been due to disk space constraints: using improved incremental analysis can require a significant amount of disk space for some repositories."; + const blurb = "This job attempted to run with improved incremental analysis but it did not complete successfully. 
One possible reason for this is disk space constraints, since improved incremental analysis can require a significant amount of disk space for some repositories."; if (saved) { logger.error( - `${blurb} This failure has been recorded in the Actions cache, so the next CodeQL analysis will run without improved incremental analysis. If you want to enable improved incremental analysis, increase the disk space available to the runner. If that doesn't help, contact GitHub Support for further assistance.` + `${blurb} This failure has been recorded in the Actions cache, so the next CodeQL analysis will run without improved incremental analysis. If you want to enable improved incremental analysis, try increasing the disk space available to the runner. If that doesn't help, contact GitHub Support for further assistance.` ); } else { logger.error( diff --git a/lib/init-action.js b/lib/init-action.js index c6f1263a4..5d5a6fa59 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49512,6 +49513,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path17.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49908,50 +49910,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -61991,7 +62090,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -62013,90 +62112,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -62114,11 +62213,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -62127,16 +62226,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62164,49 +62263,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62215,23 +62317,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62241,10 +62343,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62253,15 +62355,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62274,13 +62376,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62288,24 +62390,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62314,45 +62416,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62366,7 +62466,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62374,10 +62474,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62396,12 +62496,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62441,26 +62541,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." 
+ a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62468,18 +62569,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62540,12 +62649,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62566,19 +62675,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62587,7 +62696,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62595,7 +62704,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62629,7 +62738,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62657,7 +62766,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62667,7 +62776,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62688,6 +62797,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62711,10 +62827,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62722,13 +62838,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62746,15 +62862,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62762,15 +62884,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62805,18 +62927,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62824,14 +62946,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -99969,7 +100091,7 @@ var require_follow_redirects = __commonJS({ if (this._ending) { throw new WriteAfterEndError(); } - if (!isString(data) && !isBuffer(data)) { + if (!isString2(data) && !isBuffer(data)) { throw new TypeError("data should be a string, Buffer or Uint8Array"); } if (isFunction(encoding)) { @@ -100224,7 +100346,7 @@ var require_follow_redirects = __commonJS({ function request2(input, options, callback) { if (isURL(input)) { input = spreadUrlObject(input); - } else if (isString(input)) { + } else if (isString2(input)) { input = spreadUrlObject(parseUrl2(input)); } else { callback = options; @@ -100240,7 +100362,7 @@ var require_follow_redirects = __commonJS({ maxBodyLength: exports3.maxBodyLength }, input, options); options.nativeProtocols = nativeProtocols; - if (!isString(options.host) && !isString(options.hostname)) { + if (!isString2(options.host) && !isString2(options.hostname)) { options.hostname = "::1"; } assert.equal(options.protocol, protocol, "protocol mismatch"); @@ -100267,7 +100389,7 @@ var require_follow_redirects = __commonJS({ parsed = new URL2(input); } else { parsed = validateUrl(url.parse(input)); - if (!isString(parsed.protocol)) { + if (!isString2(parsed.protocol)) { throw new InvalidUrlError({ input }); } } @@ -100339,11 +100461,11 @@ var require_follow_redirects = __commonJS({ request2.destroy(error3); } function isSubdomain(subdomain, domain) { - 
assert(isString(subdomain) && isString(domain)); + assert(isString2(subdomain) && isString2(domain)); var dot = subdomain.length - domain.length - 1; return dot > 0 && subdomain[dot] === "." && subdomain.endsWith(domain); } - function isString(value) { + function isString2(value) { return typeof value === "string" || value instanceof String; } function isFunction(value) { @@ -103089,7 +103211,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -104286,14 +104408,23 @@ var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); -var repositoryPropertyParsers = { - ["github-codeql-disable-overlay" /* DISABLE_OVERLAY */]: parseBooleanRepositoryProperty, - ["github-codeql-extra-queries" /* EXTRA_QUERIES */]: parseStringRepositoryProperty +function isString(value) { + return typeof value === "string"; +} +var stringProperty = { + validate: isString, + parse: parseStringRepositoryProperty }; -async function loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) { - if (gitHubVersion.type === "GitHub Enterprise Server" /* GHES */) { - return {}; - } +var booleanProperty = { + // The value from the API should come as a string, which we then parse into a boolean. 
+ validate: isString, + parse: parseBooleanRepositoryProperty +}; +var repositoryPropertyParsers = { + ["github-codeql-disable-overlay" /* DISABLE_OVERLAY */]: booleanProperty, + ["github-codeql-extra-queries" /* EXTRA_QUERIES */]: stringProperty +}; +async function loadPropertiesFromApi(logger, repositoryNwo) { try { const response = await getRepositoryProperties(repositoryNwo); const remoteProperties = response.data; @@ -104312,11 +104443,6 @@ async function loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) { `Expected repository property object to have a 'property_name', but got: ${JSON.stringify(property)}` ); } - if (typeof property.value !== "string") { - throw new Error( - `Expected repository property '${property.property_name}' to have a string value, but got: ${JSON.stringify(property)}` - ); - } if (isKnownPropertyName(property.property_name)) { setProperty2(properties, property.property_name, property.value, logger); } @@ -104341,7 +104467,14 @@ async function loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) { } } function setProperty2(properties, name, value, logger) { - properties[name] = repositoryPropertyParsers[name](name, value, logger); + const propertyOptions = repositoryPropertyParsers[name]; + if (propertyOptions.validate(value)) { + properties[name] = propertyOptions.parse(name, value, logger); + } else { + throw new Error( + `Unexpected value for repository property '${name}' (${typeof value}), got: ${JSON.stringify(value)}` + ); + } } function parseBooleanRepositoryProperty(name, value, logger) { if (value !== "true" && value !== "false") { @@ -104735,8 +104868,8 @@ var path6 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs3 = __toESM(require("fs")); @@ -105400,6 +105533,11 @@ var featureConfig = { // 
cannot be found when interpreting results. minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -105411,11 +105549,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", @@ -105804,7 +105937,7 @@ async function addOverlayDisablementDiagnostics(config, codeql, overlayDisabledR attributes: { languages: config.languages }, - markdownMessage: `Improved incremental analysis was skipped because it previously failed for this repository with CodeQL version ${(await codeql.getVersion()).version} on a runner with similar hardware resources. Improved incremental analysis may require a significant amount of disk space for some repositories. If you want to enable improved incremental analysis, increase the disk space available to the runner. If that doesn't help, contact GitHub Support for further assistance. + markdownMessage: `Improved incremental analysis was skipped because it previously failed for this repository with CodeQL version ${(await codeql.getVersion()).version} on a runner with similar hardware resources. One possible reason for this is that improved incremental analysis can require a significant amount of disk space for some repositories. If you want to try re-enabling improved incremental analysis, increase the disk space available to the runner. 
If that doesn't help, contact GitHub Support for further assistance. Improved incremental analysis will be automatically retried when the next version of CodeQL is released. You can also manually trigger a retry by [removing](${"https://docs.github.com/en/actions/how-tos/manage-workflow-runs/manage-caches#deleting-cache-entries" /* DELETE_ACTIONS_CACHE_ENTRIES */}) \`codeql-overlay-status-*\` entries from the Actions cache.`, severity: "note", @@ -106028,6 +106161,7 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; +var CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE = "2.24.3"; async function getSupportedLanguageMap(codeql, logger) { const resolveSupportedLanguagesUsingCli = await codeql.supportsFeature( "builtinExtractorsSpecifyDefaultQueries" /* BuiltinExtractorsSpecifyDefaultQueries */ @@ -106265,9 +106399,9 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ }; -async function isOverlayAnalysisFeatureEnabled(features, codeql, languages, codeScanningConfig) { +async function checkOverlayAnalysisFeatureEnabled(features, codeql, languages, codeScanningConfig) { if (!await features.getValue("overlay_analysis" /* OverlayAnalysis */, codeql)) { - return false; + return new Failure("overall-feature-not-enabled" /* OverallFeatureNotEnabled */); } let enableForCodeScanningOnly = false; for (const language of languages) { @@ -106280,23 +106414,38 @@ async function isOverlayAnalysisFeatureEnabled(features, codeql, languages, code enableForCodeScanningOnly = true; continue; } - return false; + return new Failure("language-not-enabled" /* LanguageNotEnabled */); } if 
(enableForCodeScanningOnly) { - return codeScanningConfig["disable-default-queries"] !== true && codeScanningConfig.packs === void 0 && codeScanningConfig.queries === void 0 && codeScanningConfig["query-filters"] === void 0; + const usesDefaultQueriesOnly = codeScanningConfig["disable-default-queries"] !== true && codeScanningConfig.packs === void 0 && codeScanningConfig.queries === void 0 && codeScanningConfig["query-filters"] === void 0; + if (!usesDefaultQueriesOnly) { + return new Failure("non-default-queries" /* NonDefaultQueries */); + } } - return true; + return new Success(void 0); } -async function runnerSupportsOverlayAnalysis(diskUsage, ramInput, logger, useV2ResourceChecks) { +function runnerHasSufficientDiskSpace(diskUsage, logger, useV2ResourceChecks) { const minimumDiskSpaceBytes = useV2ResourceChecks ? OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES : OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES; - if (diskUsage === void 0 || diskUsage.numAvailableBytes < minimumDiskSpaceBytes) { - const diskSpaceMb = diskUsage === void 0 ? 
0 : Math.round(diskUsage.numAvailableBytes / 1e6); + if (diskUsage.numAvailableBytes < minimumDiskSpaceBytes) { + const diskSpaceMb = Math.round(diskUsage.numAvailableBytes / 1e6); const minimumDiskSpaceMb = Math.round(minimumDiskSpaceBytes / 1e6); logger.info( `Setting overlay database mode to ${"none" /* None */} due to insufficient disk space (${diskSpaceMb} MB, needed ${minimumDiskSpaceMb} MB).` ); return false; } + return true; +} +async function runnerHasSufficientMemory(codeql, ramInput, logger) { + if (await codeQlVersionAtLeast( + codeql, + CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE + )) { + logger.debug( + `Skipping memory check for overlay analysis because CodeQL version is at least ${CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE}.` + ); + return true; + } const memoryFlagValue = getCodeQLMemoryLimit(ramInput, logger); if (memoryFlagValue < OVERLAY_MINIMUM_MEMORY_MB) { logger.info( @@ -106304,85 +106453,115 @@ async function runnerSupportsOverlayAnalysis(diskUsage, ramInput, logger, useV2R ); return false; } + logger.debug( + `Memory available for CodeQL analysis is ${memoryFlagValue} MB, which is above the minimum of ${OVERLAY_MINIMUM_MEMORY_MB} MB.` + ); return true; } -async function getOverlayDatabaseMode(codeql, features, languages, sourceRoot, buildMode, ramInput, codeScanningConfig, repositoryProperties, gitVersion, logger) { - let overlayDatabaseMode = "none" /* None */; - let useOverlayDatabaseCaching = false; - let disabledReason; +async function checkRunnerResources(codeql, diskUsage, ramInput, logger, useV2ResourceChecks) { + if (!runnerHasSufficientDiskSpace(diskUsage, logger, useV2ResourceChecks)) { + return new Failure("insufficient-disk-space" /* InsufficientDiskSpace */); + } + if (!await runnerHasSufficientMemory(codeql, ramInput, logger)) { + return new Failure("insufficient-memory" /* InsufficientMemory */); + } + return new Success(void 0); +} +async function checkOverlayEnablement(codeql, features, languages, sourceRoot, buildMode, 
ramInput, codeScanningConfig, repositoryProperties, gitVersion, logger) { const modeEnv = process.env.CODEQL_OVERLAY_DATABASE_MODE; if (modeEnv === "overlay" /* Overlay */ || modeEnv === "overlay-base" /* OverlayBase */ || modeEnv === "none" /* None */) { - overlayDatabaseMode = modeEnv; logger.info( - `Setting overlay database mode to ${overlayDatabaseMode} from the CODEQL_OVERLAY_DATABASE_MODE environment variable.` + `Setting overlay database mode to ${modeEnv} from the CODEQL_OVERLAY_DATABASE_MODE environment variable.` ); - } else if (repositoryProperties["github-codeql-disable-overlay" /* DISABLE_OVERLAY */] === true) { + if (modeEnv === "none" /* None */) { + return new Failure("disabled-by-environment-variable" /* DisabledByEnvironmentVariable */); + } + return validateOverlayDatabaseMode( + modeEnv, + false, + codeql, + languages, + sourceRoot, + buildMode, + gitVersion, + logger + ); + } + if (repositoryProperties["github-codeql-disable-overlay" /* DISABLE_OVERLAY */] === true) { logger.info( `Setting overlay database mode to ${"none" /* None */} because the ${"github-codeql-disable-overlay" /* DISABLE_OVERLAY */} repository property is set to true.` ); - overlayDatabaseMode = "none" /* None */; - disabledReason = "disabled-by-repository-property" /* DisabledByRepositoryProperty */; - } else if (await isOverlayAnalysisFeatureEnabled( + return new Failure("disabled-by-repository-property" /* DisabledByRepositoryProperty */); + } + const featureResult = await checkOverlayAnalysisFeatureEnabled( features, codeql, languages, codeScanningConfig - )) { - const performResourceChecks = !await features.getValue( - "overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */, - codeql + ); + if (featureResult.isFailure()) { + return featureResult; + } + const performResourceChecks = !await features.getValue( + "overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */, + codeql + ); + const useV2ResourceChecks = await 
features.getValue( + "overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */ + ); + const checkOverlayStatus = await features.getValue( + "overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */ + ); + const needDiskUsage = performResourceChecks || checkOverlayStatus; + const diskUsage = needDiskUsage ? await checkDiskUsage(logger) : void 0; + if (needDiskUsage && diskUsage === void 0) { + logger.warning( + `Unable to determine disk usage, therefore setting overlay database mode to ${"none" /* None */}.` ); - const useV2ResourceChecks = await features.getValue( - "overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */ + return new Failure("unable-to-determine-disk-usage" /* UnableToDetermineDiskUsage */); + } + const resourceResult = performResourceChecks && diskUsage !== void 0 ? await checkRunnerResources( + codeql, + diskUsage, + ramInput, + logger, + useV2ResourceChecks + ) : new Success(void 0); + if (resourceResult.isFailure()) { + return resourceResult; + } + if (checkOverlayStatus && diskUsage !== void 0 && await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger)) { + logger.info( + `Setting overlay database mode to ${"none" /* None */} because overlay analysis previously failed with this combination of languages, disk space, and CodeQL version.` ); - const checkOverlayStatus = await features.getValue( - "overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */ + return new Failure("skipped-due-to-cached-status" /* SkippedDueToCachedStatus */); + } + let overlayDatabaseMode; + if (isAnalyzingPullRequest()) { + overlayDatabaseMode = "overlay" /* Overlay */; + logger.info( + `Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing a pull request.` + ); + } else if (await isAnalyzingDefaultBranch()) { + overlayDatabaseMode = "overlay-base" /* OverlayBase */; + logger.info( + `Setting overlay database mode to ${overlayDatabaseMode} with caching because we 
are analyzing the default branch.` ); - const diskUsage = performResourceChecks || checkOverlayStatus ? await checkDiskUsage(logger) : void 0; - if (performResourceChecks && !await runnerSupportsOverlayAnalysis( - diskUsage, - ramInput, - logger, - useV2ResourceChecks - )) { - overlayDatabaseMode = "none" /* None */; - disabledReason = "insufficient-resources" /* InsufficientResources */; - } else if (checkOverlayStatus && diskUsage === void 0) { - logger.warning( - `Unable to determine disk usage, therefore setting overlay database mode to ${"none" /* None */}.` - ); - overlayDatabaseMode = "none" /* None */; - disabledReason = "unable-to-determine-disk-usage" /* UnableToDetermineDiskUsage */; - } else if (checkOverlayStatus && diskUsage && await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger)) { - logger.info( - `Setting overlay database mode to ${"none" /* None */} because overlay analysis previously failed with this combination of languages, disk space, and CodeQL version.` - ); - overlayDatabaseMode = "none" /* None */; - disabledReason = "skipped-due-to-cached-status" /* SkippedDueToCachedStatus */; - } else if (isAnalyzingPullRequest()) { - overlayDatabaseMode = "overlay" /* Overlay */; - useOverlayDatabaseCaching = true; - logger.info( - `Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing a pull request.` - ); - } else if (await isAnalyzingDefaultBranch()) { - overlayDatabaseMode = "overlay-base" /* OverlayBase */; - useOverlayDatabaseCaching = true; - logger.info( - `Setting overlay database mode to ${overlayDatabaseMode} with caching because we are analyzing the default branch.` - ); - } } else { - disabledReason = "feature-not-enabled" /* FeatureNotEnabled */; - } - const disabledResult = (reason) => ({ - overlayDatabaseMode: "none" /* None */, - useOverlayDatabaseCaching: false, - disabledReason: reason - }); - if (overlayDatabaseMode === "none" /* None */) { - return 
disabledResult(disabledReason); + return new Failure("not-pull-request-or-default-branch" /* NotPullRequestOrDefaultBranch */); } + return validateOverlayDatabaseMode( + overlayDatabaseMode, + true, + codeql, + languages, + sourceRoot, + buildMode, + gitVersion, + logger + ); +} +async function validateOverlayDatabaseMode(overlayDatabaseMode, useOverlayDatabaseCaching, codeql, languages, sourceRoot, buildMode, gitVersion, logger) { if (buildMode !== "none" /* None */ && (await Promise.all( languages.map( async (l) => l !== "go" /* go */ && // Workaround to allow overlay analysis for Go with any build @@ -106395,37 +106574,36 @@ async function getOverlayDatabaseMode(codeql, features, languages, sourceRoot, b logger.warning( `Cannot build an ${overlayDatabaseMode} database because build-mode is set to "${buildMode}" instead of "none". Falling back to creating a normal full database instead.` ); - return disabledResult("incompatible-build-mode" /* IncompatibleBuildMode */); + return new Failure("incompatible-build-mode" /* IncompatibleBuildMode */); } if (!await codeQlVersionAtLeast(codeql, CODEQL_OVERLAY_MINIMUM_VERSION)) { logger.warning( `Cannot build an ${overlayDatabaseMode} database because the CodeQL CLI is older than ${CODEQL_OVERLAY_MINIMUM_VERSION}. Falling back to creating a normal full database instead.` ); - return disabledResult("incompatible-codeql" /* IncompatibleCodeQl */); + return new Failure("incompatible-codeql" /* IncompatibleCodeQl */); } if (await getGitRoot(sourceRoot) === void 0) { logger.warning( `Cannot build an ${overlayDatabaseMode} database because the source root "${sourceRoot}" is not inside a git repository. Falling back to creating a normal full database instead.` ); - return disabledResult("no-git-root" /* NoGitRoot */); + return new Failure("no-git-root" /* NoGitRoot */); } if (gitVersion === void 0) { logger.warning( `Cannot build an ${overlayDatabaseMode} database because the Git version could not be determined. 
Falling back to creating a normal full database instead.` ); - return disabledResult("incompatible-git" /* IncompatibleGit */); + return new Failure("incompatible-git" /* IncompatibleGit */); } if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) { logger.warning( `Cannot build an ${overlayDatabaseMode} database because the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. Falling back to creating a normal full database instead.` ); - return disabledResult("incompatible-git" /* IncompatibleGit */); + return new Failure("incompatible-git" /* IncompatibleGit */); } - return { + return new Success({ overlayDatabaseMode, - useOverlayDatabaseCaching, - disabledReason - }; + useOverlayDatabaseCaching + }); } function dbLocationOrDefault(dbLocation, tempDir) { return dbLocation || path9.resolve(tempDir, "codeql_databases"); @@ -106513,11 +106691,7 @@ async function initConfig(features, inputs) { } else { logger.debug(`Skipping check for generated files.`); } - const { - overlayDatabaseMode, - useOverlayDatabaseCaching, - disabledReason: overlayDisabledReason - } = await getOverlayDatabaseMode( + const overlayDatabaseModeResult = await checkOverlayEnablement( inputs.codeql, inputs.features, config.languages, @@ -106529,19 +106703,27 @@ async function initConfig(features, inputs) { gitVersion, logger ); - logger.info( - `Using overlay database mode: ${overlayDatabaseMode} ${useOverlayDatabaseCaching ? "with" : "without"} caching.` - ); - config.overlayDatabaseMode = overlayDatabaseMode; - config.useOverlayDatabaseCaching = useOverlayDatabaseCaching; - if (overlayDisabledReason !== void 0) { + if (overlayDatabaseModeResult.isSuccess()) { + const { overlayDatabaseMode, useOverlayDatabaseCaching } = overlayDatabaseModeResult.value; + logger.info( + `Using overlay database mode: ${overlayDatabaseMode} ${useOverlayDatabaseCaching ? 
"with" : "without"} caching.` + ); + config.overlayDatabaseMode = overlayDatabaseMode; + config.useOverlayDatabaseCaching = useOverlayDatabaseCaching; + } else { + const overlayDisabledReason = overlayDatabaseModeResult.value; + logger.info( + `Using overlay database mode: ${"none" /* None */} without caching.` + ); + config.overlayDatabaseMode = "none" /* None */; + config.useOverlayDatabaseCaching = false; await addOverlayDisablementDiagnostics( config, inputs.codeql, overlayDisabledReason ); } - if (overlayDatabaseMode === "overlay" /* Overlay */ || await shouldPerformDiffInformedAnalysis( + if (config.overlayDatabaseMode === "overlay" /* Overlay */ || await shouldPerformDiffInformedAnalysis( inputs.codeql, inputs.features, logger @@ -109457,8 +109639,6 @@ async function run(startedAt) { ); const repositoryPropertiesResult = await loadRepositoryProperties( repositoryNwo, - gitHubVersion, - features, logger ); const jobRunUuid = v4_default(); @@ -109858,7 +110038,7 @@ exec ${goBinaryPath} "$@"` logger ); } -async function loadRepositoryProperties(repositoryNwo, gitHubVersion, features, logger) { +async function loadRepositoryProperties(repositoryNwo, logger) { const repositoryOwnerType = github2.context.payload.repository?.owner.type; logger.debug( `Repository owner type is '${repositoryOwnerType ?? "unknown"}'.` @@ -109869,16 +110049,8 @@ async function loadRepositoryProperties(repositoryNwo, gitHubVersion, features, ); return new Success({}); } - if (!await features.getValue("use_repository_properties_v2" /* UseRepositoryProperties */)) { - logger.debug( - "Skipping loading repository properties because the UseRepositoryProperties feature flag is disabled." 
- ); - return new Success({}); - } try { - return new Success( - await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) - ); + return new Success(await loadPropertiesFromApi(logger, repositoryNwo)); } catch (error3) { logger.warning( `Failed to load repository properties: ${getErrorMessage(error3)}` diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index 7c8bbe7e3..aa3673bd3 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path5.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." 
+ a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -102870,7 +102992,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -104145,6 +104267,11 @@ var featureConfig = { // cannot be found when interpreting results. minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -104156,11 +104283,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index fbba903f7..a9eb08eb5 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, 
module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -48064,6 +48065,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path9.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -48460,50 +48462,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -60543,7 +60642,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -60565,90 +60664,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -60666,11 +60765,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -60679,16 +60778,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -60716,49 +60815,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -60767,23 +60869,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -60793,10 +60895,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -60805,15 +60907,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -60826,13 +60928,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -60840,24 +60942,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -60866,45 +60968,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -60918,7 +61018,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -60926,10 +61026,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -60948,12 +61048,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -60993,26 +61093,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." 
+ a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -61020,18 +61121,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -61092,12 +61201,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -61118,19 +61227,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -61139,7 +61248,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -61147,7 +61256,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -61181,7 +61290,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -61209,7 +61318,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -61219,7 +61328,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -61240,6 +61349,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -61263,10 +61379,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -61274,13 +61390,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -61298,15 +61414,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -61314,15 +61436,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -61357,18 +61479,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -61376,14 +61498,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -102926,7 +103048,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -103557,8 +103679,8 @@ var path4 = __toESM(require("path")); var semver4 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs3 = __toESM(require("fs")); @@ -104042,6 +104164,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -104053,11 +104180,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index bfe40922a..6fdfe2d8b 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path4.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _2, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _2(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _2 = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _2), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? 
"." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -102003,6 +102125,7 @@ var require_minimatch2 = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; @@ -102059,51 +102182,146 @@ var require_minimatch2 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + } + _matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + let firstgs = -1; + for (let i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + let lastgs = -1; + for (let i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + const head = pattern.slice(patternIndex, firstgs); + const body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + const tail = partial ? 
[] : pattern.slice(lastgs + 1); + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { return false; } - var hit; + fileIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + const tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + if (!this._matchOne(file, tail, partial, tailStart - 1, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i = fileIndex; i < file.length - fileTailMatch; i++) { + const f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let idx = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[idx--] + b[0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch + ); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + _matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = 
file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + } + _matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + const p = pattern[pi]; + const f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === GLOBSTAR) return false; + let hit; if (typeof p === "string") { hit = f === p; this.debug("string match", p, f, hit); @@ -115424,12 +115642,60 @@ var require_unescape = __commonJS({ var require_ast = __commonJS({ "node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js"(exports2) { "use strict"; + var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.AST = void 0; var brace_expressions_js_1 = require_brace_expressions(); var unescape_js_1 = require_unescape(); var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]); var isExtglobType = (c) => types.has(c); + var isExtglobAST = (c) => isExtglobType(c.type); + var adoptionMap = /* @__PURE__ */ new Map([ + ["!", ["@"]], + ["?", ["?", "@"]], + ["@", ["@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@"]] + ]); + var adoptionWithSpaceMap = /* @__PURE__ */ new Map([ + ["!", ["?"]], + ["@", ["?"]], + ["+", ["?", "*"]] + ]); + 
var adoptionAnyMap = /* @__PURE__ */ new Map([ + ["!", ["?", "@"]], + ["?", ["?", "@"]], + ["@", ["?", "@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@", "?", "*"]] + ]); + var usurpMap = /* @__PURE__ */ new Map([ + ["!", /* @__PURE__ */ new Map([["!", "@"]])], + [ + "?", + /* @__PURE__ */ new Map([ + ["*", "*"], + ["+", "*"] + ]) + ], + [ + "@", + /* @__PURE__ */ new Map([ + ["!", "!"], + ["?", "?"], + ["@", "@"], + ["*", "*"], + ["+", "+"] + ]) + ], + [ + "+", + /* @__PURE__ */ new Map([ + ["?", "*"], + ["*", "*"] + ]) + ] + ]); var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))"; var startNoDot = "(?!\\.)"; var addPatternStart = /* @__PURE__ */ new Set(["[", "."]); @@ -115439,7 +115705,8 @@ var require_ast = __commonJS({ var qmark = "[^/]"; var star = qmark + "*?"; var starNoEmpty = qmark + "+?"; - var AST = class _AST { + var ID = 0; + var AST = class { type; #root; #hasMagic; @@ -115454,6 +115721,22 @@ var require_ast = __commonJS({ // set to true if it's an extglob with no children // (which really means one child of '') #emptyExt = false; + id = ++ID; + get depth() { + return (this.#parent?.depth ?? 
-1) + 1; + } + [/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")]() { + return { + "@@type": "AST", + id: this.id, + type: this.type, + root: this.#root.id, + parent: this.#parent?.id, + depth: this.depth, + partsLength: this.#parts.length, + parts: this.#parts + }; + } constructor(type2, parent, options = {}) { this.type = type2; if (type2) @@ -115519,7 +115802,7 @@ var require_ast = __commonJS({ for (const p of parts) { if (p === "") continue; - if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) { + if (typeof p !== "string" && !(p instanceof _a && p.#parent === this)) { throw new Error("invalid part: " + p); } this.#parts.push(p); @@ -115544,7 +115827,7 @@ var require_ast = __commonJS({ const p = this.#parent; for (let i = 0; i < this.#parentIndex; i++) { const pp = p.#parts[i]; - if (!(pp instanceof _AST && pp.type === "!")) { + if (!(pp instanceof _a && pp.type === "!")) { return false; } } @@ -115569,13 +115852,14 @@ var require_ast = __commonJS({ this.push(part.clone(this)); } clone(parent) { - const c = new _AST(this.type, parent); + const c = new _a(this.type, parent); for (const p of this.#parts) { c.copyIn(p); } return c; } - static #parseAST(str2, ast, pos, opt) { + static #parseAST(str2, ast, pos, opt, extDepth) { + const maxDepth = opt.maxExtglobRecursion ?? 
2; let escaping = false; let inBrace = false; let braceStart = -1; @@ -115607,11 +115891,12 @@ var require_ast = __commonJS({ acc2 += c; continue; } - if (!opt.noext && isExtglobType(c) && str2.charAt(i2) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i2) === "(" && extDepth <= maxDepth; + if (doRecurse) { ast.push(acc2); acc2 = ""; - const ext = new _AST(c, ast); - i2 = _AST.#parseAST(str2, ext, i2, opt); + const ext = new _a(c, ast); + i2 = _a.#parseAST(str2, ext, i2, opt, extDepth + 1); ast.push(ext); continue; } @@ -115621,7 +115906,7 @@ var require_ast = __commonJS({ return i2; } let i = pos + 1; - let part = new _AST(null, ast); + let part = new _a(null, ast); const parts = []; let acc = ""; while (i < str2.length) { @@ -115648,19 +115933,22 @@ var require_ast = __commonJS({ acc += c; continue; } - if (isExtglobType(c) && str2.charAt(i) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i) === "(" && /* c8 ignore start - the maxDepth is sufficient here */ + (extDepth <= maxDepth || ast && ast.#canAdoptType(c)); + if (doRecurse) { + const depthAdd = ast && ast.#canAdoptType(c) ? 
0 : 1; part.push(acc); acc = ""; - const ext = new _AST(c, part); + const ext = new _a(c, part); part.push(ext); - i = _AST.#parseAST(str2, ext, i, opt); + i = _a.#parseAST(str2, ext, i, opt, extDepth + depthAdd); continue; } if (c === "|") { part.push(acc); acc = ""; parts.push(part); - part = new _AST(null, ast); + part = new _a(null, ast); continue; } if (c === ")") { @@ -115679,9 +115967,71 @@ var require_ast = __commonJS({ ast.#parts = [str2.substring(pos - 1)]; return i; } + #canAdoptWithSpace(child) { + return this.#canAdopt(child, adoptionWithSpaceMap); + } + #canAdopt(child, map2 = adoptionMap) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canAdoptType(gc.type, map2); + } + #canAdoptType(c, map2 = adoptionAnyMap) { + return !!map2.get(this.type)?.includes(c); + } + #adoptWithSpace(child, index) { + const gc = child.#parts[0]; + const blank = new _a(null, gc, this.options); + blank.#parts.push(""); + gc.push(blank); + this.#adopt(child, index); + } + #adopt(child, index) { + const gc = child.#parts[0]; + this.#parts.splice(index, 1, ...gc.#parts); + for (const p of gc.#parts) { + if (typeof p === "object") + p.#parent = this; + } + this.#toString = void 0; + } + #canUsurpType(c) { + const m = usurpMap.get(this.type); + return !!m?.has(c); + } + #canUsurp(child) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null || this.#parts.length !== 1) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canUsurpType(gc.type); + } + #usurp(child) { + const m = usurpMap.get(this.type); + const gc = child.#parts[0]; + const nt = m?.get(gc.type); + if (!nt) + return false; + this.#parts = gc.#parts; + 
for (const p of this.#parts) { + if (typeof p === "object") { + p.#parent = this; + } + } + this.type = nt; + this.#toString = void 0; + this.#emptyExt = false; + } static fromGlob(pattern, options = {}) { - const ast = new _AST(null, void 0, options); - _AST.#parseAST(pattern, ast, 0, options); + const ast = new _a(null, void 0, options); + _a.#parseAST(pattern, ast, 0, options, 0); return ast; } // returns the regular expression if there's magic, or the unescaped @@ -115775,12 +116125,14 @@ var require_ast = __commonJS({ // or start or whatever) and prepend ^ or / at the Regexp construction. toRegExpSource(allowDot) { const dot = allowDot ?? !!this.#options.dot; - if (this.#root === this) + if (this.#root === this) { + this.#flatten(); this.#fillNegs(); - if (!this.type) { + } + if (!isExtglobAST(this)) { const noEmpty = this.isStart() && this.isEnd() && !this.#parts.some((s) => typeof s !== "string"); const src = this.#parts.map((p) => { - const [re, _2, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); + const [re, _2, hasMagic, uflag] = typeof p === "string" ? _a.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); this.#hasMagic = this.#hasMagic || hasMagic; this.#uflag = this.#uflag || uflag; return re; @@ -115819,9 +116171,10 @@ var require_ast = __commonJS({ let body = this.#partsToRegExp(dot); if (this.isStart() && this.isEnd() && !body && this.type !== "!") { const s = this.toString(); - this.#parts = [s]; - this.type = null; - this.#hasMagic = void 0; + const me = this; + me.#parts = [s]; + me.type = null; + me.#hasMagic = void 0; return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; } let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? 
"" : this.#partsToRegExp(true); @@ -115848,6 +116201,38 @@ var require_ast = __commonJS({ this.#uflag ]; } + #flatten() { + if (!isExtglobAST(this)) { + for (const p of this.#parts) { + if (typeof p === "object") { + p.#flatten(); + } + } + } else { + let iterations = 0; + let done = false; + do { + done = true; + for (let i = 0; i < this.#parts.length; i++) { + const c = this.#parts[i]; + if (typeof c === "object") { + c.#flatten(); + if (this.#canAdopt(c)) { + done = false; + this.#adopt(c, i); + } else if (this.#canAdoptWithSpace(c)) { + done = false; + this.#adoptWithSpace(c, i); + } else if (this.#canUsurp(c)) { + done = false; + this.#usurp(c); + } + } + } + } while (!done && ++iterations < 10); + } + this.#toString = void 0; + } #partsToRegExp(dot) { return this.#parts.map((p) => { if (typeof p === "string") { @@ -115909,6 +116294,7 @@ var require_ast = __commonJS({ } }; exports2.AST = AST; + _a = AST; } }); @@ -116093,11 +116479,13 @@ var require_commonjs20 = __commonJS({ isWindows; platform; windowsNoMagicRoot; + maxGlobstarRecursion; regexp; constructor(pattern, options = {}) { (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); options = options || {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200; this.pattern = pattern; this.platform = options.platform || defaultPlatform; this.isWindows = this.platform === "win32"; @@ -116434,7 +116822,8 @@ var require_commonjs20 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial = false) { - const options = this.options; + let fileStartIndex = 0; + let patternStartIndex = 0; if (this.isWindows) { const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]); const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" 
&& /^[a-z]:$/i.test(file[3]); @@ -116449,11 +116838,8 @@ var require_commonjs20 = __commonJS({ ]; if (fd.toLowerCase() === pd.toLowerCase()) { pattern[pdi] = fd; - if (pdi > fdi) { - pattern = pattern.slice(pdi); - } else if (fdi > pdi) { - file = file.slice(fdi); - } + patternStartIndex = pdi; + fileStartIndex = fdi; } } } @@ -116461,49 +116847,123 @@ var require_commonjs20 = __commonJS({ if (optimizationLevel >= 2) { file = this.levelTwoFileOptimize(file); } - this.debug("matchOne", this, { file, pattern }); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) { + if (pattern.includes(exports2.GLOBSTAR)) { + return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex); + } + return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex); + } + #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + const firstgs = pattern.indexOf(exports2.GLOBSTAR, patternIndex); + const lastgs = pattern.lastIndexOf(exports2.GLOBSTAR); + const [head, body, tail] = partial ? [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1), + [] + ] : [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1, lastgs), + pattern.slice(lastgs + 1) + ]; + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this.#matchOne(fileHead, head, partial, 0, 0)) { return false; } - if (p === exports2.GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") - return false; - } - return true; + fileIndex += head.length; + patternIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) + return false; + let tailStart = file.length - tail.length; + if (this.#matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } + tailStart--; + if (!this.#matchOne(file, tail, partial, tailStart, 0)) { + return false; } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) { - return true; - } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i2 = fileIndex; i2 < file.length - fileTailMatch; i2++) { + const f = String(file[i2]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === exports2.GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let i = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length); + } + return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; + } + fileIndex++; + } + return partial || null; + } + #matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi; + let pi; + let pl; + let fl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + let p = pattern[pi]; + let f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === exports2.GLOBSTAR) { return false; } let hit; @@ -160637,7 +161097,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -161173,6 +161633,11 @@ var featureConfig = { // cannot be found when interpreting results. minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -161184,11 +161649,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 3586bfa5e..84519a068 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", 
private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -48064,6 +48065,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path5.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -48460,50 +48462,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -60543,7 +60642,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -60565,90 +60664,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" 
=== t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." : "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have 
attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" 
!== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" 
== t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -60666,11 +60765,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -60679,16 +60778,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' 
is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -60716,49 +60815,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, 
e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? 
"@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -60767,23 +60869,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? 
s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -60793,10 +60895,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -60805,15 +60907,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new 
Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -60826,13 +60928,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, "MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -60840,24 +60942,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid 
attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -60866,45 +60968,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? 
(r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? (e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" 
}, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? 
(t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -60918,7 +61018,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? 
"/" : ""; if ("xmlns" === e2[0]) return ""; @@ -60926,10 +61026,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -60948,12 +61048,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -60993,26 +61093,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." 
+ a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -61020,18 +61121,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -61092,12 +61201,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -61118,19 +61227,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -61139,7 +61248,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -61147,7 +61256,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -61181,7 +61290,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -61209,7 +61318,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -61219,7 +61328,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -61240,6 +61349,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -61263,10 +61379,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -61274,13 +61390,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -61298,15 +61414,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -61314,15 +61436,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -61357,18 +61479,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -61376,14 +61498,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -120490,8 +120612,8 @@ var path = __toESM(require("path")); var semver4 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var actionsCache = __toESM(require_cache5()); @@ -120834,6 +120956,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -120845,11 +120972,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", @@ -121626,6 +121748,18 @@ var LANGUAGE_TO_REGISTRY_TYPE = { rust: ["cargo_registry"], go: ["goproxy_server", "git_source"] }; +var NEW_LANGUAGE_TO_REGISTRY_TYPE = { + actions: [], + cpp: [], + java: ["maven_repository"], + csharp: ["nuget_feed"], + javascript: [], + python: [], + ruby: [], + rust: [], + swift: [], + go: ["goproxy_server", "git_source"] +}; function getRegistryAddress(registry) { if (isDefined2(registry.url)) { return { @@ -121643,8 +121777,9 @@ function getRegistryAddress(registry) { ); } } -function getCredentials(logger, registrySecrets, registriesCredentials, language) { - const registryTypeForLanguage = language ? LANGUAGE_TO_REGISTRY_TYPE[language] : void 0; +function getCredentials(logger, registrySecrets, registriesCredentials, language, skipUnusedRegistries = false) { + const registryMapping = skipUnusedRegistries ? NEW_LANGUAGE_TO_REGISTRY_TYPE : LANGUAGE_TO_REGISTRY_TYPE; + const registryTypeForLanguage = language ? 
registryMapping[language] : void 0; let credentialsStr; if (registriesCredentials !== void 0) { logger.info(`Using registries_credentials input.`); @@ -122141,11 +122276,15 @@ async function run(startedAt) { ); const languageInput = getOptionalInput("language"); language = languageInput ? parseLanguage(languageInput) : void 0; + const skipUnusedRegistries = await features.getValue( + "start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */ + ); const credentials = getCredentials( logger, getOptionalInput("registry_secrets"), getOptionalInput("registries_credentials"), - language + language, + skipUnusedRegistries ); if (credentials.length === 0) { logger.info("No credentials found, skipping proxy setup."); diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 135a18e5f..236eb8512 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -204,7 +204,7 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto2 = __importStar2(require("crypto")); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var os2 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -212,10 +212,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs12.existsSync(filePath)) { + if (!fs13.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs12.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs12 = __importStar2(require("fs")); + 
var fs13 = __importStar2(require("fs")); var path12 = __importStar2(require("path")); - _a = fs12.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + _a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs12.promises.readlink(fsPath); + const result = yield fs13.promises.readlink(fsPath); if (exports2.IS_WINDOWS && !result.endsWith("\\")) { return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs12.constants.O_RDONLY; + exports2.READONLY = fs13.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -47283,7 +47283,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -47292,7 +47292,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . 
--format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -47341,6 +47341,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -47349,14 +47350,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -49361,6 +49362,7 @@ var require_minimatch = __commonJS({ pattern = pattern.split(path12.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? 
options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -49757,50 +49759,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? [] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -50305,7 +50404,7 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core12 = __importStar2(require_core()); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path12 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); @@ -50359,7 +50458,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core12.debug(`Search path '${searchPath}'`); try { - yield __await2(fs12.promises.lstat(searchPath)); + yield __await2(fs13.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -50393,7 +50492,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs12.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50428,7 +50527,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield 
fs12.promises.stat(item.path); + stats = yield fs13.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -50440,10 +50539,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs12.promises.lstat(item.path); + stats = yield fs13.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs12.promises.realpath(item.path); + const realPath = yield fs13.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -50552,7 +50651,7 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); var core12 = __importStar2(require_core()); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var path12 = __importStar2(require("path")); @@ -50575,13 +50674,13 @@ var require_internal_hash_files = __commonJS({ writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs12.statSync(file).isDirectory()) { + if (fs13.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs12.createReadStream(file), hash2); + yield pipeline(fs13.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -51956,7 +52055,7 @@ var require_cacheUtils = __commonJS({ var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var path12 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); @@ 
-51985,7 +52084,7 @@ var require_cacheUtils = __commonJS({ }); } function getArchiveFileSizeInBytes(filePath) { - return fs12.statSync(filePath).size; + return fs13.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -52023,7 +52122,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs12.unlink)(filePath); + return util.promisify(fs13.unlink)(filePath); }); } function getVersion(app_1) { @@ -52065,7 +52164,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs12.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -61840,7 +61939,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -61862,90 +61961,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" 
=== t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" === t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." 
: "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + 
n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" !== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? 
m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" == t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -61963,11 +62062,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? 
i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -61976,16 +62075,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -62013,49 +62112,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { 
const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? 
null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? 
this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -62064,23 +62166,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -62090,10 +62192,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += 
t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -62102,15 +62204,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -62123,13 +62225,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, 
"MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -62137,24 +62239,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -62163,45 +62265,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid 
attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? 
(e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { 
regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? 
this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -62215,7 +62315,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : ""; if ("xmlns" === e2[0]) return ""; @@ -62223,10 +62323,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -62245,12 +62345,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -62290,26 +62390,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + 
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -62317,18 +62418,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -62389,12 +62498,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -62415,19 +62524,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -62436,7 +62545,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -62444,7 +62553,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -62478,7 +62587,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -62506,7 +62615,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -62516,7 +62625,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -62537,6 +62646,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -62560,10 +62676,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -62571,13 +62687,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -62595,15 +62711,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -62611,15 +62733,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -62654,18 +62776,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -62673,14 +62795,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -92199,7 +92321,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); @@ -92310,7 +92432,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs12.createWriteStream(archivePath); + const writeStream = fs13.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -92335,7 +92457,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs12.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs13.promises.open(archivePath, "w"); const httpClient = new 
http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -92451,7 +92573,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs12.openSync(archivePath, "w"); + const fd = fs13.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -92469,12 +92591,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs12.writeFileSync(fd, result); + fs13.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs12.closeSync(fd); + fs13.closeSync(fd); } } }); @@ -92796,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({ var core12 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -92931,7 +93053,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs12.openSync(archivePath, "r"); + const fd = fs13.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -92945,7 +93067,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += 
maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs12.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, { fd, start, end, @@ -92956,7 +93078,7 @@ Other caches with similar key:`); } }))); } finally { - fs12.closeSync(fd); + fs13.closeSync(fd); } return; }); @@ -98912,7 +99034,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os2 = require("os"); var cp = require("child_process"); - var fs12 = require("fs"); + var fs13 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { const platFilter = os2.platform(); @@ -98974,10 +99096,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs12.existsSync(lsbReleaseFile)) { - contents = fs12.readFileSync(lsbReleaseFile).toString(); - } else if (fs12.existsSync(osReleaseFile)) { - contents = fs12.readFileSync(osReleaseFile).toString(); + if (fs13.existsSync(lsbReleaseFile)) { + contents = fs13.readFileSync(lsbReleaseFile).toString(); + } else if (fs13.existsSync(osReleaseFile)) { + contents = fs13.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99186,7 +99308,7 @@ var require_tool_cache = __commonJS({ var core12 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs12 = __importStar2(require("fs")); + var fs13 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os2 = __importStar2(require("os")); var path12 = __importStar2(require("path")); @@ -99232,7 +99354,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs12.existsSync(dest)) { + if (fs13.existsSync(dest)) { throw new Error(`Destination file path ${dest} 
already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99256,7 +99378,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs12.createWriteStream(dest)); + yield pipeline(readStream, fs13.createWriteStream(dest)); core12.debug("download complete"); succeeded = true; return dest; @@ -99468,11 +99590,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os2.arch(); core12.debug(`Caching tool ${tool} ${version} ${arch2}`); core12.debug(`source dir: ${sourceDir}`); - if (!fs12.statSync(sourceDir).isDirectory()) { + if (!fs13.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs12.readdirSync(sourceDir)) { + for (const itemName of fs13.readdirSync(sourceDir)) { const s = path12.join(sourceDir, itemName); yield io6.cp(s, destPath, { recursive: true }); } @@ -99486,7 +99608,7 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os2.arch(); core12.debug(`Caching tool ${tool} ${version} ${arch2}`); core12.debug(`source file: ${sourceFile}`); - if (!fs12.statSync(sourceFile).isFile()) { + if (!fs13.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); @@ -99515,7 +99637,7 @@ var require_tool_cache = __commonJS({ versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); core12.debug(`checking cache: ${cachePath}`); - if (fs12.existsSync(cachePath) && fs12.existsSync(`${cachePath}.complete`)) { + if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) { core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -99528,12 +99650,12 @@ var require_tool_cache = __commonJS({ const versions = []; arch2 = arch2 
|| os2.arch(); const toolPath = path12.join(_getCacheDirectory(), toolName); - if (fs12.existsSync(toolPath)) { - const children = fs12.readdirSync(toolPath); + if (fs13.existsSync(toolPath)) { + const children = fs13.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { const fullPath = path12.join(toolPath, child, arch2 || ""); - if (fs12.existsSync(fullPath) && fs12.existsSync(`${fullPath}.complete`)) { + if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99604,7 +99726,7 @@ var require_tool_cache = __commonJS({ function _completeToolPath(tool, version, arch2) { const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs12.writeFileSync(markerPath, ""); + fs13.writeFileSync(markerPath, ""); core12.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -103108,13 +103230,12 @@ var require_sarif_schema_2_1_0 = __commonJS({ // src/upload-lib.ts var upload_lib_exports = {}; __export(upload_lib_exports, { - InvalidSarifUploadError: () => InvalidSarifUploadError, buildPayload: () => buildPayload, findSarifFilesInDir: () => findSarifFilesInDir, getGroupedSarifFilePaths: () => getGroupedSarifFilePaths, populateRunAutomationDetails: () => populateRunAutomationDetails, postProcessSarifFiles: () => postProcessSarifFiles, - readSarifFile: () => readSarifFile, + readSarifFileOrThrow: () => readSarifFileOrThrow, shouldConsiderConfigurationError: () => shouldConsiderConfigurationError, shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest, shouldShowCombineSarifFilesDeprecationWarning: () => shouldShowCombineSarifFilesDeprecationWarning, @@ -103128,7 +103249,7 @@ __export(upload_lib_exports, { writePostProcessedFiles: () => writePostProcessedFiles }); module.exports = __toCommonJS(upload_lib_exports); -var fs11 = __toESM(require("fs")); +var fs12 = 
__toESM(require("fs")); var path11 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); @@ -103157,21 +103278,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs12 = options.fs || await import("node:fs/promises"); + const fs13 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs12.lstat(itemPath, { bigint: true }) : await fs12.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs12.readdir(itemPath) : await fs12.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? 
await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -105811,17 +105932,6 @@ function getExtraOptionsEnvParam() { ); } } -function getToolNames(sarif) { - const toolNames = {}; - for (const run of sarif.runs || []) { - const tool = run.tool || {}; - const driver = tool.driver || {}; - if (typeof driver.name === "string" && driver.name.length > 0) { - toolNames[driver.name] = true; - } - } - return Object.keys(toolNames); -} function getCodeQLDatabasePath(config, language) { return path.resolve(config.dbLocation, language); } @@ -106824,8 +106934,8 @@ var path5 = __toESM(require("path")); var semver5 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs3 = __toESM(require("fs")); @@ -107313,6 +107423,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -107324,11 +107439,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", @@ -110083,12 +110193,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) { } return uri; } -async function addFingerprints(sarif, sourceRoot, logger) { +async function addFingerprints(sarifLog, sourceRoot, logger) { logger.info( `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` ); const callbacksByFile = {}; - for (const run of sarif.runs || []) { + for (const run of sarifLog.runs || []) { const artifacts = run.artifacts || []; for (const result of run.results || []) { const primaryLocation = (result.locations || [])[0]; @@ -110128,7 +110238,7 @@ async function addFingerprints(sarif, sourceRoot, logger) { }; await hash(teeCallback, filepath); } - return sarif; + return sarifLog; } // src/init.ts @@ -110163,36 +110273,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe }; } -// src/upload-lib.ts -var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; -var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; +// src/sarif/index.ts +var fs11 = __toESM(require("fs")); +var InvalidSarifUploadError = class extends Error { +}; +function getToolNames(sarifFile) { + const toolNames = {}; + for (const run of sarifFile.runs || []) { + const tool = run.tool || {}; + const driver = tool.driver || {}; + if (typeof driver.name === "string" && driver.name.length > 0) { + toolNames[driver.name] = true; + } + } + return Object.keys(toolNames); +} +function readSarifFile(sarifFilePath) { + return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8")); +} function combineSarifFiles(sarifFiles, logger) { logger.info(`Loading SARIF file(s)`); - const combinedSarif = { - version: null, - runs: [] - }; + const runs = []; + let version = void 0; for (const sarifFile of sarifFiles) { logger.debug(`Loading SARIF file: ${sarifFile}`); - const sarifObject = JSON.parse( - fs11.readFileSync(sarifFile, "utf8") - ); - if (combinedSarif.version === null) { - combinedSarif.version = sarifObject.version; - } else if (combinedSarif.version !== 
sarifObject.version) { + const sarifLog = readSarifFile(sarifFile); + if (version === void 0) { + version = sarifLog.version; + } else if (version !== sarifLog.version) { throw new InvalidSarifUploadError( - `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}` + `Different SARIF versions encountered: ${version} and ${sarifLog.version}` ); } - combinedSarif.runs.push(...sarifObject.runs); + runs.push(...sarifLog?.runs || []); } - return combinedSarif; + if (version === void 0) { + version = "2.1.0"; + } + return { version, runs }; } -function areAllRunsProducedByCodeQL(sarifObjects) { - return sarifObjects.every((sarifObject) => { - return sarifObject.runs?.every( - (run) => run.tool?.driver?.name === "CodeQL" - ); +function areAllRunsProducedByCodeQL(sarifLogs) { + return sarifLogs.every((sarifLog) => { + return sarifLog.runs?.every((run) => run.tool?.driver?.name === "CodeQL"); }); } function createRunKey(run) { @@ -110205,10 +110327,13 @@ function createRunKey(run) { automationId: run.automationDetails?.id }; } -function areAllRunsUnique(sarifObjects) { +function areAllRunsUnique(sarifLogs) { const keys = /* @__PURE__ */ new Set(); - for (const sarifObject of sarifObjects) { - for (const run of sarifObject.runs) { + for (const sarifLog of sarifLogs) { + if (sarifLog.runs === void 0) { + continue; + } + for (const run of sarifLog.runs) { const key = JSON.stringify(createRunKey(run)); if (keys.has(key)) { return false; @@ -110218,6 +110343,10 @@ function areAllRunsUnique(sarifObjects) { } return true; } + +// src/upload-lib.ts +var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; +var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) { if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, 
"<3.14", true)) { return false; @@ -110246,9 +110375,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); - const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs11.readFileSync(sarifFile, "utf8")); - }); + const sarifObjects = sarifFiles.map(readSarifFile); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; if (!areAllRunsProducedByCodeQL(sarifObjects)) { @@ -110301,27 +110428,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = initCodeQLResult.codeql; } const baseTempDir = path11.resolve(tempDir, "combined-sarif"); - fs11.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs11.mkdtempSync(path11.resolve(baseTempDir, "output-")); + fs12.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs12.mkdtempSync(path11.resolve(baseTempDir, "output-")); const outputFile = path11.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs11.readFileSync(outputFile, "utf8")); + return readSarifFile(outputFile); } -function populateRunAutomationDetails(sarif, category, analysis_key, environment) { +function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { - for (const run of sarif.runs || []) { + for (const run of sarifFile.runs || []) { if 
(run.automationDetails === void 0) { run.automationDetails = { id: automationID }; } } - return sarif; + return sarifFile; } - return sarif; + return sarifFile; } function getAutomationID2(category, analysis_key, environment) { if (category !== void 0) { @@ -110344,7 +110471,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs11.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -110378,7 +110505,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs11.readdirSync(dir, { withFileTypes: true }); + const entries = fs12.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { sarifFiles.push(path11.resolve(dir, entry.name)); @@ -110391,11 +110518,11 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } function getSarifFilePaths(sarifPath, isSarif) { - if (!fs11.existsSync(sarifPath)) { + if (!fs12.existsSync(sarifPath)) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } let sarifFiles; - if (fs11.lstatSync(sarifPath).isDirectory()) { + if (fs12.lstatSync(sarifPath).isDirectory()) { sarifFiles = findSarifFilesInDir(sarifPath, isSarif); if (sarifFiles.length === 0) { throw new ConfigurationError( @@ -110408,7 +110535,7 @@ function getSarifFilePaths(sarifPath, isSarif) { return sarifFiles; } async function getGroupedSarifFilePaths(logger, sarifPath) { - const stats = fs11.statSync(sarifPath, { throwIfNoEntry: false }); + const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false }); if (stats === 
void 0) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } @@ -110455,9 +110582,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) { } return results; } -function countResultsInSarif(sarif) { +function countResultsInSarif(sarifLog) { let numResults = 0; - const parsedSarif = JSON.parse(sarif); + const parsedSarif = JSON.parse(sarifLog); if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array."); } @@ -110471,26 +110598,26 @@ function countResultsInSarif(sarif) { } return numResults; } -function readSarifFile(sarifFilePath) { +function readSarifFileOrThrow(sarifFilePath) { try { - return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8")); + return readSarifFile(sarifFilePath); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}` ); } } -function validateSarifFileSchema(sarif, sarifFilePath, logger) { - if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments. +function validateSarifFileSchema(sarifLog, sarifFilePath, logger) { + if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments. !getTestingEnvironment()) { logger.debug( `Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.` ); - return; + return true; } logger.info(`Validating ${sarifFilePath}`); const schema2 = require_sarif_schema_2_1_0(); - const result = new jsonschema2.Validator().validate(sarif, schema2); + const result = new jsonschema2.Validator().validate(sarifLog, schema2); const warningAttributes = ["uri-reference", "uri"]; const errors = (result.errors ?? 
[]).filter( (err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument)) @@ -110517,6 +110644,7 @@ ${sarifErrors.join( )}` ); } + return true; } function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) { const payloadObj = { @@ -110542,7 +110670,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs11.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -110553,14 +110681,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); - let sarif; + let sarifLog; category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { - const parsedSarif = readSarifFile(sarifPath); + const parsedSarif = readSarifFileOrThrow(sarifPath); validateSarifFileSchema(parsedSarif, sarifPath, logger); } - sarif = await combineSarifFilesUsingCLI( + sarifLog = await combineSarifFilesUsingCLI( sarifPaths, gitHubVersion, features, @@ -110568,21 +110696,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, ); } else { const sarifPath = sarifPaths[0]; - sarif = readSarifFile(sarifPath); - validateSarifFileSchema(sarif, sarifPath, logger); - await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion); + sarifLog = readSarifFileOrThrow(sarifPath); + 
validateSarifFileSchema(sarifLog, sarifPath, logger); + await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion); } - sarif = filterAlertsByDiffRange(logger, sarif); - sarif = await addFingerprints(sarif, checkoutPath, logger); + sarifLog = filterAlertsByDiffRange(logger, sarifLog); + sarifLog = await addFingerprints(sarifLog, checkoutPath, logger); const analysisKey = await getAnalysisKey(); const environment = getRequiredInput("matrix"); - sarif = populateRunAutomationDetails( - sarif, + sarifLog = populateRunAutomationDetails( + sarifLog, category, analysisKey, environment ); - return { sarif, analysisKey, environment }; + return { sarif: sarifLog, analysisKey, environment }; } async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) { const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */); @@ -110629,12 +110757,12 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features } async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { logger.startGroup(`Uploading ${uploadTarget.name} results`); - const sarif = postProcessingResults.sarif; - const toolNames = getToolNames(sarif); + const sarifLog = postProcessingResults.sarif; + const toolNames = getToolNames(sarifLog); logger.debug(`Validating that each SARIF run has a unique category`); - validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); + validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); - const sarifPayload = JSON.stringify(sarif); + const sarifPayload = JSON.stringify(sarifLog); logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; @@ -110676,9 +110804,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post }; } function 
dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { - if (!fs11.existsSync(outputDir)) { - fs11.mkdirSync(outputDir, { recursive: true }); - } else if (!fs11.lstatSync(outputDir).isDirectory()) { + if (!fs12.existsSync(outputDir)) { + fs12.mkdirSync(outputDir, { recursive: true }); + } else if (!fs12.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); @@ -110688,7 +110816,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { `upload${uploadTarget.sarifExtension}` ); logger.info(`Writing processed SARIF file to ${outputFile}`); - fs11.writeFileSync(outputFile, sarifPayload); + fs12.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3; @@ -110786,9 +110914,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger assertNever(status); } } -function validateUniqueCategory(sarif, sentinelPrefix) { +function validateUniqueCategory(sarifLog, sentinelPrefix) { const categories = {}; - for (const run of sarif.runs) { + for (const run of sarifLog.runs || []) { const id = run?.automationDetails?.id; const tool = run.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; @@ -110807,15 +110935,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) { function sanitize(str2) { return (str2 ?? 
"_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase(); } -var InvalidSarifUploadError = class extends Error { -}; -function filterAlertsByDiffRange(logger, sarif) { +function filterAlertsByDiffRange(logger, sarifLog) { const diffRanges = readDiffRangesJsonFile(logger); if (!diffRanges?.length) { - return sarif; + return sarifLog; + } + if (sarifLog.runs === void 0) { + return sarifLog; } const checkoutPath = getRequiredInput("checkout_path"); - for (const run of sarif.runs) { + for (const run of sarifLog.runs) { if (run.results) { run.results = run.results.filter((result) => { const locations = [ @@ -110836,17 +110965,16 @@ function filterAlertsByDiffRange(logger, sarif) { }); } } - return sarif; + return sarifLog; } // Annotate the CommonJS export names for ESM import in node: 0 && (module.exports = { - InvalidSarifUploadError, buildPayload, findSarifFilesInDir, getGroupedSarifFilePaths, populateRunAutomationDetails, postProcessSarifFiles, - readSarifFile, + readSarifFileOrThrow, shouldConsiderConfigurationError, shouldConsiderInvalidRequest, shouldShowCombineSarifFilesDeprecationWarning, diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index 01a0406f5..dab78eb86 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -64151,7 +64152,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -64173,90 +64174,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] 
&& "?" === t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" === t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." 
: "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + 
n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" !== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? 
m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" == t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -64274,11 +64275,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? 
i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -64287,16 +64288,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -64324,49 +64325,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { 
const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? 
null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? 
this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -64375,23 +64379,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -64401,10 +64405,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += 
t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -64413,15 +64417,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -64434,13 +64438,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, 
"MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -64448,24 +64452,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -64474,45 +64478,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid 
attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? 
(e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _2, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: 
{ regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? 
this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -64526,7 +64528,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _2(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : ""; if ("xmlns" === e2[0]) return ""; @@ -64534,10 +64536,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _2 = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _2), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -64556,12 +64558,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -64601,26 +64603,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + 
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -64628,18 +64631,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -64700,12 +64711,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -64726,19 +64737,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -64747,7 +64758,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -64755,7 +64766,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -64789,7 +64800,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -64817,7 +64828,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -64827,7 +64838,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -64848,6 +64859,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -64871,10 +64889,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -64882,13 +64900,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -64906,15 +64924,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -64922,15 +64946,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -64965,18 +64989,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -64984,14 +65008,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -94285,6 +94309,7 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; @@ -94341,51 +94366,146 @@ var require_minimatch = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + } + _matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + let firstgs = -1; + for (let i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + let lastgs = -1; + for (let i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + const head = pattern.slice(patternIndex, firstgs); + const body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + const tail = partial ? 
[] : pattern.slice(lastgs + 1); + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { return false; } - var hit; + fileIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + const tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + if (!this._matchOne(file, tail, partial, tailStart - 1, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i = fileIndex; i < file.length - fileTailMatch; i++) { + const f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let idx = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[idx--] + b[0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch + ); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + _matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = 
file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + } + _matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + const p = pattern[pi]; + const f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === GLOBSTAR) return false; + let hit; if (typeof p === "string") { hit = f === p; this.debug("string match", p, f, hit); @@ -107706,12 +107826,60 @@ var require_unescape = __commonJS({ var require_ast = __commonJS({ "node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js"(exports2) { "use strict"; + var _a; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.AST = void 0; var brace_expressions_js_1 = require_brace_expressions(); var unescape_js_1 = require_unescape(); var types = /* @__PURE__ */ new Set(["!", "?", "+", "*", "@"]); var isExtglobType = (c) => types.has(c); + var isExtglobAST = (c) => isExtglobType(c.type); + var adoptionMap = /* @__PURE__ */ new Map([ + ["!", ["@"]], + ["?", ["?", "@"]], + ["@", ["@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@"]] + ]); + var adoptionWithSpaceMap = /* @__PURE__ */ new Map([ + ["!", ["?"]], + ["@", ["?"]], + ["+", ["?", "*"]] + ]); + 
var adoptionAnyMap = /* @__PURE__ */ new Map([ + ["!", ["?", "@"]], + ["?", ["?", "@"]], + ["@", ["?", "@"]], + ["*", ["*", "+", "?", "@"]], + ["+", ["+", "@", "?", "*"]] + ]); + var usurpMap = /* @__PURE__ */ new Map([ + ["!", /* @__PURE__ */ new Map([["!", "@"]])], + [ + "?", + /* @__PURE__ */ new Map([ + ["*", "*"], + ["+", "*"] + ]) + ], + [ + "@", + /* @__PURE__ */ new Map([ + ["!", "!"], + ["?", "?"], + ["@", "@"], + ["*", "*"], + ["+", "+"] + ]) + ], + [ + "+", + /* @__PURE__ */ new Map([ + ["?", "*"], + ["*", "*"] + ]) + ] + ]); var startNoTraversal = "(?!(?:^|/)\\.\\.?(?:$|/))"; var startNoDot = "(?!\\.)"; var addPatternStart = /* @__PURE__ */ new Set(["[", "."]); @@ -107721,7 +107889,8 @@ var require_ast = __commonJS({ var qmark = "[^/]"; var star = qmark + "*?"; var starNoEmpty = qmark + "+?"; - var AST = class _AST { + var ID = 0; + var AST = class { type; #root; #hasMagic; @@ -107736,6 +107905,22 @@ var require_ast = __commonJS({ // set to true if it's an extglob with no children // (which really means one child of '') #emptyExt = false; + id = ++ID; + get depth() { + return (this.#parent?.depth ?? 
-1) + 1; + } + [/* @__PURE__ */ Symbol.for("nodejs.util.inspect.custom")]() { + return { + "@@type": "AST", + id: this.id, + type: this.type, + root: this.#root.id, + parent: this.#parent?.id, + depth: this.depth, + partsLength: this.#parts.length, + parts: this.#parts + }; + } constructor(type2, parent, options = {}) { this.type = type2; if (type2) @@ -107801,7 +107986,7 @@ var require_ast = __commonJS({ for (const p of parts) { if (p === "") continue; - if (typeof p !== "string" && !(p instanceof _AST && p.#parent === this)) { + if (typeof p !== "string" && !(p instanceof _a && p.#parent === this)) { throw new Error("invalid part: " + p); } this.#parts.push(p); @@ -107826,7 +108011,7 @@ var require_ast = __commonJS({ const p = this.#parent; for (let i = 0; i < this.#parentIndex; i++) { const pp = p.#parts[i]; - if (!(pp instanceof _AST && pp.type === "!")) { + if (!(pp instanceof _a && pp.type === "!")) { return false; } } @@ -107851,13 +108036,14 @@ var require_ast = __commonJS({ this.push(part.clone(this)); } clone(parent) { - const c = new _AST(this.type, parent); + const c = new _a(this.type, parent); for (const p of this.#parts) { c.copyIn(p); } return c; } - static #parseAST(str2, ast, pos, opt) { + static #parseAST(str2, ast, pos, opt, extDepth) { + const maxDepth = opt.maxExtglobRecursion ?? 
2; let escaping = false; let inBrace = false; let braceStart = -1; @@ -107889,11 +108075,12 @@ var require_ast = __commonJS({ acc2 += c; continue; } - if (!opt.noext && isExtglobType(c) && str2.charAt(i2) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i2) === "(" && extDepth <= maxDepth; + if (doRecurse) { ast.push(acc2); acc2 = ""; - const ext = new _AST(c, ast); - i2 = _AST.#parseAST(str2, ext, i2, opt); + const ext = new _a(c, ast); + i2 = _a.#parseAST(str2, ext, i2, opt, extDepth + 1); ast.push(ext); continue; } @@ -107903,7 +108090,7 @@ var require_ast = __commonJS({ return i2; } let i = pos + 1; - let part = new _AST(null, ast); + let part = new _a(null, ast); const parts = []; let acc = ""; while (i < str2.length) { @@ -107930,19 +108117,22 @@ var require_ast = __commonJS({ acc += c; continue; } - if (isExtglobType(c) && str2.charAt(i) === "(") { + const doRecurse = !opt.noext && isExtglobType(c) && str2.charAt(i) === "(" && /* c8 ignore start - the maxDepth is sufficient here */ + (extDepth <= maxDepth || ast && ast.#canAdoptType(c)); + if (doRecurse) { + const depthAdd = ast && ast.#canAdoptType(c) ? 
0 : 1; part.push(acc); acc = ""; - const ext = new _AST(c, part); + const ext = new _a(c, part); part.push(ext); - i = _AST.#parseAST(str2, ext, i, opt); + i = _a.#parseAST(str2, ext, i, opt, extDepth + depthAdd); continue; } if (c === "|") { part.push(acc); acc = ""; parts.push(part); - part = new _AST(null, ast); + part = new _a(null, ast); continue; } if (c === ")") { @@ -107961,9 +108151,71 @@ var require_ast = __commonJS({ ast.#parts = [str2.substring(pos - 1)]; return i; } + #canAdoptWithSpace(child) { + return this.#canAdopt(child, adoptionWithSpaceMap); + } + #canAdopt(child, map2 = adoptionMap) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canAdoptType(gc.type, map2); + } + #canAdoptType(c, map2 = adoptionAnyMap) { + return !!map2.get(this.type)?.includes(c); + } + #adoptWithSpace(child, index) { + const gc = child.#parts[0]; + const blank = new _a(null, gc, this.options); + blank.#parts.push(""); + gc.push(blank); + this.#adopt(child, index); + } + #adopt(child, index) { + const gc = child.#parts[0]; + this.#parts.splice(index, 1, ...gc.#parts); + for (const p of gc.#parts) { + if (typeof p === "object") + p.#parent = this; + } + this.#toString = void 0; + } + #canUsurpType(c) { + const m = usurpMap.get(this.type); + return !!m?.has(c); + } + #canUsurp(child) { + if (!child || typeof child !== "object" || child.type !== null || child.#parts.length !== 1 || this.type === null || this.#parts.length !== 1) { + return false; + } + const gc = child.#parts[0]; + if (!gc || typeof gc !== "object" || gc.type === null) { + return false; + } + return this.#canUsurpType(gc.type); + } + #usurp(child) { + const m = usurpMap.get(this.type); + const gc = child.#parts[0]; + const nt = m?.get(gc.type); + if (!nt) + return false; + this.#parts = gc.#parts; + 
for (const p of this.#parts) { + if (typeof p === "object") { + p.#parent = this; + } + } + this.type = nt; + this.#toString = void 0; + this.#emptyExt = false; + } static fromGlob(pattern, options = {}) { - const ast = new _AST(null, void 0, options); - _AST.#parseAST(pattern, ast, 0, options); + const ast = new _a(null, void 0, options); + _a.#parseAST(pattern, ast, 0, options, 0); return ast; } // returns the regular expression if there's magic, or the unescaped @@ -108057,12 +108309,14 @@ var require_ast = __commonJS({ // or start or whatever) and prepend ^ or / at the Regexp construction. toRegExpSource(allowDot) { const dot = allowDot ?? !!this.#options.dot; - if (this.#root === this) + if (this.#root === this) { + this.#flatten(); this.#fillNegs(); - if (!this.type) { + } + if (!isExtglobAST(this)) { const noEmpty = this.isStart() && this.isEnd() && !this.#parts.some((s) => typeof s !== "string"); const src = this.#parts.map((p) => { - const [re, _2, hasMagic, uflag] = typeof p === "string" ? _AST.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); + const [re, _2, hasMagic, uflag] = typeof p === "string" ? _a.#parseGlob(p, this.#hasMagic, noEmpty) : p.toRegExpSource(allowDot); this.#hasMagic = this.#hasMagic || hasMagic; this.#uflag = this.#uflag || uflag; return re; @@ -108101,9 +108355,10 @@ var require_ast = __commonJS({ let body = this.#partsToRegExp(dot); if (this.isStart() && this.isEnd() && !body && this.type !== "!") { const s = this.toString(); - this.#parts = [s]; - this.type = null; - this.#hasMagic = void 0; + const me = this; + me.#parts = [s]; + me.type = null; + me.#hasMagic = void 0; return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; } let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot ? 
"" : this.#partsToRegExp(true); @@ -108130,6 +108385,38 @@ var require_ast = __commonJS({ this.#uflag ]; } + #flatten() { + if (!isExtglobAST(this)) { + for (const p of this.#parts) { + if (typeof p === "object") { + p.#flatten(); + } + } + } else { + let iterations = 0; + let done = false; + do { + done = true; + for (let i = 0; i < this.#parts.length; i++) { + const c = this.#parts[i]; + if (typeof c === "object") { + c.#flatten(); + if (this.#canAdopt(c)) { + done = false; + this.#adopt(c, i); + } else if (this.#canAdoptWithSpace(c)) { + done = false; + this.#adoptWithSpace(c, i); + } else if (this.#canUsurp(c)) { + done = false; + this.#usurp(c); + } + } + } + } while (!done && ++iterations < 10); + } + this.#toString = void 0; + } #partsToRegExp(dot) { return this.#parts.map((p) => { if (typeof p === "string") { @@ -108191,6 +108478,7 @@ var require_ast = __commonJS({ } }; exports2.AST = AST; + _a = AST; } }); @@ -108375,11 +108663,13 @@ var require_commonjs20 = __commonJS({ isWindows; platform; windowsNoMagicRoot; + maxGlobstarRecursion; regexp; constructor(pattern, options = {}) { (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); options = options || {}; this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion ?? 200; this.pattern = pattern; this.platform = options.platform || defaultPlatform; this.isWindows = this.platform === "win32"; @@ -108716,7 +109006,8 @@ var require_commonjs20 = __commonJS({ // out of pattern, then that's fine, as long as all // the parts match. matchOne(file, pattern, partial = false) { - const options = this.options; + let fileStartIndex = 0; + let patternStartIndex = 0; if (this.isWindows) { const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]); const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" 
&& /^[a-z]:$/i.test(file[3]); @@ -108731,11 +109022,8 @@ var require_commonjs20 = __commonJS({ ]; if (fd.toLowerCase() === pd.toLowerCase()) { pattern[pdi] = fd; - if (pdi > fdi) { - pattern = pattern.slice(pdi); - } else if (fdi > pdi) { - file = file.slice(fdi); - } + patternStartIndex = pdi; + fileStartIndex = fdi; } } } @@ -108743,49 +109031,123 @@ var require_commonjs20 = __commonJS({ if (optimizationLevel >= 2) { file = this.levelTwoFileOptimize(file); } - this.debug("matchOne", this, { file, pattern }); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) { + if (pattern.includes(exports2.GLOBSTAR)) { + return this.#matchGlobstar(file, pattern, partial, fileStartIndex, patternStartIndex); + } + return this.#matchOne(file, pattern, partial, fileStartIndex, patternStartIndex); + } + #matchGlobstar(file, pattern, partial, fileIndex, patternIndex) { + const firstgs = pattern.indexOf(exports2.GLOBSTAR, patternIndex); + const lastgs = pattern.lastIndexOf(exports2.GLOBSTAR); + const [head, body, tail] = partial ? [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1), + [] + ] : [ + pattern.slice(patternIndex, firstgs), + pattern.slice(firstgs + 1, lastgs), + pattern.slice(lastgs + 1) + ]; + if (head.length) { + const fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this.#matchOne(fileHead, head, partial, 0, 0)) { return false; } - if (p === exports2.GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." 
|| !options.dot && file[fi].charAt(0) === ".") - return false; - } - return true; + fileIndex += head.length; + patternIndex += head.length; + } + let fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) + return false; + let tailStart = file.length - tail.length; + if (this.#matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } + tailStart--; + if (!this.#matchOne(file, tail, partial, tailStart, 0)) { + return false; } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) { - return true; - } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + let sawSome = !!fileTailMatch; + for (let i2 = fileIndex; i2 < file.length - fileTailMatch; i2++) { + const f = String(file[i2]); + sawSome = true; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; } + } + return partial || sawSome; + } + const bodySegments = [[[], 0]]; + let currentBody = bodySegments[0]; + let nonGsParts = 0; + const nonGsPartsSums = [0]; + for (const b of body) { + if (b === exports2.GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + let i = bodySegments.length - 1; + const fileLength = file.length - fileTailMatch; + for (const b of bodySegments) { + b[1] = fileLength - (nonGsPartsSums[i--] + b[0].length); + } + return !!this.#matchGlobStarBodySections(file, bodySegments, fileIndex, 0, partial, 0, !!fileTailMatch); + } + // return false for "nope, not matching" + // return null for "not matching, cannot keep trying" + #matchGlobStarBodySections(file, bodySegments, fileIndex, bodyIndex, partial, globStarDepth, sawTail) { + const bs = bodySegments[bodyIndex]; + if (!bs) { + for (let i = fileIndex; i < file.length; i++) { + sawTail = true; + const f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.startsWith(".")) { + return false; + } + } + return sawTail; + } + const [body, after] = bs; + while (fileIndex <= after) { + const m = this.#matchOne(file.slice(0, fileIndex + body.length), body, partial, fileIndex, 0); + if (m && globStarDepth < this.maxGlobstarRecursion) { + const sub = this.#matchGlobStarBodySections(file, bodySegments, fileIndex + body.length, bodyIndex + 1, partial, globStarDepth + 1, sawTail); + if (sub !== false) { + return sub; + } + } + const f = file[fileIndex]; + if (f === "." || f === ".." 
|| !this.options.dot && f.startsWith(".")) { + return false; + } + fileIndex++; + } + return partial || null; + } + #matchOne(file, pattern, partial, fileIndex, patternIndex) { + let fi; + let pi; + let pl; + let fl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + let p = pattern[pi]; + let f = file[fi]; + this.debug(pattern, p, f); + if (p === false || p === exports2.GLOBSTAR) { return false; } let hit; @@ -150788,6 +151150,7 @@ var require_minimatch2 = __commonJS({ pattern = pattern.split(path3.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -151184,50 +151547,147 @@ var require_minimatch2 = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? 
[] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, 
partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -160637,7 +161097,7 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); // src/api-compatibility.json -var maximumVersion = "3.20"; +var maximumVersion = "3.21"; var minimumVersion = "3.14"; // src/util.ts @@ -161335,6 +161795,11 @@ var featureConfig = { // cannot be found when interpreting results. minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -161346,11 +161811,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index d31044fda..ab769c22f 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -204,7 +204,7 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto2 = __importStar2(require("crypto")); - var fs13 = 
__importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var os3 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -212,10 +212,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs13.existsSync(filePath)) { + if (!fs14.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, { + fs14.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, { encoding: "utf8" }); } @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var path13 = __importStar2(require("path")); - _a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + _a = fs14.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs13.promises.readlink(fsPath); + const result = yield fs14.promises.readlink(fsPath); if (exports2.IS_WINDOWS && !result.endsWith("\\")) { 
return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs13.constants.O_RDONLY; + exports2.READONLY = fs14.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -45986,7 +45986,7 @@ var require_package = __commonJS({ "package.json"(exports2, module2) { module2.exports = { name: "codeql", - version: "4.32.5", + version: "4.32.7", private: true, description: "CodeQL action", scripts: { @@ -45995,7 +45995,7 @@ var require_package = __commonJS({ lint: "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix", - ava: "npm run transpile && ava --serial --verbose", + ava: "npm run transpile && ava --verbose", test: "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", transpile: "tsc --build --verbose" @@ -46044,6 +46044,7 @@ var require_package = __commonJS({ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", ava: "^6.4.1", @@ -46052,14 +46053,14 @@ var require_package = __commonJS({ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", glob: "^11.1.0", - globals: "^16.5.0", + globals: "^17.3.0", nock: "^14.0.11", sinon: "^21.0.1", typescript: "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, overrides: { "@actions/tool-cache": { @@ -48064,6 +48065,7 @@ var require_minimatch = __commonJS({ pattern = 
pattern.split(path13.sep).join("/"); } this.options = options; + this.maxGlobstarRecursion = options.maxGlobstarRecursion !== void 0 ? options.maxGlobstarRecursion : 200; this.set = []; this.pattern = pattern; this.regexp = null; @@ -48460,50 +48462,147 @@ var require_minimatch = __commonJS({ return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } + if (pattern.indexOf(GLOBSTAR) !== -1) { + return this._matchGlobstar(file, pattern, partial, 0, 0); + } + return this._matchOne(file, pattern, partial, 0, 0); + }; + Minimatch.prototype._matchGlobstar = function(file, pattern, partial, fileIndex, patternIndex) { + var i; + var firstgs = -1; + for (i = patternIndex; i < pattern.length; i++) { + if (pattern[i] === GLOBSTAR) { + firstgs = i; + break; + } + } + var lastgs = -1; + for (i = pattern.length - 1; i >= 0; i--) { + if (pattern[i] === GLOBSTAR) { + lastgs = i; + break; + } + } + var head = pattern.slice(patternIndex, firstgs); + var body = partial ? pattern.slice(firstgs + 1) : pattern.slice(firstgs + 1, lastgs); + var tail = partial ? 
[] : pattern.slice(lastgs + 1); + if (head.length) { + var fileHead = file.slice(fileIndex, fileIndex + head.length); + if (!this._matchOne(fileHead, head, partial, 0, 0)) { + return false; + } + fileIndex += head.length; + } + var fileTailMatch = 0; + if (tail.length) { + if (tail.length + fileIndex > file.length) return false; + var tailStart = file.length - tail.length; + if (this._matchOne(file, tail, partial, tailStart, 0)) { + fileTailMatch = tail.length; + } else { + if (file[file.length - 1] !== "" || fileIndex + tail.length === file.length) { + return false; + } + tailStart--; + if (!this._matchOne(file, tail, partial, tailStart, 0)) { + return false; + } + fileTailMatch = tail.length + 1; + } + } + if (!body.length) { + var sawSome = !!fileTailMatch; + for (i = fileIndex; i < file.length - fileTailMatch; i++) { + var f = String(file[i]); + sawSome = true; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return partial || sawSome; + } + var bodySegments = [[[], 0]]; + var currentBody = bodySegments[0]; + var nonGsParts = 0; + var nonGsPartsSums = [0]; + for (var bi = 0; bi < body.length; bi++) { + var b = body[bi]; + if (b === GLOBSTAR) { + nonGsPartsSums.push(nonGsParts); + currentBody = [[], 0]; + bodySegments.push(currentBody); + } else { + currentBody[0].push(b); + nonGsParts++; + } + } + var idx = bodySegments.length - 1; + var fileLength = file.length - fileTailMatch; + for (var si = 0; si < bodySegments.length; si++) { + bodySegments[si][1] = fileLength - (nonGsPartsSums[idx--] + bodySegments[si][0].length); + } + return !!this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex, + 0, + partial, + 0, + !!fileTailMatch ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + }; + Minimatch.prototype._matchGlobStarBodySections = function(file, bodySegments, fileIndex, bodyIndex, 
partial, globStarDepth, sawTail) { + var bs = bodySegments[bodyIndex]; + if (!bs) { + for (var i = fileIndex; i < file.length; i++) { + sawTail = true; + var f = file[i]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + } + return sawTail; + } + var body = bs[0]; + var after = bs[1]; + while (fileIndex <= after) { + var m = this._matchOne( + file.slice(0, fileIndex + body.length), + body, + partial, + fileIndex, + 0 + ); + if (m && globStarDepth < this.maxGlobstarRecursion) { + var sub = this._matchGlobStarBodySections( + file, + bodySegments, + fileIndex + body.length, + bodyIndex + 1, + partial, + globStarDepth + 1, + sawTail + ); + if (sub !== false) { + return sub; + } + } + var f = file[fileIndex]; + if (f === "." || f === ".." || !this.options.dot && f.charAt(0) === ".") { + return false; + } + fileIndex++; + } + return partial || null; + }; + Minimatch.prototype._matchOne = function(file, pattern, partial, fileIndex, patternIndex) { + var fi, pi, fl, pl; + for (fi = fileIndex, pi = patternIndex, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } + if (p === false || p === GLOBSTAR) return false; var hit; if (typeof p === "string") { hit = f === p; @@ -49008,7 +49107,7 @@ var require_internal_globber = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; var core14 = __importStar2(require_core()); - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path13 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); @@ -49062,7 +49161,7 @@ var require_internal_globber = __commonJS({ for (const searchPath of patternHelper.getSearchPaths(patterns)) { core14.debug(`Search path '${searchPath}'`); try { - yield __await2(fs13.promises.lstat(searchPath)); + yield __await2(fs14.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") { continue; @@ -49096,7 +49195,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs14.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -49131,7 +49230,7 @@ var require_internal_globber = __commonJS({ let stats; if (options.followSymbolicLinks) { try { - stats = yield 
fs13.promises.stat(item.path); + stats = yield fs14.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { @@ -49143,10 +49242,10 @@ var require_internal_globber = __commonJS({ throw err; } } else { - stats = yield fs13.promises.lstat(item.path); + stats = yield fs14.promises.lstat(item.path); } if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs13.promises.realpath(item.path); + const realPath = yield fs14.promises.realpath(item.path); while (traversalChain.length >= item.level) { traversalChain.pop(); } @@ -49255,7 +49354,7 @@ var require_internal_hash_files = __commonJS({ exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); var core14 = __importStar2(require_core()); - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var path13 = __importStar2(require("path")); @@ -49278,13 +49377,13 @@ var require_internal_hash_files = __commonJS({ writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } - if (fs13.statSync(file).isDirectory()) { + if (fs14.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash2 = crypto2.createHash("sha256"); const pipeline = util.promisify(stream2.pipeline); - yield pipeline(fs13.createReadStream(file), hash2); + yield pipeline(fs14.createReadStream(file), hash2); result.write(hash2.digest()); count++; if (!hasMatch) { @@ -50659,7 +50758,7 @@ var require_cacheUtils = __commonJS({ var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var path13 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); @@ 
-50688,7 +50787,7 @@ var require_cacheUtils = __commonJS({ }); } function getArchiveFileSizeInBytes(filePath) { - return fs13.statSync(filePath).size; + return fs14.statSync(filePath).size; } function resolvePaths(patterns) { return __awaiter2(this, void 0, void 0, function* () { @@ -50726,7 +50825,7 @@ var require_cacheUtils = __commonJS({ } function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs13.unlink)(filePath); + return util.promisify(fs14.unlink)(filePath); }); } function getVersion(app_1) { @@ -50768,7 +50867,7 @@ var require_cacheUtils = __commonJS({ } function getGnuTarPathOnWindows() { return __awaiter2(this, void 0, void 0, function* () { - if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) { + if (fs14.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion("tar"); @@ -60543,7 +60642,7 @@ var require_fxp = __commonJS({ }, o: (t2, e2) => Object.prototype.hasOwnProperty.call(t2, e2), r: (t2) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t2, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t2, "__esModule", { value: true }); } }, e = {}; - t.r(e), t.d(e, { XMLBuilder: () => dt, XMLParser: () => it, XMLValidator: () => gt }); + t.r(e), t.d(e, { XMLBuilder: () => gt, XMLParser: () => it, XMLValidator: () => xt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + "\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s(t2, e2) { const n2 = []; @@ -60565,90 +60664,90 @@ var require_fxp = __commonJS({ const n2 = []; let i2 = false, s2 = false; "\uFEFF" === t2[0] && (t2 = t2.substr(1)); - for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2] && "?" 
=== t2[o2 + 1]) { - if (o2 += 2, o2 = u(t2, o2), o2.err) return o2; + for (let r2 = 0; r2 < t2.length; r2++) if ("<" === t2[r2] && "?" === t2[r2 + 1]) { + if (r2 += 2, r2 = u(t2, r2), r2.err) return r2; } else { - if ("<" !== t2[o2]) { - if (l(t2[o2])) continue; - return m("InvalidChar", "char '" + t2[o2] + "' is not expected.", b(t2, o2)); + if ("<" !== t2[r2]) { + if (l(t2[r2])) continue; + return m("InvalidChar", "char '" + t2[r2] + "' is not expected.", N(t2, r2)); } { - let a2 = o2; - if (o2++, "!" === t2[o2]) { - o2 = h(t2, o2); + let o2 = r2; + if (r2++, "!" === t2[r2]) { + r2 = d(t2, r2); continue; } { - let d2 = false; - "/" === t2[o2] && (d2 = true, o2++); - let p2 = ""; - for (; o2 < t2.length && ">" !== t2[o2] && " " !== t2[o2] && " " !== t2[o2] && "\n" !== t2[o2] && "\r" !== t2[o2]; o2++) p2 += t2[o2]; - if (p2 = p2.trim(), "/" === p2[p2.length - 1] && (p2 = p2.substring(0, p2.length - 1), o2--), !r(p2)) { + let a2 = false; + "/" === t2[r2] && (a2 = true, r2++); + let h2 = ""; + for (; r2 < t2.length && ">" !== t2[r2] && " " !== t2[r2] && " " !== t2[r2] && "\n" !== t2[r2] && "\r" !== t2[r2]; r2++) h2 += t2[r2]; + if (h2 = h2.trim(), "/" === h2[h2.length - 1] && (h2 = h2.substring(0, h2.length - 1), r2--), !b(h2)) { let e3; - return e3 = 0 === p2.trim().length ? "Invalid space after '<'." : "Tag '" + p2 + "' is an invalid name.", m("InvalidTag", e3, b(t2, o2)); + return e3 = 0 === h2.trim().length ? "Invalid space after '<'." 
: "Tag '" + h2 + "' is an invalid name.", m("InvalidTag", e3, N(t2, r2)); } - const c2 = f(t2, o2); - if (false === c2) return m("InvalidAttr", "Attributes for '" + p2 + "' have open quote.", b(t2, o2)); - let E2 = c2.value; - if (o2 = c2.index, "/" === E2[E2.length - 1]) { - const n3 = o2 - E2.length; - E2 = E2.substring(0, E2.length - 1); - const s3 = g(E2, e2); - if (true !== s3) return m(s3.err.code, s3.err.msg, b(t2, n3 + s3.err.line)); + const p2 = c(t2, r2); + if (false === p2) return m("InvalidAttr", "Attributes for '" + h2 + "' have open quote.", N(t2, r2)); + let f2 = p2.value; + if (r2 = p2.index, "/" === f2[f2.length - 1]) { + const n3 = r2 - f2.length; + f2 = f2.substring(0, f2.length - 1); + const s3 = g(f2, e2); + if (true !== s3) return m(s3.err.code, s3.err.msg, N(t2, n3 + s3.err.line)); i2 = true; - } else if (d2) { - if (!c2.tagClosed) return m("InvalidTag", "Closing tag '" + p2 + "' doesn't have proper closing.", b(t2, o2)); - if (E2.trim().length > 0) return m("InvalidTag", "Closing tag '" + p2 + "' can't have attributes or invalid starting.", b(t2, a2)); - if (0 === n2.length) return m("InvalidTag", "Closing tag '" + p2 + "' has not been opened.", b(t2, a2)); + } else if (a2) { + if (!p2.tagClosed) return m("InvalidTag", "Closing tag '" + h2 + "' doesn't have proper closing.", N(t2, r2)); + if (f2.trim().length > 0) return m("InvalidTag", "Closing tag '" + h2 + "' can't have attributes or invalid starting.", N(t2, o2)); + if (0 === n2.length) return m("InvalidTag", "Closing tag '" + h2 + "' has not been opened.", N(t2, o2)); { const e3 = n2.pop(); - if (p2 !== e3.tagName) { - let n3 = b(t2, e3.tagStartPos); - return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + n3.col + ") instead of closing tag '" + p2 + "'.", b(t2, a2)); + if (h2 !== e3.tagName) { + let n3 = N(t2, e3.tagStartPos); + return m("InvalidTag", "Expected closing tag '" + e3.tagName + "' (opened in line " + n3.line + ", col " + 
n3.col + ") instead of closing tag '" + h2 + "'.", N(t2, o2)); } 0 == n2.length && (s2 = true); } } else { - const r2 = g(E2, e2); - if (true !== r2) return m(r2.err.code, r2.err.msg, b(t2, o2 - E2.length + r2.err.line)); - if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", b(t2, o2)); - -1 !== e2.unpairedTags.indexOf(p2) || n2.push({ tagName: p2, tagStartPos: a2 }), i2 = true; + const a3 = g(f2, e2); + if (true !== a3) return m(a3.err.code, a3.err.msg, N(t2, r2 - f2.length + a3.err.line)); + if (true === s2) return m("InvalidXml", "Multiple possible root nodes found.", N(t2, r2)); + -1 !== e2.unpairedTags.indexOf(h2) || n2.push({ tagName: h2, tagStartPos: o2 }), i2 = true; } - for (o2++; o2 < t2.length; o2++) if ("<" === t2[o2]) { - if ("!" === t2[o2 + 1]) { - o2++, o2 = h(t2, o2); + for (r2++; r2 < t2.length; r2++) if ("<" === t2[r2]) { + if ("!" === t2[r2 + 1]) { + r2++, r2 = d(t2, r2); continue; } - if ("?" !== t2[o2 + 1]) break; - if (o2 = u(t2, ++o2), o2.err) return o2; - } else if ("&" === t2[o2]) { - const e3 = x(t2, o2); - if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", b(t2, o2)); - o2 = e3; - } else if (true === s2 && !l(t2[o2])) return m("InvalidXml", "Extra text at the end", b(t2, o2)); - "<" === t2[o2] && o2--; + if ("?" !== t2[r2 + 1]) break; + if (r2 = u(t2, ++r2), r2.err) return r2; + } else if ("&" === t2[r2]) { + const e3 = x(t2, r2); + if (-1 == e3) return m("InvalidChar", "char '&' is not expected.", N(t2, r2)); + r2 = e3; + } else if (true === s2 && !l(t2[r2])) return m("InvalidXml", "Extra text at the end", N(t2, r2)); + "<" === t2[r2] && r2--; } } } - return i2 ? 1 == n2.length ? m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", b(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map(((t3) => t3.tagName)), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); + return i2 ? 1 == n2.length ? 
m("InvalidTag", "Unclosed tag '" + n2[0].tagName + "'.", N(t2, n2[0].tagStartPos)) : !(n2.length > 0) || m("InvalidXml", "Invalid '" + JSON.stringify(n2.map((t3) => t3.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : m("InvalidXml", "Start tag expected.", 1); } function l(t2) { return " " === t2 || " " === t2 || "\n" === t2 || "\r" === t2; } function u(t2, e2) { const n2 = e2; - for (; e2 < t2.length; e2++) if ("?" != t2[e2] && " " != t2[e2]) ; - else { + for (; e2 < t2.length; e2++) if ("?" == t2[e2] || " " == t2[e2]) { const i2 = t2.substr(n2, e2 - n2); - if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", b(t2, e2)); + if (e2 > 5 && "xml" === i2) return m("InvalidXml", "XML declaration allowed only at the start of the document.", N(t2, e2)); if ("?" == t2[e2] && ">" == t2[e2 + 1]) { e2++; break; } + continue; } return e2; } - function h(t2, e2) { + function d(t2, e2) { if (t2.length > e2 + 5 && "-" === t2[e2 + 1] && "-" === t2[e2 + 2]) { for (e2 += 3; e2 < t2.length; e2++) if ("-" === t2[e2] && "-" === t2[e2 + 1] && ">" === t2[e2 + 2]) { e2 += 2; @@ -60666,11 +60765,11 @@ var require_fxp = __commonJS({ } return e2; } - const d = '"', p = "'"; - function f(t2, e2) { + const h = '"', p = "'"; + function c(t2, e2) { let n2 = "", i2 = "", s2 = false; for (; e2 < t2.length; e2++) { - if (t2[e2] === d || t2[e2] === p) "" === i2 ? i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); + if (t2[e2] === h || t2[e2] === p) "" === i2 ? 
i2 = t2[e2] : i2 !== t2[e2] || (i2 = ""); else if (">" === t2[e2] && "" === i2) { s2 = true; break; @@ -60679,16 +60778,16 @@ var require_fxp = __commonJS({ } return "" === i2 && { value: n2, index: e2, tagClosed: s2 }; } - const c = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); + const f = new RegExp(`(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['"])(([\\s\\S])*?)\\5)?`, "g"); function g(t2, e2) { - const n2 = s(t2, c), i2 = {}; + const n2 = s(t2, f), i2 = {}; for (let t3 = 0; t3 < n2.length; t3++) { - if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", N(n2[t3])); - if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", N(n2[t3])); - if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", N(n2[t3])); + if (0 === n2[t3][1].length) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' has no space in starting.", y(n2[t3])); + if (void 0 !== n2[t3][3] && void 0 === n2[t3][4]) return m("InvalidAttr", "Attribute '" + n2[t3][2] + "' is without value.", y(n2[t3])); + if (void 0 === n2[t3][3] && !e2.allowBooleanAttributes) return m("InvalidAttr", "boolean attribute '" + n2[t3][2] + "' is not allowed.", y(n2[t3])); const s2 = n2[t3][2]; - if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", N(n2[t3])); - if (i2.hasOwnProperty(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", N(n2[t3])); + if (!E(s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is an invalid name.", y(n2[t3])); + if (Object.prototype.hasOwnProperty.call(i2, s2)) return m("InvalidAttr", "Attribute '" + s2 + "' is repeated.", y(n2[t3])); i2[s2] = 1; } return true; @@ -60716,49 +60815,52 @@ var require_fxp = __commonJS({ function E(t2) { return r(t2); } - function b(t2, e2) { + function b(t2) { + return r(t2); + } + function N(t2, e2) { 
const n2 = t2.substring(0, e2).split(/\r?\n/); return { line: n2.length, col: n2[n2.length - 1].length + 1 }; } - function N(t2) { + function y(t2) { return t2.startIndex + t2[1].length; } - const y = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { + const T = { preserveOrder: false, attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, removeNSPrefix: false, allowBooleanAttributes: false, parseTagValue: true, parseAttributeValue: false, trimValues: true, cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, stopNodes: [], alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(t2, e2, n2) { return t2; - }, captureMetaData: false }; - function T(t2) { - return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? 
null } : T(true); + }, captureMetaData: false, maxNestedTags: 100, strictReservedNames: true }; + function w(t2) { + return "boolean" == typeof t2 ? { enabled: t2, maxEntitySize: 1e4, maxExpansionDepth: 10, maxTotalExpansions: 1e3, maxExpandedLength: 1e5, allowedTags: null, tagFilter: null } : "object" == typeof t2 && null !== t2 ? { enabled: false !== t2.enabled, maxEntitySize: t2.maxEntitySize ?? 1e4, maxExpansionDepth: t2.maxExpansionDepth ?? 10, maxTotalExpansions: t2.maxTotalExpansions ?? 1e3, maxExpandedLength: t2.maxExpandedLength ?? 1e5, allowedTags: t2.allowedTags ?? null, tagFilter: t2.tagFilter ?? null } : w(true); } - const w = function(t2) { - const e2 = Object.assign({}, y, t2); - return e2.processEntities = T(e2.processEntities), e2; + const v = function(t2) { + const e2 = Object.assign({}, T, t2); + return e2.processEntities = w(e2.processEntities), e2; }; - let v; - v = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); + let O; + O = "function" != typeof Symbol ? "@@xmlMetadata" : /* @__PURE__ */ Symbol("XML Node Metadata"); class I { constructor(t2) { - this.tagname = t2, this.child = [], this[":@"] = {}; + this.tagname = t2, this.child = [], this[":@"] = /* @__PURE__ */ Object.create(null); } add(t2, e2) { "__proto__" === t2 && (t2 = "#__proto__"), this.child.push({ [t2]: e2 }); } addChild(t2, e2) { - "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][v] = { startIndex: e2 }); + "__proto__" === t2.tagname && (t2.tagname = "#__proto__"), t2[":@"] && Object.keys(t2[":@"]).length > 0 ? 
this.child.push({ [t2.tagname]: t2.child, ":@": t2[":@"] }) : this.child.push({ [t2.tagname]: t2.child }), void 0 !== e2 && (this.child[this.child.length - 1][O] = { startIndex: e2 }); } static getMetaDataSymbol() { - return v; + return O; } } - class O { + class P { constructor(t2) { this.suppressValidationErr = !t2, this.options = t2; } readDocType(t2, e2) { - const n2 = {}; + const n2 = /* @__PURE__ */ Object.create(null); if ("O" !== t2[e2 + 3] || "C" !== t2[e2 + 4] || "T" !== t2[e2 + 5] || "Y" !== t2[e2 + 6] || "P" !== t2[e2 + 7] || "E" !== t2[e2 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e2 += 9; @@ -60767,23 +60869,23 @@ var require_fxp = __commonJS({ if (r2 ? "-" === t2[e2 - 1] && "-" === t2[e2 - 2] && (r2 = false, i2--) : i2--, 0 === i2) break; } else "[" === t2[e2] ? s2 = true : o2 += t2[e2]; else { - if (s2 && A(t2, "!ENTITY", e2)) { + if (s2 && S(t2, "!ENTITY", e2)) { let i3, s3; if (e2 += 7, [i3, s3, e2] = this.readEntityExp(t2, e2 + 1, this.suppressValidationErr), -1 === s3.indexOf("&")) { const t3 = i3.replace(/[.\-+*:]/g, "\\."); n2[i3] = { regx: RegExp(`&${t3};`, "g"), val: s3 }; } - } else if (s2 && A(t2, "!ELEMENT", e2)) { + } else if (s2 && S(t2, "!ELEMENT", e2)) { e2 += 8; const { index: n3 } = this.readElementExp(t2, e2 + 1); e2 = n3; - } else if (s2 && A(t2, "!ATTLIST", e2)) e2 += 8; - else if (s2 && A(t2, "!NOTATION", e2)) { + } else if (s2 && S(t2, "!ATTLIST", e2)) e2 += 8; + else if (s2 && S(t2, "!NOTATION", e2)) { e2 += 9; const { index: n3 } = this.readNotationExp(t2, e2 + 1, this.suppressValidationErr); e2 = n3; } else { - if (!A(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); + if (!S(t2, "!--", e2)) throw new Error("Invalid DOCTYPE"); r2 = true; } i2++, o2 = ""; @@ -60793,10 +60895,10 @@ var require_fxp = __commonJS({ return { entities: n2, i: e2 }; } readEntityExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]) && '"' !== t2[e2] && "'" !== t2[e2]; ) n2 += 
t2[e2], e2++; - if (S(n2), e2 = P(t2, e2), !this.suppressValidationErr) { + if (C(n2), e2 = A(t2, e2), !this.suppressValidationErr) { if ("SYSTEM" === t2.substring(e2, e2 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t2[e2]) throw new Error("Parameter entities are not supported"); } @@ -60805,15 +60907,15 @@ var require_fxp = __commonJS({ return [n2, i2, --e2]; } readNotationExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - !this.suppressValidationErr && S(n2), e2 = P(t2, e2); + !this.suppressValidationErr && C(n2), e2 = A(t2, e2); const i2 = t2.substring(e2, e2 + 6).toUpperCase(); if (!this.suppressValidationErr && "SYSTEM" !== i2 && "PUBLIC" !== i2) throw new Error(`Expected SYSTEM or PUBLIC, found "${i2}"`); - e2 += i2.length, e2 = P(t2, e2); + e2 += i2.length, e2 = A(t2, e2); let s2 = null, r2 = null; - if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = P(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); + if ("PUBLIC" === i2) [e2, s2] = this.readIdentifierVal(t2, e2, "publicIdentifier"), '"' !== t2[e2 = A(t2, e2)] && "'" !== t2[e2] || ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier")); else if ("SYSTEM" === i2 && ([e2, r2] = this.readIdentifierVal(t2, e2, "systemIdentifier"), !this.suppressValidationErr && !r2)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n2, publicIdentifier: s2, systemIdentifier: r2, index: --e2 }; } @@ -60826,13 +60928,13 @@ var require_fxp = __commonJS({ return [++e2, i2]; } readElementExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; if (!this.suppressValidationErr && !r(n2)) throw new Error(`Invalid element name: "${n2}"`); let i2 = ""; - if ("E" === t2[e2 = P(t2, e2)] && A(t2, 
"MPTY", e2)) e2 += 4; - else if ("A" === t2[e2] && A(t2, "NY", e2)) e2 += 2; + if ("E" === t2[e2 = A(t2, e2)] && S(t2, "MPTY", e2)) e2 += 4; + else if ("A" === t2[e2] && S(t2, "NY", e2)) e2 += 2; else if ("(" === t2[e2]) { for (e2++; e2 < t2.length && ")" !== t2[e2]; ) i2 += t2[e2], e2++; if (")" !== t2[e2]) throw new Error("Unterminated content model"); @@ -60840,24 +60942,24 @@ var require_fxp = __commonJS({ return { elementName: n2, contentModel: i2.trim(), index: e2 }; } readAttlistExp(t2, e2) { - e2 = P(t2, e2); + e2 = A(t2, e2); let n2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) n2 += t2[e2], e2++; - S(n2), e2 = P(t2, e2); + C(n2), e2 = A(t2, e2); let i2 = ""; for (; e2 < t2.length && !/\s/.test(t2[e2]); ) i2 += t2[e2], e2++; - if (!S(i2)) throw new Error(`Invalid attribute name: "${i2}"`); - e2 = P(t2, e2); + if (!C(i2)) throw new Error(`Invalid attribute name: "${i2}"`); + e2 = A(t2, e2); let s2 = ""; if ("NOTATION" === t2.substring(e2, e2 + 8).toUpperCase()) { - if (s2 = "NOTATION", "(" !== t2[e2 = P(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); + if (s2 = "NOTATION", "(" !== t2[e2 = A(t2, e2 += 8)]) throw new Error(`Expected '(', found "${t2[e2]}"`); e2++; let n3 = []; for (; e2 < t2.length && ")" !== t2[e2]; ) { let i3 = ""; for (; e2 < t2.length && "|" !== t2[e2] && ")" !== t2[e2]; ) i3 += t2[e2], e2++; - if (i3 = i3.trim(), !S(i3)) throw new Error(`Invalid notation name: "${i3}"`); - n3.push(i3), "|" === t2[e2] && (e2++, e2 = P(t2, e2)); + if (i3 = i3.trim(), !C(i3)) throw new Error(`Invalid notation name: "${i3}"`); + n3.push(i3), "|" === t2[e2] && (e2++, e2 = A(t2, e2)); } if (")" !== t2[e2]) throw new Error("Unterminated list of notations"); e2++, s2 += " (" + n3.join("|") + ")"; @@ -60866,45 +60968,43 @@ var require_fxp = __commonJS({ const n3 = ["CDATA", "ID", "IDREF", "IDREFS", "ENTITY", "ENTITIES", "NMTOKEN", "NMTOKENS"]; if (!this.suppressValidationErr && !n3.includes(s2.toUpperCase())) throw new Error(`Invalid 
attribute type: "${s2}"`); } - e2 = P(t2, e2); + e2 = A(t2, e2); let r2 = ""; return "#REQUIRED" === t2.substring(e2, e2 + 8).toUpperCase() ? (r2 = "#REQUIRED", e2 += 8) : "#IMPLIED" === t2.substring(e2, e2 + 7).toUpperCase() ? (r2 = "#IMPLIED", e2 += 7) : [e2, r2] = this.readIdentifierVal(t2, e2, "ATTLIST"), { elementName: n2, attributeName: i2, attributeType: s2, defaultValue: r2, index: e2 }; } } - const P = (t2, e2) => { + const A = (t2, e2) => { for (; e2 < t2.length && /\s/.test(t2[e2]); ) e2++; return e2; }; - function A(t2, e2, n2) { + function S(t2, e2, n2) { for (let i2 = 0; i2 < e2.length; i2++) if (e2[i2] !== t2[n2 + i2 + 1]) return false; return true; } - function S(t2) { + function C(t2) { if (r(t2)) return t2; throw new Error(`Invalid entity name ${t2}`); } - const C = /^[-+]?0x[a-fA-F0-9]+$/, $ = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; - const D = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; - function L(t2) { - return "function" == typeof t2 ? t2 : Array.isArray(t2) ? 
(e2) => { - for (const n2 of t2) { - if ("string" == typeof n2 && e2 === n2) return true; - if (n2 instanceof RegExp && n2.test(e2)) return true; - } - } : () => false; - } - class F { + const $ = /^[-+]?0x[a-fA-F0-9]+$/, V = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, D = { hex: true, leadingZeros: true, decimalPoint: ".", eNotation: true }; + const j = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; + class L { constructor(t2) { - if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e2) => K(e2, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e2) => K(e2, 16, "&#x") } }, this.addExternalEntities = j, this.parseXml = B, this.parseTextData = M, this.resolveNameSpace = _, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = R, this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { + var e2; + if (this.options = t2, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { 
regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: '"' } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "\xA2" }, pound: { regex: /&(pound|#163);/g, val: "\xA3" }, yen: { regex: /&(yen|#165);/g, val: "\xA5" }, euro: { regex: /&(euro|#8364);/g, val: "\u20AC" }, copyright: { regex: /&(copy|#169);/g, val: "\xA9" }, reg: { regex: /&(reg|#174);/g, val: "\xAE" }, inr: { regex: /&(inr|#8377);/g, val: "\u20B9" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t3, e3) => K(e3, 10, "&#") }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t3, e3) => K(e3, 16, "&#x") } }, this.addExternalEntities = F, this.parseXml = R, this.parseTextData = M, this.resolveNameSpace = k, this.buildAttributesMap = U, this.isItStopNode = X, this.replaceEntitiesValue = Y, this.readStopNodeData = q, this.saveTextToParentTag = G, this.addChild = B, this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.entityExpansionCount = 0, this.currentExpandedLength = 0, this.options.stopNodes && this.options.stopNodes.length > 0) { this.stopNodesExact = /* @__PURE__ */ new Set(), this.stopNodesWildcard = /* @__PURE__ */ new Set(); for (let t3 = 0; t3 < this.options.stopNodes.length; t3++) { - const e2 = this.options.stopNodes[t3]; - "string" == typeof e2 && (e2.startsWith("*.") ? this.stopNodesWildcard.add(e2.substring(2)) : this.stopNodesExact.add(e2)); + const e3 = this.options.stopNodes[t3]; + "string" == typeof e3 && (e3.startsWith("*.") ? 
this.stopNodesWildcard.add(e3.substring(2)) : this.stopNodesExact.add(e3)); } } } } - function j(t2) { + function F(t2) { const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2], s2 = i2.replace(/[.\-+*:]/g, "\\."); @@ -60918,7 +61018,7 @@ var require_fxp = __commonJS({ return null == i3 ? t2 : typeof i3 != typeof t2 || i3 !== t2 ? i3 : this.options.trimValues || t2.trim() === t2 ? Z(t2, this.options.parseTagValue, this.options.numberParseOptions) : t2; } } - function _(t2) { + function k(t2) { if (this.options.removeNSPrefix) { const e2 = t2.split(":"), n2 = "/" === t2.charAt(0) ? "/" : ""; if ("xmlns" === e2[0]) return ""; @@ -60926,10 +61026,10 @@ var require_fxp = __commonJS({ } return t2; } - const k = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); + const _ = new RegExp(`([^\\s=]+)\\s*(=\\s*(['"])([\\s\\S]*?)\\3)?`, "gm"); function U(t2, e2, n2) { if (true !== this.options.ignoreAttributes && "string" == typeof t2) { - const i2 = s(t2, k), r2 = i2.length, o2 = {}; + const i2 = s(t2, _), r2 = i2.length, o2 = {}; for (let t3 = 0; t3 < r2; t3++) { const s2 = this.resolveNameSpace(i2[t3][1]); if (this.ignoreAttributesFn(s2, e2)) continue; @@ -60948,12 +61048,12 @@ var require_fxp = __commonJS({ return o2; } } - const B = function(t2) { + const R = function(t2) { t2 = t2.replace(/\r\n?/g, "\n"); const e2 = new I("!xml"); let n2 = e2, i2 = "", s2 = ""; this.entityExpansionCount = 0, this.currentExpandedLength = 0; - const r2 = new O(this.options.processEntities); + const r2 = new P(this.options.processEntities); for (let o2 = 0; o2 < t2.length; o2++) if ("<" === t2[o2]) if ("/" === t2[o2 + 1]) { const e3 = z(t2, ">", o2, "Closing Tag is not closed."); let r3 = t2.substring(o2 + 2, e3).trim(); @@ -60993,26 +61093,27 @@ var require_fxp = __commonJS({ } else { let r3 = W(t2, o2, this.options.removeNSPrefix), a2 = r3.tagName; const l2 = r3.rawTagName; - let u2 = r3.tagExp, h2 = r3.attrExpPresent, d2 = r3.closeIndex; + 
let u2 = r3.tagExp, d2 = r3.attrExpPresent, h2 = r3.closeIndex; if (this.options.transformTagName) { const t3 = this.options.transformTagName(a2); u2 === a2 && (u2 = t3), a2 = t3; } + if (this.options.strictReservedNames && (a2 === this.options.commentPropName || a2 === this.options.cdataPropName)) throw new Error(`Invalid tag name: ${a2}`); n2 && i2 && "!xml" !== n2.tagname && (i2 = this.saveTextToParentTag(i2, n2, s2, false)); const p2 = n2; p2 && -1 !== this.options.unpairedTags.indexOf(p2.tagname) && (n2 = this.tagsNodeStack.pop(), s2 = s2.substring(0, s2.lastIndexOf("."))), a2 !== e2.tagname && (s2 += s2 ? "." + a2 : a2); - const f2 = o2; + const c2 = o2; if (this.isItStopNode(this.stopNodesExact, this.stopNodesWildcard, s2, a2)) { let e3 = ""; if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) "/" === a2[a2.length - 1] ? (a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), o2 = r3.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a2)) o2 = r3.closeIndex; else { - const n3 = this.readStopNodeData(t2, l2, d2 + 1); + const n3 = this.readStopNodeData(t2, l2, h2 + 1); if (!n3) throw new Error(`Unexpected end of ${l2}`); o2 = n3.i, e3 = n3.tagContent; } const i3 = new I(a2); - a2 !== u2 && h2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, h2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, f2); + a2 !== u2 && d2 && (i3[":@"] = this.buildAttributesMap(u2, s2, a2)), e3 && (e3 = this.parseTextData(e3, a2, s2, true, d2, true, true)), s2 = s2.substr(0, s2.lastIndexOf(".")), i3.add(this.options.textNodeName, e3), this.addChild(n2, i3, s2, c2); } else { if (u2.length > 0 && u2.lastIndexOf("/") === u2.length - 1) { if ("/" === a2[a2.length - 1] ? 
(a2 = a2.substr(0, a2.length - 1), s2 = s2.substr(0, s2.length - 1), u2 = a2) : u2 = u2.substr(0, u2.length - 1), this.options.transformTagName) { @@ -61020,18 +61121,26 @@ var require_fxp = __commonJS({ u2 === a2 && (u2 = t4), a2 = t4; } const t3 = new I(a2); - a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), s2 = s2.substr(0, s2.lastIndexOf(".")); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")); } else { - const t3 = new I(a2); - this.tagsNodeStack.push(n2), a2 !== u2 && h2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, f2), n2 = t3; + if (-1 !== this.options.unpairedTags.indexOf(a2)) { + const t3 = new I(a2); + a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2)), this.addChild(n2, t3, s2, c2), s2 = s2.substr(0, s2.lastIndexOf(".")), o2 = r3.closeIndex; + continue; + } + { + const t3 = new I(a2); + if (this.tagsNodeStack.length > this.options.maxNestedTags) throw new Error("Maximum nested tags exceeded"); + this.tagsNodeStack.push(n2), a2 !== u2 && d2 && (t3[":@"] = this.buildAttributesMap(u2, s2, a2)), this.addChild(n2, t3, s2, c2), n2 = t3; + } } - i2 = "", o2 = d2; + i2 = "", o2 = h2; } } else i2 += t2[o2]; return e2.child; }; - function R(t2, e2, n2, i2) { + function B(t2, e2, n2, i2) { this.options.captureMetaData || (i2 = void 0); const s2 = this.options.updateTag(e2.tagname, n2, e2[":@"]); false === s2 || ("string" == typeof s2 ? 
(e2.tagname = s2, t2.addChild(e2, i2)) : t2.addChild(e2, i2)); @@ -61092,12 +61201,12 @@ var require_fxp = __commonJS({ const o2 = s2.index, a2 = r2.search(/\s/); let l2 = r2, u2 = true; -1 !== a2 && (l2 = r2.substring(0, a2), r2 = r2.substring(a2 + 1).trimStart()); - const h2 = l2; + const d2 = l2; if (n2) { const t3 = l2.indexOf(":"); -1 !== t3 && (l2 = l2.substr(t3 + 1), u2 = l2 !== s2.data.substr(t3 + 1)); } - return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: h2 }; + return { tagName: l2, tagExp: r2, closeIndex: o2, attrExpPresent: u2, rawTagName: d2 }; } function q(t2, e2, n2) { const i2 = n2; @@ -61118,19 +61227,19 @@ var require_fxp = __commonJS({ if (e2 && "string" == typeof t2) { const e3 = t2.trim(); return "true" === e3 || "false" !== e3 && (function(t3, e4 = {}) { - if (e4 = Object.assign({}, V, e4), !t3 || "string" != typeof t3) return t3; + if (e4 = Object.assign({}, D, e4), !t3 || "string" != typeof t3) return t3; let n3 = t3.trim(); if (void 0 !== e4.skipLike && e4.skipLike.test(n3)) return t3; if ("0" === t3) return 0; - if (e4.hex && C.test(n3)) return (function(t4) { + if (e4.hex && $.test(n3)) return (function(t4) { if (parseInt) return parseInt(t4, 16); if (Number.parseInt) return Number.parseInt(t4, 16); if (window && window.parseInt) return window.parseInt(t4, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); })(n3); - if (-1 !== n3.search(/.+[eE].+/)) return (function(t4, e5, n4) { + if (n3.includes("e") || n3.includes("E")) return (function(t4, e5, n4) { if (!n4.eNotation) return t4; - const i3 = e5.match(D); + const i3 = e5.match(j); if (i3) { let s2 = i3[1] || ""; const r2 = -1 === i3[3].indexOf("e") ? "E" : "e", o2 = i3[2], a2 = s2 ? 
t4[o2.length + 1] === r2 : t4[o2.length] === r2; @@ -61139,7 +61248,7 @@ var require_fxp = __commonJS({ return t4; })(t3, n3, e4); { - const s2 = $.exec(n3); + const s2 = V.exec(n3); if (s2) { const r2 = s2[1] || "", o2 = s2[2]; let a2 = (i2 = s2[3]) && -1 !== i2.indexOf(".") ? ("." === (i2 = i2.replace(/0+$/, "")) ? i2 = "0" : "." === i2[0] ? i2 = "0" + i2 : "." === i2[i2.length - 1] && (i2 = i2.substring(0, i2.length - 1)), i2) : i2; @@ -61147,7 +61256,7 @@ var require_fxp = __commonJS({ if (!e4.leadingZeros && (o2.length > 1 || 1 === o2.length && !l2)) return t3; { const i3 = Number(n3), s3 = String(i3); - if (0 === i3 || -0 === i3) return i3; + if (0 === i3) return i3; if (-1 !== s3.search(/[eE]/)) return e4.eNotation ? i3 : t3; if (-1 !== n3.indexOf(".")) return "0" === s3 || s3 === a2 || s3 === `${r2}${a2}` ? i3 : t3; let l3 = o2 ? a2 : n3; @@ -61181,7 +61290,7 @@ var require_fxp = __commonJS({ if (o2[a2]) { let t3 = H(o2[a2], e2, l2); const n3 = nt(t3, e2); - void 0 !== o2[Q] && (t3[Q] = o2[Q]), o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== s2[a2] && s2.hasOwnProperty(a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? s2[a2] = [t3] : s2[a2] = t3; + o2[":@"] ? et(t3, o2[":@"], l2, e2) : 1 !== Object.keys(t3).length || void 0 === t3[e2.textNodeName] || e2.alwaysCreateTextNode ? 0 === Object.keys(t3).length && (e2.alwaysCreateTextNode ? t3[e2.textNodeName] = "" : t3 = "") : t3 = t3[e2.textNodeName], void 0 !== o2[Q] && "object" == typeof t3 && null !== t3 && (t3[Q] = o2[Q]), void 0 !== s2[a2] && Object.prototype.hasOwnProperty.call(s2, a2) ? (Array.isArray(s2[a2]) || (s2[a2] = [s2[a2]]), s2[a2].push(t3)) : e2.isArray(a2, l2, n3) ? 
s2[a2] = [t3] : s2[a2] = t3; } } } @@ -61209,7 +61318,7 @@ var require_fxp = __commonJS({ } class it { constructor(t2) { - this.externalEntities = {}, this.options = w(t2); + this.externalEntities = {}, this.options = v(t2); } parse(t2, e2) { if ("string" != typeof t2 && t2.toString) t2 = t2.toString(); @@ -61219,7 +61328,7 @@ var require_fxp = __commonJS({ const n3 = a(t2, e2); if (true !== n3) throw Error(`${n3.err.msg}:${n3.err.line}:${n3.err.col}`); } - const n2 = new F(this.options); + const n2 = new L(this.options); n2.addExternalEntities(this.externalEntities); const i2 = n2.parseXml(t2); return this.options.preserveOrder || void 0 === i2 ? i2 : J(i2, this.options); @@ -61240,6 +61349,13 @@ var require_fxp = __commonJS({ } function rt(t2, e2, n2, i2) { let s2 = "", r2 = false; + if (!Array.isArray(t2)) { + if (null != t2) { + let n3 = t2.toString(); + return n3 = ut(n3, e2), n3; + } + return ""; + } for (let o2 = 0; o2 < t2.length; o2++) { const a2 = t2[o2], l2 = ot(a2); if (void 0 === l2) continue; @@ -61263,10 +61379,10 @@ var require_fxp = __commonJS({ o3 = 0 !== o3.length ? " " + o3 : "", s2 += n3 + `<${l2}${o3}${t3}?>`, r2 = true; continue; } - let h2 = i2; - "" !== h2 && (h2 += e2.indentBy); - const d2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, h2); - -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += d2 + ">" : s2 += d2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? s2 += d2 + `>${p2}${i2}` : (s2 += d2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += d2 + "/>", r2 = true; + let d2 = i2; + "" !== d2 && (d2 += e2.indentBy); + const h2 = i2 + `<${l2}${at(a2[":@"], e2)}`, p2 = rt(a2[l2], e2, u2, d2); + -1 !== e2.unpairedTags.indexOf(l2) ? e2.suppressUnpairedNode ? s2 += h2 + ">" : s2 += h2 + "/>" : p2 && 0 !== p2.length || !e2.suppressEmptyNode ? p2 && p2.endsWith(">") ? 
s2 += h2 + `>${p2}${i2}` : (s2 += h2 + ">", p2 && "" !== i2 && (p2.includes("/>") || p2.includes("`) : s2 += h2 + "/>", r2 = true; } return s2; } @@ -61274,13 +61390,13 @@ var require_fxp = __commonJS({ const e2 = Object.keys(t2); for (let n2 = 0; n2 < e2.length; n2++) { const i2 = e2[n2]; - if (t2.hasOwnProperty(i2) && ":@" !== i2) return i2; + if (Object.prototype.hasOwnProperty.call(t2, i2) && ":@" !== i2) return i2; } } function at(t2, e2) { let n2 = ""; if (t2 && !e2.ignoreAttributes) for (let i2 in t2) { - if (!t2.hasOwnProperty(i2)) continue; + if (!Object.prototype.hasOwnProperty.call(t2, i2)) continue; let s2 = e2.attributeValueProcessor(i2, t2[i2]); s2 = ut(s2, e2), true === s2 && e2.suppressBooleanAttributes ? n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}` : n2 += ` ${i2.substr(e2.attributeNamePrefix.length)}="${s2}"`; } @@ -61298,15 +61414,21 @@ var require_fxp = __commonJS({ } return t2; } - const ht = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { + const dt = { attributeNamePrefix: "@_", attributesGroupName: false, textNodeName: "#text", ignoreAttributes: true, cdataPropName: false, format: false, indentBy: " ", suppressEmptyNode: false, suppressUnpairedNode: true, suppressBooleanAttributes: true, tagValueProcessor: function(t2, e2) { return e2; }, attributeValueProcessor: function(t2, e2) { return e2; }, preserveOrder: false, commentPropName: false, unpairedTags: [], entities: [{ regex: new RegExp("&", "g"), val: "&" }, { regex: new RegExp(">", "g"), val: ">" }, { regex: new RegExp("<", "g"), val: "<" }, { regex: new RegExp("'", "g"), val: "'" }, { regex: new RegExp('"', "g"), val: """ }], processEntities: true, stopNodes: [], oneListGroup: false }; - function dt(t2) { - this.options = Object.assign({}, ht, t2), 
true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { + function ht(t2) { + var e2; + this.options = Object.assign({}, dt, t2), true === this.options.ignoreAttributes || this.options.attributesGroupName ? this.isAttribute = function() { return false; - } : (this.ignoreAttributesFn = L(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ct), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ft, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { + } : (this.ignoreAttributesFn = "function" == typeof (e2 = this.options.ignoreAttributes) ? e2 : Array.isArray(e2) ? (t3) => { + for (const n2 of e2) { + if ("string" == typeof n2 && t3 === n2) return true; + if (n2 instanceof RegExp && n2.test(t3)) return true; + } + } : () => false, this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = ft), this.processTextOrObjNode = pt, this.options.format ? (this.indentate = ct, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } @@ -61314,15 +61436,15 @@ var require_fxp = __commonJS({ const s2 = this.j2x(t2, n2 + 1, i2.concat(e2)); return void 0 !== t2[this.options.textNodeName] && 1 === Object.keys(t2).length ? this.buildTextValNode(t2[this.options.textNodeName], e2, s2.attrStr, n2) : this.buildObjectNode(s2.val, e2, s2.attrStr, n2); } - function ft(t2) { + function ct(t2) { return this.options.indentBy.repeat(t2); } - function ct(t2) { + function ft(t2) { return !(!t2.startsWith(this.options.attributeNamePrefix) || t2 === this.options.textNodeName) && t2.substr(this.attrPrefixLen); } - dt.prototype.build = function(t2) { + ht.prototype.build = function(t2) { return this.options.preserveOrder ? 
st(t2, this.options) : (Array.isArray(t2) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t2 = { [this.options.arrayNodeName]: t2 }), this.j2x(t2, 0, []).val); - }, dt.prototype.j2x = function(t2, e2, n2) { + }, ht.prototype.j2x = function(t2, e2, n2) { let i2 = "", s2 = ""; const r2 = n2.join("."); for (let o2 in t2) if (Object.prototype.hasOwnProperty.call(t2, o2)) if (void 0 === t2[o2]) this.isAttribute(o2) && (s2 += ""); @@ -61357,18 +61479,18 @@ var require_fxp = __commonJS({ for (let s3 = 0; s3 < n3; s3++) i2 += this.buildAttrPairStr(e3[s3], "" + t2[o2][e3[s3]]); } else s2 += this.processTextOrObjNode(t2[o2], o2, e2, n2); return { attrStr: i2, val: s2 }; - }, dt.prototype.buildAttrPairStr = function(t2, e2) { + }, ht.prototype.buildAttrPairStr = function(t2, e2) { return e2 = this.options.attributeValueProcessor(t2, "" + e2), e2 = this.replaceEntitiesValue(e2), this.options.suppressBooleanAttributes && "true" === e2 ? " " + t2 : " " + t2 + '="' + e2 + '"'; - }, dt.prototype.buildObjectNode = function(t2, e2, n2, i2) { + }, ht.prototype.buildObjectNode = function(t2, e2, n2, i2) { if ("" === t2) return "?" === e2[0] ? this.indentate(i2) + "<" + e2 + n2 + "?" + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar; { let s2 = "` + this.newLine : this.indentate(i2) + "<" + e2 + n2 + r2 + this.tagEndChar + t2 + this.indentate(i2) + s2 : this.indentate(i2) + "<" + e2 + n2 + r2 + ">" + t2 + s2; } - }, dt.prototype.closeTag = function(t2) { + }, ht.prototype.closeTag = function(t2) { let e2 = ""; return -1 !== this.options.unpairedTags.indexOf(t2) ? this.options.suppressUnpairedNode || (e2 = "/") : e2 = this.options.suppressEmptyNode ? "/" : `>` + this.newLine; if (false !== this.options.commentPropName && e2 === this.options.commentPropName) return this.indentate(i2) + `` + this.newLine; if ("?" === e2[0]) return this.indentate(i2) + "<" + e2 + n2 + "?" 
+ this.tagEndChar; @@ -61376,14 +61498,14 @@ var require_fxp = __commonJS({ let s2 = this.options.tagValueProcessor(e2, t2); return s2 = this.replaceEntitiesValue(s2), "" === s2 ? this.indentate(i2) + "<" + e2 + n2 + this.closeTag(e2) + this.tagEndChar : this.indentate(i2) + "<" + e2 + n2 + ">" + s2 + " 0 && this.options.processEntities) for (let e2 = 0; e2 < this.options.entities.length; e2++) { const n2 = this.options.entities[e2]; t2 = t2.replace(n2.regex, n2.val); } return t2; }; - const gt = { validate: a }; + const gt = ht, xt = { validate: a }; module2.exports = e; })(); } @@ -90902,7 +91024,7 @@ var require_downloadUtils = __commonJS({ var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); var utils = __importStar2(require_cacheUtils()); @@ -91013,7 +91135,7 @@ var require_downloadUtils = __commonJS({ exports2.DownloadProgress = DownloadProgress; function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs13.createWriteStream(archivePath); + const writeStream = fs14.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); @@ -91038,7 +91160,7 @@ var require_downloadUtils = __commonJS({ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { return __awaiter2(this, void 0, void 0, function* () { var _a; - const archiveDescriptor = yield fs13.promises.open(archivePath, "w"); + const archiveDescriptor = yield fs14.promises.open(archivePath, "w"); const httpClient = new 
http_client_1.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true @@ -91154,7 +91276,7 @@ var require_downloadUtils = __commonJS({ } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); - const fd = fs13.openSync(archivePath, "w"); + const fd = fs14.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); @@ -91172,12 +91294,12 @@ var require_downloadUtils = __commonJS({ controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) { - fs13.writeFileSync(fd, result); + fs14.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); - fs13.closeSync(fd); + fs14.closeSync(fd); } } }); @@ -91499,7 +91621,7 @@ var require_cacheHttpClient = __commonJS({ var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var url_1 = require("url"); var utils = __importStar2(require_cacheUtils()); var uploadUtils_1 = require_uploadUtils(); @@ -91634,7 +91756,7 @@ Other caches with similar key:`); return __awaiter2(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs13.openSync(archivePath, "r"); + const fd = fs14.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); @@ -91648,7 +91770,7 @@ Other caches with similar key:`); const start = offset; const end = offset + chunkSize - 1; offset += 
maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs14.createReadStream(archivePath, { fd, start, end, @@ -91659,7 +91781,7 @@ Other caches with similar key:`); } }))); } finally { - fs13.closeSync(fd); + fs14.closeSync(fd); } return; }); @@ -98912,7 +99034,7 @@ var require_manifest = __commonJS({ var core_1 = require_core(); var os3 = require("os"); var cp = require("child_process"); - var fs13 = require("fs"); + var fs14 = require("fs"); function _findMatch(versionSpec, stable, candidates, archFilter) { return __awaiter2(this, void 0, void 0, function* () { const platFilter = os3.platform(); @@ -98974,10 +99096,10 @@ var require_manifest = __commonJS({ const lsbReleaseFile = "/etc/lsb-release"; const osReleaseFile = "/etc/os-release"; let contents = ""; - if (fs13.existsSync(lsbReleaseFile)) { - contents = fs13.readFileSync(lsbReleaseFile).toString(); - } else if (fs13.existsSync(osReleaseFile)) { - contents = fs13.readFileSync(osReleaseFile).toString(); + if (fs14.existsSync(lsbReleaseFile)) { + contents = fs14.readFileSync(lsbReleaseFile).toString(); + } else if (fs14.existsSync(osReleaseFile)) { + contents = fs14.readFileSync(osReleaseFile).toString(); } return contents; } @@ -99186,7 +99308,7 @@ var require_tool_cache = __commonJS({ var core14 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); - var fs13 = __importStar2(require("fs")); + var fs14 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os3 = __importStar2(require("os")); var path13 = __importStar2(require("path")); @@ -99232,7 +99354,7 @@ var require_tool_cache = __commonJS({ } function downloadToolAttempt(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - if (fs13.existsSync(dest)) { + if (fs14.existsSync(dest)) { throw new Error(`Destination file path ${dest} 
already exists`); } const http = new httpm.HttpClient(userAgent2, [], { @@ -99256,7 +99378,7 @@ var require_tool_cache = __commonJS({ const readStream = responseMessageFactory(); let succeeded = false; try { - yield pipeline(readStream, fs13.createWriteStream(dest)); + yield pipeline(readStream, fs14.createWriteStream(dest)); core14.debug("download complete"); succeeded = true; return dest; @@ -99468,11 +99590,11 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os3.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch2}`); core14.debug(`source dir: ${sourceDir}`); - if (!fs13.statSync(sourceDir).isDirectory()) { + if (!fs14.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } const destPath = yield _createToolPath(tool, version, arch2); - for (const itemName of fs13.readdirSync(sourceDir)) { + for (const itemName of fs14.readdirSync(sourceDir)) { const s = path13.join(sourceDir, itemName); yield io6.cp(s, destPath, { recursive: true }); } @@ -99486,7 +99608,7 @@ var require_tool_cache = __commonJS({ arch2 = arch2 || os3.arch(); core14.debug(`Caching tool ${tool} ${version} ${arch2}`); core14.debug(`source file: ${sourceFile}`); - if (!fs13.statSync(sourceFile).isFile()) { + if (!fs14.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); @@ -99515,7 +99637,7 @@ var require_tool_cache = __commonJS({ versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2); core14.debug(`checking cache: ${cachePath}`); - if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) { + if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) { core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { @@ -99528,12 +99650,12 @@ var require_tool_cache = __commonJS({ const versions = []; arch2 = arch2 
|| os3.arch(); const toolPath = path13.join(_getCacheDirectory(), toolName); - if (fs13.existsSync(toolPath)) { - const children = fs13.readdirSync(toolPath); + if (fs14.existsSync(toolPath)) { + const children = fs14.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { const fullPath = path13.join(toolPath, child, arch2 || ""); - if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) { + if (fs14.existsSync(fullPath) && fs14.existsSync(`${fullPath}.complete`)) { versions.push(child); } } @@ -99604,7 +99726,7 @@ var require_tool_cache = __commonJS({ function _completeToolPath(tool, version, arch2) { const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; - fs13.writeFileSync(markerPath, ""); + fs14.writeFileSync(markerPath, ""); core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { @@ -103131,21 +103253,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs13 = options.fs || await import("node:fs/promises"); + const fs14 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? 
await fs14.lstat(itemPath, { bigint: true }) : await fs14.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? await fs14.readdir(itemPath) : await fs14.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -105785,17 +105907,6 @@ function getExtraOptionsEnvParam() { ); } } -function getToolNames(sarif) { - const toolNames = {}; - for (const run2 of sarif.runs || []) { - const tool = run2.tool || {}; - const driver = tool.driver || {}; - if (typeof driver.name === "string" && driver.name.length > 0) { - toolNames[driver.name] = true; - } - } - return Object.keys(toolNames); -} function getCodeQLDatabasePath(config, language) { return path.resolve(config.dbLocation, language); } @@ -106507,8 +106618,8 @@ var path4 = __toESM(require("path")); var semver4 = __toESM(require_semver2()); // src/defaults.json -var bundleVersion = "codeql-bundle-v2.24.2"; -var cliVersion = "2.24.2"; +var bundleVersion = "codeql-bundle-v2.24.3"; +var cliVersion = "2.24.3"; // src/overlay/index.ts var fs3 = __toESM(require("fs")); @@ -107026,6 +107137,11 @@ var featureConfig = { // cannot be found when interpreting results. 
minimumVersion: void 0 }, + ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: void 0 + }, ["start_proxy_use_features_release" /* StartProxyUseFeaturesRelease */]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -107037,11 +107153,6 @@ var featureConfig = { minimumVersion: void 0, toolsFeature: "bundleSupportsOverlay" /* BundleSupportsOverlay */ }, - ["use_repository_properties_v2" /* UseRepositoryProperties */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: void 0 - }, ["validate_db_config" /* ValidateDbConfig */]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", @@ -107372,12 +107483,83 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) { } } +// src/sarif/index.ts +var fs5 = __toESM(require("fs")); +var InvalidSarifUploadError = class extends Error { +}; +function getToolNames(sarifFile) { + const toolNames = {}; + for (const run2 of sarifFile.runs || []) { + const tool = run2.tool || {}; + const driver = tool.driver || {}; + if (typeof driver.name === "string" && driver.name.length > 0) { + toolNames[driver.name] = true; + } + } + return Object.keys(toolNames); +} +function readSarifFile(sarifFilePath) { + return JSON.parse(fs5.readFileSync(sarifFilePath, "utf8")); +} +function combineSarifFiles(sarifFiles, logger) { + logger.info(`Loading SARIF file(s)`); + const runs = []; + let version = void 0; + for (const sarifFile of sarifFiles) { + logger.debug(`Loading SARIF file: ${sarifFile}`); + const sarifLog = readSarifFile(sarifFile); + if (version === void 0) { + version = sarifLog.version; + } else if (version !== sarifLog.version) { + throw new InvalidSarifUploadError( + `Different SARIF versions encountered: ${version} and ${sarifLog.version}` + ); + } + runs.push(...sarifLog?.runs || []); + } + if 
(version === void 0) { + version = "2.1.0"; + } + return { version, runs }; +} +function areAllRunsProducedByCodeQL(sarifLogs) { + return sarifLogs.every((sarifLog) => { + return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL"); + }); +} +function createRunKey(run2) { + return { + name: run2.tool?.driver?.name, + fullName: run2.tool?.driver?.fullName, + version: run2.tool?.driver?.version, + semanticVersion: run2.tool?.driver?.semanticVersion, + guid: run2.tool?.driver?.guid, + automationId: run2.automationDetails?.id + }; +} +function areAllRunsUnique(sarifLogs) { + const keys = /* @__PURE__ */ new Set(); + for (const sarifLog of sarifLogs) { + if (sarifLog.runs === void 0) { + continue; + } + for (const run2 of sarifLog.runs) { + const key = JSON.stringify(createRunKey(run2)); + if (keys.has(key)) { + return false; + } + keys.add(key); + } + } + return true; +} + // src/status-report.ts var os = __toESM(require("os")); var core9 = __toESM(require_core()); // src/config-utils.ts -var fs6 = __toESM(require("fs")); +var fs7 = __toESM(require("fs")); var path7 = __toESM(require("path")); // src/config/db-config.ts @@ -107462,18 +107644,18 @@ function writeDiagnostic(config, language, diagnostic) { } // src/diff-informed-analysis-utils.ts -var fs5 = __toESM(require("fs")); +var fs6 = __toESM(require("fs")); var path6 = __toESM(require("path")); function getDiffRangesJsonFilePath() { return path6.join(getTemporaryDirectory(), "pr-diff-range.json"); } function readDiffRangesJsonFile(logger) { const jsonFilePath = getDiffRangesJsonFilePath(); - if (!fs5.existsSync(jsonFilePath)) { + if (!fs6.existsSync(jsonFilePath)) { logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`); return void 0; } - const jsonContents = fs5.readFileSync(jsonFilePath, "utf8"); + const jsonContents = fs6.readFileSync(jsonFilePath, "utf8"); logger.debug( `Read pr-diff-range JSON file from ${jsonFilePath}: ${jsonContents}` @@ -107522,10 +107704,10 @@ function 
getPathToParsedConfigFile(tempDir) { } async function getConfig(tempDir, logger) { const configFile = getPathToParsedConfigFile(tempDir); - if (!fs6.existsSync(configFile)) { + if (!fs7.existsSync(configFile)) { return void 0; } - const configString = fs6.readFileSync(configFile, "utf8"); + const configString = fs7.readFileSync(configFile, "utf8"); logger.debug("Loaded config:"); logger.debug(configString); const config = JSON.parse(configString); @@ -107769,7 +107951,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error } // src/upload-lib.ts -var fs12 = __toESM(require("fs")); +var fs13 = __toESM(require("fs")); var path12 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); @@ -107777,7 +107959,7 @@ var core12 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/codeql.ts -var fs10 = __toESM(require("fs")); +var fs11 = __toESM(require("fs")); var path10 = __toESM(require("path")); var core11 = __toESM(require_core()); var toolrunner3 = __toESM(require_toolrunner()); @@ -108025,7 +108207,7 @@ function wrapCliConfigurationError(cliError) { } // src/setup-codeql.ts -var fs9 = __toESM(require("fs")); +var fs10 = __toESM(require("fs")); var path9 = __toESM(require("path")); var toolcache3 = __toESM(require_tool_cache()); var import_fast_deep_equal = __toESM(require_fast_deep_equal()); @@ -108087,7 +108269,7 @@ var v4_default = v4; // src/tar.ts var import_child_process = require("child_process"); -var fs7 = __toESM(require("fs")); +var fs8 = __toESM(require("fs")); var stream = __toESM(require("stream")); var import_toolrunner = __toESM(require_toolrunner()); var io4 = __toESM(require_io()); @@ -108160,7 +108342,7 @@ async function isZstdAvailable(logger) { } } async function extract(tarPath, dest, compressionMethod, tarVersion, logger) { - fs7.mkdirSync(dest, { recursive: true }); + fs8.mkdirSync(dest, { recursive: true }); switch (compressionMethod) { case 
"gzip": return await toolcache.extractTar(tarPath, dest); @@ -108244,7 +108426,7 @@ function inferCompressionMethod(tarPath) { } // src/tools-download.ts -var fs8 = __toESM(require("fs")); +var fs9 = __toESM(require("fs")); var os2 = __toESM(require("os")); var path8 = __toESM(require("path")); var import_perf_hooks = require("perf_hooks"); @@ -108351,7 +108533,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat }; } async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) { - fs8.mkdirSync(dest, { recursive: true }); + fs9.mkdirSync(dest, { recursive: true }); const agent = new import_http_client.HttpClient().getAgent(codeqlURL); headers = Object.assign( { "User-Agent": "CodeQL Action" }, @@ -108388,7 +108570,7 @@ function getToolcacheDirectory(version) { } function writeToolcacheMarkerFile(extractedPath, logger) { const markerFilePath = `${extractedPath}.complete`; - fs8.writeFileSync(markerFilePath, ""); + fs9.writeFileSync(markerFilePath, ""); logger.info(`Created toolcache marker file ${markerFilePath}`); } function sanitizeUrlForStatusReport(url2) { @@ -108523,7 +108705,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) { const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({ folder: toolcache3.find("CodeQL", version), version - })).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version"))); + })).filter(({ folder }) => fs10.existsSync(path9.join(folder, "pinned-version"))); if (candidates.length === 1) { const candidate = candidates[0]; logger.debug( @@ -109077,7 +109259,7 @@ async function getCodeQLForCmd(cmd, checkVersion) { "tools", "tracing-config.lua" ); - return fs10.existsSync(tracingConfigPath); + return fs11.existsSync(tracingConfigPath); }, async isScannedLanguage(language) { return !await this.isTracedLanguage(language); @@ -109557,7 +109739,7 @@ async function 
writeCodeScanningConfigFile(config, logger) { logger.startGroup("Augmented user configuration file contents"); logger.info(dump(augmentedConfig)); logger.endGroup(); - fs10.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); + fs11.writeFileSync(codeScanningConfigFile, dump(augmentedConfig)); return codeScanningConfigFile; } var TRAP_CACHE_SIZE_MB = 1024; @@ -109601,7 +109783,7 @@ async function getJobRunUuidSarifOptions(codeql) { } // src/fingerprints.ts -var fs11 = __toESM(require("fs")); +var fs12 = __toESM(require("fs")); var import_path2 = __toESM(require("path")); // node_modules/long/index.js @@ -110589,7 +110771,7 @@ async function hash(callback, filepath) { } updateHash(current); }; - const readStream = fs11.createReadStream(filepath, "utf8"); + const readStream = fs12.createReadStream(filepath, "utf8"); for await (const data of readStream) { for (let i = 0; i < data.length; ++i) { processCharacter(data.charCodeAt(i)); @@ -110664,22 +110846,22 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) { if (!import_path2.default.isAbsolute(uri)) { uri = srcRootPrefix + uri; } - if (!fs11.existsSync(uri)) { + if (!fs12.existsSync(uri)) { logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`); return void 0; } - if (fs11.statSync(uri).isDirectory()) { + if (fs12.statSync(uri).isDirectory()) { logger.debug(`Unable to compute fingerprint for directory: ${uri}`); return void 0; } return uri; } -async function addFingerprints(sarif, sourceRoot, logger) { +async function addFingerprints(sarifLog, sourceRoot, logger) { logger.info( `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` ); const callbacksByFile = {}; - for (const run2 of sarif.runs || []) { + for (const run2 of sarifLog.runs || []) { const artifacts = run2.artifacts || []; for (const result of run2.results || []) { const primaryLocation = (result.locations || [])[0]; @@ -110719,7 +110901,7 @@ async function addFingerprints(sarif, sourceRoot, logger) { }; await hash(teeCallback, filepath); } - return sarif; + return sarifLog; } // src/init.ts @@ -110757,58 +110939,6 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe // src/upload-lib.ts var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning."; var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; -function combineSarifFiles(sarifFiles, logger) { - logger.info(`Loading SARIF file(s)`); - const combinedSarif = { - version: null, - runs: [] - }; - for (const sarifFile of sarifFiles) { - logger.debug(`Loading SARIF file: ${sarifFile}`); - const sarifObject = JSON.parse( - fs12.readFileSync(sarifFile, "utf8") - ); - if (combinedSarif.version === null) { - combinedSarif.version = sarifObject.version; - } else if (combinedSarif.version !== sarifObject.version) { - throw new InvalidSarifUploadError( - `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}` - ); - } - combinedSarif.runs.push(...sarifObject.runs); - } - return combinedSarif; -} -function areAllRunsProducedByCodeQL(sarifObjects) { - return sarifObjects.every((sarifObject) => { - return sarifObject.runs?.every( - (run2) => run2.tool?.driver?.name === "CodeQL" - ); - }); -} -function createRunKey(run2) { - return { - name: run2.tool?.driver?.name, - fullName: run2.tool?.driver?.fullName, - version: 
run2.tool?.driver?.version, - semanticVersion: run2.tool?.driver?.semanticVersion, - guid: run2.tool?.driver?.guid, - automationId: run2.automationDetails?.id - }; -} -function areAllRunsUnique(sarifObjects) { - const keys = /* @__PURE__ */ new Set(); - for (const sarifObject of sarifObjects) { - for (const run2 of sarifObject.runs) { - const key = JSON.stringify(createRunKey(run2)); - if (keys.has(key)) { - return false; - } - keys.add(key); - } - } - return true; -} async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) { if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) { return false; @@ -110837,9 +110967,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) { } async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) { logger.info("Combining SARIF files using the CodeQL CLI"); - const sarifObjects = sarifFiles.map((sarifFile) => { - return JSON.parse(fs12.readFileSync(sarifFile, "utf8")); - }); + const sarifObjects = sarifFiles.map(readSarifFile); const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? 
"and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025"; const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload"; if (!areAllRunsProducedByCodeQL(sarifObjects)) { @@ -110892,27 +111020,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo codeQL = initCodeQLResult.codeql; } const baseTempDir = path12.resolve(tempDir, "combined-sarif"); - fs12.mkdirSync(baseTempDir, { recursive: true }); - const outputDirectory = fs12.mkdtempSync(path12.resolve(baseTempDir, "output-")); + fs13.mkdirSync(baseTempDir, { recursive: true }); + const outputDirectory = fs13.mkdtempSync(path12.resolve(baseTempDir, "output-")); const outputFile = path12.resolve(outputDirectory, "combined-sarif.sarif"); await codeQL.mergeResults(sarifFiles, outputFile, { mergeRunsFromEqualCategory: true }); - return JSON.parse(fs12.readFileSync(outputFile, "utf8")); + return readSarifFile(outputFile); } -function populateRunAutomationDetails(sarif, category, analysis_key, environment) { +function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { - for (const run2 of sarif.runs || []) { + for (const run2 of sarifFile.runs || []) { if (run2.automationDetails === void 0) { run2.automationDetails = { id: automationID }; } } - return sarif; + return sarifFile; } - return sarif; + return sarifFile; } function getAutomationID2(category, analysis_key, environment) { if (category !== void 0) { @@ -110935,7 +111063,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { `SARIF upload disabled by an environment variable. 
Saving to ${payloadSaveFile}` ); logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`); - fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); + fs13.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2)); return "dummy-sarif-id"; } const client = getApiClient(); @@ -110969,7 +111097,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { function findSarifFilesInDir(sarifPath, isSarif) { const sarifFiles = []; const walkSarifFiles = (dir) => { - const entries = fs12.readdirSync(dir, { withFileTypes: true }); + const entries = fs13.readdirSync(dir, { withFileTypes: true }); for (const entry of entries) { if (entry.isFile() && isSarif(entry.name)) { sarifFiles.push(path12.resolve(dir, entry.name)); @@ -110982,7 +111110,7 @@ function findSarifFilesInDir(sarifPath, isSarif) { return sarifFiles; } async function getGroupedSarifFilePaths(logger, sarifPath) { - const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false }); + const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false }); if (stats === void 0) { throw new ConfigurationError(`Path does not exist: ${sarifPath}`); } @@ -111029,9 +111157,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) { } return results; } -function countResultsInSarif(sarif) { +function countResultsInSarif(sarifLog) { let numResults = 0; - const parsedSarif = JSON.parse(sarif); + const parsedSarif = JSON.parse(sarifLog); if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array."); } @@ -111045,26 +111173,26 @@ function countResultsInSarif(sarif) { } return numResults; } -function readSarifFile(sarifFilePath) { +function readSarifFileOrThrow(sarifFilePath) { try { - return JSON.parse(fs12.readFileSync(sarifFilePath, "utf8")); + return readSarifFile(sarifFilePath); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. 
JSON syntax error: ${getErrorMessage(e)}` ); } } -function validateSarifFileSchema(sarif, sarifFilePath, logger) { - if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments. +function validateSarifFileSchema(sarifLog, sarifFilePath, logger) { + if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments. !getTestingEnvironment()) { logger.debug( `Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.` ); - return; + return true; } logger.info(`Validating ${sarifFilePath}`); const schema2 = require_sarif_schema_2_1_0(); - const result = new jsonschema2.Validator().validate(sarif, schema2); + const result = new jsonschema2.Validator().validate(sarifLog, schema2); const warningAttributes = ["uri-reference", "uri"]; const errors = (result.errors ?? []).filter( (err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument)) @@ -111091,6 +111219,7 @@ ${sarifErrors.join( )}` ); } + return true; } function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) { const payloadObj = { @@ -111116,7 +111245,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo payloadObj.base_sha = mergeBaseCommitOid; } else if (process.env.GITHUB_EVENT_PATH) { const githubEvent = JSON.parse( - fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") + fs13.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8") ); payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`; payloadObj.base_sha = githubEvent.pull_request.base.sha; @@ -111127,14 +111256,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) { logger.info(`Post-processing sarif 
files: ${JSON.stringify(sarifPaths)}`); const gitHubVersion = await getGitHubVersion(); - let sarif; + let sarifLog; category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { for (const sarifPath of sarifPaths) { - const parsedSarif = readSarifFile(sarifPath); + const parsedSarif = readSarifFileOrThrow(sarifPath); validateSarifFileSchema(parsedSarif, sarifPath, logger); } - sarif = await combineSarifFilesUsingCLI( + sarifLog = await combineSarifFilesUsingCLI( sarifPaths, gitHubVersion, features, @@ -111142,21 +111271,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, ); } else { const sarifPath = sarifPaths[0]; - sarif = readSarifFile(sarifPath); - validateSarifFileSchema(sarif, sarifPath, logger); - await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion); + sarifLog = readSarifFileOrThrow(sarifPath); + validateSarifFileSchema(sarifLog, sarifPath, logger); + await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion); } - sarif = filterAlertsByDiffRange(logger, sarif); - sarif = await addFingerprints(sarif, checkoutPath, logger); + sarifLog = filterAlertsByDiffRange(logger, sarifLog); + sarifLog = await addFingerprints(sarifLog, checkoutPath, logger); const analysisKey = await getAnalysisKey(); const environment = getRequiredInput("matrix"); - sarif = populateRunAutomationDetails( - sarif, + sarifLog = populateRunAutomationDetails( + sarifLog, category, analysisKey, environment ); - return { sarif, analysisKey, environment }; + return { sarif: sarifLog, analysisKey, environment }; } async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) { const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */); @@ -111173,12 +111302,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc } async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) { 
logger.startGroup(`Uploading ${uploadTarget.name} results`); - const sarif = postProcessingResults.sarif; - const toolNames = getToolNames(sarif); + const sarifLog = postProcessingResults.sarif; + const toolNames = getToolNames(sarifLog); logger.debug(`Validating that each SARIF run has a unique category`); - validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); + validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); - const sarifPayload = JSON.stringify(sarif); + const sarifPayload = JSON.stringify(sarifLog); logger.debug(`Compressing serialized SARIF`); const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64"); const checkoutURI = url.pathToFileURL(checkoutPath).href; @@ -111220,9 +111349,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post }; } function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { - if (!fs12.existsSync(outputDir)) { - fs12.mkdirSync(outputDir, { recursive: true }); - } else if (!fs12.lstatSync(outputDir).isDirectory()) { + if (!fs13.existsSync(outputDir)) { + fs13.mkdirSync(outputDir, { recursive: true }); + } else if (!fs13.lstatSync(outputDir).isDirectory()) { throw new ConfigurationError( `The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}` ); @@ -111232,7 +111361,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) { `upload${uploadTarget.sarifExtension}` ); logger.info(`Writing processed SARIF file to ${outputFile}`); - fs12.writeFileSync(outputFile, sarifPayload); + fs13.writeFileSync(outputFile, sarifPayload); } var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3; var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3; @@ -111330,9 +111459,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger assertNever(status); } } -function validateUniqueCategory(sarif, sentinelPrefix) { +function 
validateUniqueCategory(sarifLog, sentinelPrefix) { const categories = {}; - for (const run2 of sarif.runs) { + for (const run2 of sarifLog.runs || []) { const id = run2?.automationDetails?.id; const tool = run2.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; @@ -111351,15 +111480,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) { function sanitize(str2) { return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase(); } -var InvalidSarifUploadError = class extends Error { -}; -function filterAlertsByDiffRange(logger, sarif) { +function filterAlertsByDiffRange(logger, sarifLog) { const diffRanges = readDiffRangesJsonFile(logger); if (!diffRanges?.length) { - return sarif; + return sarifLog; + } + if (sarifLog.runs === void 0) { + return sarifLog; } const checkoutPath = getRequiredInput("checkout_path"); - for (const run2 of sarif.runs) { + for (const run2 of sarifLog.runs) { if (run2.results) { run2.results = run2.results.filter((result) => { const locations = [ @@ -111380,7 +111510,7 @@ function filterAlertsByDiffRange(logger, sarif) { }); } } - return sarif; + return sarifLog; } // src/upload-sarif.ts diff --git a/package-lock.json b/package-lock.json index 203901544..2fbb729f7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "codeql", - "version": "4.32.5", + "version": "4.32.7", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "codeql", - "version": "4.32.5", + "version": "4.32.7", "license": "MIT", "dependencies": { "@actions/artifact": "^5.0.3", @@ -43,6 +43,7 @@ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", "ava": "^6.4.1", @@ -51,14 +52,14 @@ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", 
"eslint-plugin-no-async-foreach": "^0.1.1", "glob": "^11.1.0", - "globals": "^16.5.0", + "globals": "^17.3.0", "nock": "^14.0.11", "sinon": "^21.0.1", "typescript": "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -849,17 +850,17 @@ } }, "node_modules/@es-joy/jsdoccomment": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.83.0.tgz", - "integrity": "sha512-e1MHSEPJ4m35zkBvNT6kcdeH1SvMaJDsPC3Xhfseg3hvF50FUE3f46Yn36jgbrPYYXezlWUQnevv23c+lx2MCA==", + "version": "0.84.0", + "resolved": "https://registry.npmjs.org/@es-joy/jsdoccomment/-/jsdoccomment-0.84.0.tgz", + "integrity": "sha512-0xew1CxOam0gV5OMjh2KjFQZsKL2bByX1+q4j3E73MpYIdyUxcZb/xQct9ccUb+ve5KGUYbCUxyPnYB7RbuP+w==", "dev": true, "license": "MIT", "dependencies": { "@types/estree": "^1.0.8", - "@typescript-eslint/types": "^8.53.1", + "@typescript-eslint/types": "^8.54.0", "comment-parser": "1.4.5", "esquery": "^1.7.0", - "jsdoc-type-pratt-parser": "~7.1.0" + "jsdoc-type-pratt-parser": "~7.1.1" }, "engines": { "node": "^20.19.0 || ^22.13.0 || >=24" @@ -2522,6 +2523,13 @@ "@types/node": "*" } }, + "node_modules/@types/sarif": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz", + "integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/semver": { "version": "7.7.1", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.1.tgz", @@ -2545,17 +2553,17 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.0.tgz", - "integrity": "sha512-lRyPDLzNCuae71A3t9NEINBiTn7swyOhvUj3MyUOxb8x6g6vPEFoOU+ZRmGMusNC3X3YMhqMIX7i8ShqhT74Pw==", + "version": "8.56.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.56.1.tgz", + "integrity": "sha512-Jz9ZztpB37dNC+HU2HI28Bs9QXpzCz+y/twHOwhyrIRdbuVDxSytJNDl6z/aAKlaRIwC7y8wJdkBv7FxYGgi0A==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.12.2", - "@typescript-eslint/scope-manager": "8.56.0", - "@typescript-eslint/type-utils": "8.56.0", - "@typescript-eslint/utils": "8.56.0", - "@typescript-eslint/visitor-keys": "8.56.0", + "@typescript-eslint/scope-manager": "8.56.1", + "@typescript-eslint/type-utils": "8.56.1", + "@typescript-eslint/utils": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1", "ignore": "^7.0.5", "natural-compare": "^1.4.0", "ts-api-utils": "^2.4.0" @@ -2568,7 +2576,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^8.56.0", + "@typescript-eslint/parser": "^8.56.1", "eslint": "^8.57.0 || ^9.0.0 || ^10.0.0", "typescript": ">=4.8.4 <6.0.0" } @@ -2584,16 +2592,16 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.0.tgz", - "integrity": "sha512-IgSWvLobTDOjnaxAfDTIHaECbkNlAlKv2j5SjpB2v7QHKv1FIfjwMy8FsDbVfDX/KjmCmYICcw7uGaXLhtsLNg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.56.1.tgz", + "integrity": "sha512-klQbnPAAiGYFyI02+znpBRLyjL4/BrBd0nyWkdC0s/6xFLkXYQ8OoRrSkqacS1ddVxf/LDyODIKbQ5TgKAf/Fg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.56.0", - "@typescript-eslint/types": "8.56.0", - "@typescript-eslint/typescript-estree": "8.56.0", - "@typescript-eslint/visitor-keys": "8.56.0", + "@typescript-eslint/scope-manager": "8.56.1", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1", "debug": "^4.4.3" }, "engines": { @@ -2627,14 +2635,14 @@ } }, 
"node_modules/@typescript-eslint/project-service": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.0.tgz", - "integrity": "sha512-M3rnyL1vIQOMeWxTWIW096/TtVP+8W3p/XnaFflhmcFp+U4zlxUxWj4XwNs6HbDeTtN4yun0GNTTDBw/SvufKg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.56.1.tgz", + "integrity": "sha512-TAdqQTzHNNvlVFfR+hu2PDJrURiwKsUvxFn1M0h95BB8ah5jejas08jUWG4dBA68jDMI988IvtfdAI53JzEHOQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/tsconfig-utils": "^8.56.0", - "@typescript-eslint/types": "^8.56.0", + "@typescript-eslint/tsconfig-utils": "^8.56.1", + "@typescript-eslint/types": "^8.56.1", "debug": "^4.4.3" }, "engines": { @@ -2667,14 +2675,14 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.0.tgz", - "integrity": "sha512-7UiO/XwMHquH+ZzfVCfUNkIXlp/yQjjnlYUyYz7pfvlK3/EyyN6BK+emDmGNyQLBtLGaYrTAI6KOw8tFucWL2w==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.56.1.tgz", + "integrity": "sha512-YAi4VDKcIZp0O4tz/haYKhmIDZFEUPOreKbfdAN3SzUDMcPhJ8QI99xQXqX+HoUVq8cs85eRKnD+rne2UAnj2w==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.56.0", - "@typescript-eslint/visitor-keys": "8.56.0" + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2685,9 +2693,9 @@ } }, "node_modules/@typescript-eslint/tsconfig-utils": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.0.tgz", - "integrity": "sha512-bSJoIIt4o3lKXD3xmDh9chZcjCz5Lk8xS7Rxn+6l5/pKrDpkCwtQNQQwZ2qRPk7TkUYhrq3WPIHXOXlbXP0itg==", + "version": "8.56.1", + 
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.56.1.tgz", + "integrity": "sha512-qOtCYzKEeyr3aR9f28mPJqBty7+DBqsdd63eO0yyDwc6vgThj2UjWfJIcsFeSucYydqcuudMOprZ+x1SpF3ZuQ==", "dev": true, "license": "MIT", "engines": { @@ -2702,15 +2710,15 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.0.tgz", - "integrity": "sha512-qX2L3HWOU2nuDs6GzglBeuFXviDODreS58tLY/BALPC7iu3Fa+J7EOTwnX9PdNBxUI7Uh0ntP0YWGnxCkXzmfA==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.56.1.tgz", + "integrity": "sha512-yB/7dxi7MgTtGhZdaHCemf7PuwrHMenHjmzgUW1aJpO+bBU43OycnM3Wn+DdvDO/8zzA9HlhaJ0AUGuvri4oGg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.56.0", - "@typescript-eslint/typescript-estree": "8.56.0", - "@typescript-eslint/utils": "8.56.0", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1", + "@typescript-eslint/utils": "8.56.1", "debug": "^4.4.3", "ts-api-utils": "^2.4.0" }, @@ -2745,9 +2753,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz", - "integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.1.tgz", + "integrity": "sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw==", "dev": true, "license": "MIT", "engines": { @@ -2759,18 +2767,18 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.0.tgz", - "integrity": 
"sha512-ex1nTUMWrseMltXUHmR2GAQ4d+WjkZCT4f+4bVsps8QEdh0vlBsaCokKTPlnqBFqqGaxilDNJG7b8dolW2m43Q==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.56.1.tgz", + "integrity": "sha512-qzUL1qgalIvKWAf9C1HpvBjif+Vm6rcT5wZd4VoMb9+Km3iS3Cv9DY6dMRMDtPnwRAFyAi7YXJpTIEXLvdfPxg==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/project-service": "8.56.0", - "@typescript-eslint/tsconfig-utils": "8.56.0", - "@typescript-eslint/types": "8.56.0", - "@typescript-eslint/visitor-keys": "8.56.0", + "@typescript-eslint/project-service": "8.56.1", + "@typescript-eslint/tsconfig-utils": "8.56.1", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/visitor-keys": "8.56.1", "debug": "^4.4.3", - "minimatch": "^9.0.5", + "minimatch": "^10.2.2", "semver": "^7.7.3", "tinyglobby": "^0.2.15", "ts-api-utils": "^2.4.0" @@ -2797,9 +2805,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz", - "integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz", + "integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==", "dev": true, "license": "MIT", "dependencies": { @@ -2828,32 +2836,32 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.6.tgz", - "integrity": "sha512-kQAVowdR33euIqeA0+VZTDqU+qo1IeVY+hrKYtZMio3Pg0P0vuh/kwRylLUddJhB6pf3q/botcOvRtx4IN1wqQ==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": 
"sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "dev": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { "brace-expansion": "^5.0.2" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "18 || 20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/@typescript-eslint/utils": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.0.tgz", - "integrity": "sha512-RZ3Qsmi2nFGsS+n+kjLAYDPVlrzf7UhTffrDIKr+h2yzAlYP/y5ZulU0yeDEPItos2Ph46JAL5P/On3pe7kDIQ==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.56.1.tgz", + "integrity": "sha512-HPAVNIME3tABJ61siYlHzSWCGtOoeP2RTIaHXFMPqjrQKCGB9OgUVdiNgH7TJS2JNIQ5qQ4RsAUDuGaGme/KOA==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.9.1", - "@typescript-eslint/scope-manager": "8.56.0", - "@typescript-eslint/types": "8.56.0", - "@typescript-eslint/typescript-estree": "8.56.0" + "@typescript-eslint/scope-manager": "8.56.1", + "@typescript-eslint/types": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2868,13 +2876,13 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.0.tgz", - "integrity": "sha512-q+SL+b+05Ud6LbEE35qe4A99P+htKTKVbyiNEe45eCbJFyh/HVK9QXwlrbz+Q4L8SOW4roxSVwXYj4DMBT7Ieg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.56.1.tgz", + "integrity": "sha512-KiROIzYdEV85YygXw6BI/Dx4fnBlFQu6Mq4QE4MOH9fFnhohw6wX/OAvDY2/C+ut0I3RSPKenvZJIVYqJNkhEw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.56.0", + "@typescript-eslint/types": "8.56.1", "eslint-visitor-keys": "^5.0.0" }, 
"engines": { @@ -2886,9 +2894,9 @@ } }, "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.0.tgz", - "integrity": "sha512-A0XeIi7CXU7nPlfHS9loMYEKxUaONu/hTEzHTGba9Huu94Cq1hPivf+DE5erJozZOky0LfvXAyrV/tcswpLI0Q==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz", + "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -5011,6 +5019,19 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/eslint-plugin-github/node_modules/globals": { + "version": "16.5.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", + "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/eslint-plugin-i18n-text": { "version": "1.0.1", "dev": true, @@ -5114,9 +5135,9 @@ } }, "node_modules/eslint-plugin-import-x/node_modules/minimatch": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.2.tgz", - "integrity": "sha512-+G4CpNBxa5MprY+04MbgOw1v7So6n5JY166pFi9KfYwT78fxScCeSNQSNzp6dpPSW2rONOps6Ocam1wFhCgoVw==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -5138,13 +5159,13 @@ } }, "node_modules/eslint-plugin-jsdoc": { - "version": "62.5.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.5.0.tgz", - "integrity": 
"sha512-D+1haMVDzW/ZMoPwOnsbXCK07rJtsq98Z1v+ApvDKxSzYTTcPgmFc/nyUDCGmxm2cP7g7hszyjYHO7Zodl/43w==", + "version": "62.7.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.7.1.tgz", + "integrity": "sha512-4Zvx99Q7d1uggYBUX/AIjvoyqXhluGbbKrRmG8SQTLprPFg6fa293tVJH1o1GQwNe3lUydd8ZHzn37OaSncgSQ==", "dev": true, "license": "BSD-3-Clause", "dependencies": { - "@es-joy/jsdoccomment": "~0.83.0", + "@es-joy/jsdoccomment": "~0.84.0", "@es-joy/resolve.exports": "1.2.0", "are-docs-informative": "^0.0.2", "comment-parser": "1.4.5", @@ -5155,7 +5176,7 @@ "html-entities": "^2.6.0", "object-deep-merge": "^2.0.0", "parse-imports-exports": "^0.2.4", - "semver": "^7.7.3", + "semver": "^7.7.4", "spdx-expression-parse": "^4.0.0", "to-valid-identifier": "^1.0.0" }, @@ -5163,7 +5184,7 @@ "node": "^20.19.0 || ^22.13.0 || >=24" }, "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" + "eslint": "^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0" } }, "node_modules/eslint-plugin-jsdoc/node_modules/debug": { @@ -5629,10 +5650,22 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-xml-builder": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fast-xml-builder/-/fast-xml-builder-1.0.0.tgz", + "integrity": "sha512-fpZuDogrAgnyt9oDDz+5DBz0zgPdPZz6D4IR7iESxRXElrlGTRkHJ9eEt+SACRJwT0FNFrt71DFQIUFBJfX/uQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + } + ], + "license": "MIT" + }, "node_modules/fast-xml-parser": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.6.tgz", - "integrity": "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA==", + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.4.1.tgz", + "integrity": "sha512-BQ30U1mKkvXQXXkAGcuyUA/GA26oEB7NzOtsxCDtyu62sjGw5QraKFhx2Em3WQNjPw9PG6MQ9yuIIgkSDfGu5A==", "funding": [ { "type": "github", 
@@ -5641,6 +5674,7 @@ ], "license": "MIT", "dependencies": { + "fast-xml-builder": "^1.0.0", "strnum": "^2.1.2" }, "bin": { @@ -6024,9 +6058,9 @@ } }, "node_modules/glob/node_modules/minimatch": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.2.tgz", - "integrity": "sha512-+G4CpNBxa5MprY+04MbgOw1v7So6n5JY166pFi9KfYwT78fxScCeSNQSNzp6dpPSW2rONOps6Ocam1wFhCgoVw==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz", + "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==", "license": "BlueOak-1.0.0", "dependencies": { "brace-expansion": "^5.0.2" @@ -6039,9 +6073,9 @@ } }, "node_modules/globals": { - "version": "16.5.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-16.5.0.tgz", - "integrity": "sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==", + "version": "17.3.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-17.3.0.tgz", + "integrity": "sha512-yMqGUQVVCkD4tqjOJf3TnrvaaHDMYp4VlUSObbkIiuCPe/ofdMBFIAcBbCSRFWOnos6qRiTVStDwqPLUclaxIw==", "dev": true, "license": "MIT", "engines": { @@ -6909,9 +6943,9 @@ } }, "node_modules/jsdoc-type-pratt-parser": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-7.1.0.tgz", - "integrity": "sha512-SX7q7XyCwzM/MEDCYz0l8GgGbJAACGFII9+WfNYr5SLEKukHWRy2Jk3iWRe7P+lpYJNs7oQ+OSei4JtKGUjd7A==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-7.1.1.tgz", + "integrity": "sha512-/2uqY7x6bsrpi3i9LVU6J89352C0rpMk0as8trXxCtvd4kPk1ke/Eyif6wqfSLvoNJqcDG9Vk4UsXgygzCt2xA==", "dev": true, "license": "MIT", "engines": { @@ -7240,9 +7274,9 @@ } }, "node_modules/minimatch": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.3.tgz", - "integrity": 
"sha512-M2GCs7Vk83NxkUyQV1bkABc4yxgz9kILhHImZiBPAZ9ybuvCb0/H7lEl5XvIg3g+9d4eNotkZA5IWwYl0tibaA==", + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", + "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==", "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" @@ -7928,9 +7962,9 @@ } }, "node_modules/readdir-glob/node_modules/minimatch": { - "version": "5.1.7", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.7.tgz", - "integrity": "sha512-FjiwU9HaHW6YB3H4a1sFudnv93lvydNjz2lmyUXR6IwKhGI+bgL3SOZrBGn6kvvX2pJvhEkGSGjyTHN47O4rqA==", + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" @@ -8836,9 +8870,9 @@ } }, "node_modules/tar": { - "version": "7.5.7", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz", - "integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==", + "version": "7.5.10", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.10.tgz", + "integrity": "sha512-8mOPs1//5q/rlkNSPcCegA6hiHJYDmSLEI8aMH/CdSQJNWztHC9WHNam5zdQlfpTwB9Xp7IBEsHfV5LKMJGVAw==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -9168,16 +9202,16 @@ } }, "node_modules/typescript-eslint": { - "version": "8.56.0", - "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.0.tgz", - "integrity": "sha512-c7toRLrotJ9oixgdW7liukZpsnq5CZ7PuKztubGYlNppuTqhIoWfhgHo/7EU0v06gS2l/x0i2NEFK1qMIf0rIg==", + "version": "8.56.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.56.1.tgz", + "integrity": "sha512-U4lM6pjmBX7J5wk4szltF7I1cGBHXZopnAXCMXb3+fZ3B/0Z3hq3wS/CCUB2NZBNAExK92mCU2tEohWuwVMsDQ==", "dev": true, 
"license": "MIT", "dependencies": { - "@typescript-eslint/eslint-plugin": "8.56.0", - "@typescript-eslint/parser": "8.56.0", - "@typescript-eslint/typescript-estree": "8.56.0", - "@typescript-eslint/utils": "8.56.0" + "@typescript-eslint/eslint-plugin": "8.56.1", + "@typescript-eslint/parser": "8.56.1", + "@typescript-eslint/typescript-estree": "8.56.1", + "@typescript-eslint/utils": "8.56.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" diff --git a/package.json b/package.json index 566d43099..85628de5c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "codeql", - "version": "4.32.5", + "version": "4.32.7", "private": true, "description": "CodeQL action", "scripts": { @@ -9,7 +9,7 @@ "lint": "eslint --report-unused-disable-directives --max-warnings=0 .", "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif", "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . 
--fix", - "ava": "npm run transpile && ava --serial --verbose", + "ava": "npm run transpile && ava --verbose", "test": "npm run ava -- src/", "test-debug": "npm run test -- --timeout=20m", "transpile": "tsc --build --verbose" @@ -58,6 +58,7 @@ "@types/js-yaml": "^4.0.9", "@types/node": "^20.19.9", "@types/node-forge": "^1.3.14", + "@types/sarif": "^2.1.7", "@types/semver": "^7.7.1", "@types/sinon": "^21.0.0", "ava": "^6.4.1", @@ -66,14 +67,14 @@ "eslint-import-resolver-typescript": "^3.8.7", "eslint-plugin-github": "^6.0.0", "eslint-plugin-import-x": "^4.16.1", - "eslint-plugin-jsdoc": "^62.5.0", + "eslint-plugin-jsdoc": "^62.7.1", "eslint-plugin-no-async-foreach": "^0.1.1", "glob": "^11.1.0", - "globals": "^16.5.0", + "globals": "^17.3.0", "nock": "^14.0.11", "sinon": "^21.0.1", "typescript": "^5.9.3", - "typescript-eslint": "^8.56.0" + "typescript-eslint": "^8.56.1" }, "overrides": { "@actions/tool-cache": { diff --git a/pr-checks/.gitignore b/pr-checks/.gitignore index 979f35ea9..c2658d7d1 100644 --- a/pr-checks/.gitignore +++ b/pr-checks/.gitignore @@ -1,3 +1 @@ -env -__pycache__/ -*.pyc +node_modules/ diff --git a/pr-checks/__init__.py b/pr-checks/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/pr-checks/checks/analysis-kinds.yml b/pr-checks/checks/analysis-kinds.yml index 6eedaec02..1fed31053 100644 --- a/pr-checks/checks/analysis-kinds.yml +++ b/pr-checks/checks/analysis-kinds.yml @@ -40,7 +40,7 @@ steps: post-processed-sarif-path: "${{ runner.temp }}/post-processed" - name: Upload SARIF files - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: | analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }} @@ -48,7 +48,7 @@ steps: retention-days: 7 - name: Upload post-processed SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: | post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }} diff --git 
a/pr-checks/checks/analyze-ref-input.yml b/pr-checks/checks/analyze-ref-input.yml index e9d2cd176..f3425ec70 100644 --- a/pr-checks/checks/analyze-ref-input.yml +++ b/pr-checks/checks/analyze-ref-input.yml @@ -2,7 +2,6 @@ name: "Analyze: 'ref' and 'sha' from inputs" description: "Checks that specifying 'ref' and 'sha' as inputs works" versions: ["default"] installGo: true -installPython: true installDotNet: true steps: - uses: ./../action/init diff --git a/pr-checks/checks/autobuild-direct-tracing-with-working-dir.yml b/pr-checks/checks/autobuild-direct-tracing-with-working-dir.yml index 97c832a28..f492ac85a 100644 --- a/pr-checks/checks/autobuild-direct-tracing-with-working-dir.yml +++ b/pr-checks/checks/autobuild-direct-tracing-with-working-dir.yml @@ -5,7 +5,7 @@ description: > autobuild Action. operatingSystems: ["ubuntu", "windows"] versions: ["linked", "nightly-latest"] -installJava: "true" +installJava: true env: CODEQL_ACTION_AUTOBUILD_BUILD_MODE_DIRECT_TRACING: true steps: diff --git a/pr-checks/checks/build-mode-autobuild.yml b/pr-checks/checks/build-mode-autobuild.yml index 8a51926fa..0fc260f7b 100644 --- a/pr-checks/checks/build-mode-autobuild.yml +++ b/pr-checks/checks/build-mode-autobuild.yml @@ -2,8 +2,8 @@ name: "Build mode autobuild" description: "An end-to-end integration test of a Java repository built using 'build-mode: autobuild'" operatingSystems: ["ubuntu", "windows"] versions: ["linked", "nightly-latest"] -installJava: "true" -installYq: "true" +installJava: true +installYq: true steps: - name: Set up Java test repo configuration run: | diff --git a/pr-checks/checks/bundle-from-nightly.yml b/pr-checks/checks/bundle-from-nightly.yml index 4f68b7829..ac7076cb0 100644 --- a/pr-checks/checks/bundle-from-nightly.yml +++ b/pr-checks/checks/bundle-from-nightly.yml @@ -11,5 +11,5 @@ steps: tools: ${{ steps.prepare-test.outputs.tools-url }} languages: javascript - name: Fail if the CodeQL version is not a nightly - if: 
"!contains(steps.init.outputs.codeql-version, '+')" + if: ${{ !contains(steps.init.outputs.codeql-version, '+') }} run: exit 1 diff --git a/pr-checks/checks/bundle-zstd.yml b/pr-checks/checks/bundle-zstd.yml index 371525aab..002334523 100644 --- a/pr-checks/checks/bundle-zstd.yml +++ b/pr-checks/checks/bundle-zstd.yml @@ -27,7 +27,7 @@ steps: output: ${{ runner.temp }}/results upload-database: false - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: ${{ matrix.os }}-zstd-bundle.sarif path: ${{ runner.temp }}/results/javascript.sarif diff --git a/pr-checks/checks/config-export.yml b/pr-checks/checks/config-export.yml index fc4c68ad6..ea66ad994 100644 --- a/pr-checks/checks/config-export.yml +++ b/pr-checks/checks/config-export.yml @@ -12,7 +12,7 @@ steps: output: "${{ runner.temp }}/results" upload-database: false - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: config-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json path: "${{ runner.temp }}/results/javascript.sarif" diff --git a/pr-checks/checks/diagnostics-export.yml b/pr-checks/checks/diagnostics-export.yml index ccf5b3501..1b2696937 100644 --- a/pr-checks/checks/diagnostics-export.yml +++ b/pr-checks/checks/diagnostics-export.yml @@ -25,7 +25,7 @@ steps: output: "${{ runner.temp }}/results" upload-database: false - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: diagnostics-export-${{ matrix.os }}-${{ matrix.version }}.sarif.json path: "${{ runner.temp }}/results/javascript.sarif" diff --git a/pr-checks/checks/export-file-baseline-information.yml b/pr-checks/checks/export-file-baseline-information.yml index e45fc58ca..114aa8c43 100644 --- a/pr-checks/checks/export-file-baseline-information.yml +++ b/pr-checks/checks/export-file-baseline-information.yml @@ -19,7 +19,7 @@ steps: with: output: "${{ runner.temp }}/results" - name: Upload SARIF - uses: 
actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: with-baseline-information-${{ matrix.os }}-${{ matrix.version }}.sarif.json path: "${{ runner.temp }}/results/javascript.sarif" diff --git a/pr-checks/checks/job-run-uuid-sarif.yml b/pr-checks/checks/job-run-uuid-sarif.yml index e0610846b..815d88000 100644 --- a/pr-checks/checks/job-run-uuid-sarif.yml +++ b/pr-checks/checks/job-run-uuid-sarif.yml @@ -11,7 +11,7 @@ steps: with: output: "${{ runner.temp }}/results" - name: Upload SARIF - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: ${{ matrix.os }}-${{ matrix.version }}.sarif.json path: "${{ runner.temp }}/results/javascript.sarif" diff --git a/pr-checks/checks/local-bundle.yml b/pr-checks/checks/local-bundle.yml index c0930772e..2d090d432 100644 --- a/pr-checks/checks/local-bundle.yml +++ b/pr-checks/checks/local-bundle.yml @@ -2,7 +2,6 @@ name: "Local CodeQL bundle" description: "Tests using a CodeQL bundle from a local file rather than a URL" versions: ["linked"] installGo: true -installPython: true installDotNet: true steps: - name: Fetch latest CodeQL bundle diff --git a/pr-checks/checks/packaging-codescanning-config-inputs-js.yml b/pr-checks/checks/packaging-codescanning-config-inputs-js.yml index 6fd0f7c8a..20fd16a48 100644 --- a/pr-checks/checks/packaging-codescanning-config-inputs-js.yml +++ b/pr-checks/checks/packaging-codescanning-config-inputs-js.yml @@ -3,7 +3,6 @@ description: "Checks that specifying packages using a combination of a config fi versions: ["linked", "default", "nightly-latest"] # This feature is not compatible with old CLIs installGo: true installNode: true -installPython: true installDotNet: true steps: - uses: ./../action/init diff --git a/pr-checks/checks/remote-config.yml b/pr-checks/checks/remote-config.yml index 24249156e..9211cb212 100644 --- a/pr-checks/checks/remote-config.yml +++ b/pr-checks/checks/remote-config.yml @@ -6,7 +6,6 @@ versions: - linked - nightly-latest 
installGo: true -installPython: true installDotNet: true steps: - uses: ./../action/init diff --git a/pr-checks/checks/unset-environment.yml b/pr-checks/checks/unset-environment.yml index 4cc728600..dd41f159b 100644 --- a/pr-checks/checks/unset-environment.yml +++ b/pr-checks/checks/unset-environment.yml @@ -6,7 +6,6 @@ versions: - linked - nightly-latest installGo: true -installPython: true installDotNet: true steps: - uses: ./../action/init diff --git a/pr-checks/checks/upload-ref-sha-input.yml b/pr-checks/checks/upload-ref-sha-input.yml index 0c8059a51..95b753b2b 100644 --- a/pr-checks/checks/upload-ref-sha-input.yml +++ b/pr-checks/checks/upload-ref-sha-input.yml @@ -2,7 +2,6 @@ name: "Upload-sarif: 'ref' and 'sha' from inputs" description: "Checks that specifying 'ref' and 'sha' as inputs works" versions: ["default"] installGo: true -installPython: true installDotNet: true steps: - uses: ./../action/init diff --git a/pr-checks/checks/upload-sarif.yml b/pr-checks/checks/upload-sarif.yml index cfe66a3f8..403cdab3a 100644 --- a/pr-checks/checks/upload-sarif.yml +++ b/pr-checks/checks/upload-sarif.yml @@ -3,7 +3,6 @@ description: "Checks that uploading SARIFs to the code quality endpoint works" versions: ["default"] analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"] installGo: true -installPython: true installDotNet: true steps: - uses: ./../action/init @@ -32,16 +31,16 @@ steps: category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:all-files/ - name: "Fail for missing output from `upload-sarif` step for `code-scanning`" - if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning)" + if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-scanning) run: exit 1 - name: "Fail for missing output from `upload-sarif` step for 
`code-quality`" - if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)" + if: contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality) run: exit 1 - name: Upload single SARIF file for Code Scanning uses: ./../action/upload-sarif id: upload-single-sarif-code-scanning - if: "contains(matrix.analysis-kinds, 'code-scanning')" + if: contains(matrix.analysis-kinds, 'code-scanning') with: ref: 'refs/heads/main' sha: '5e235361806c361d4d3f8859e3c897658025a9a2' @@ -49,12 +48,12 @@ steps: category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-scanning/ - name: "Fail for missing output from `upload-single-sarif-code-scanning` step" - if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning)" + if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-sarif-code-scanning.outputs.sarif-ids).code-scanning) run: exit 1 - name: Upload single SARIF file for Code Quality uses: ./../action/upload-sarif id: upload-single-sarif-code-quality - if: "contains(matrix.analysis-kinds, 'code-quality')" + if: contains(matrix.analysis-kinds, 'code-quality') with: ref: 'refs/heads/main' sha: '5e235361806c361d4d3f8859e3c897658025a9a2' @@ -62,16 +61,16 @@ steps: category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:single-code-quality/ - name: "Fail for missing output from `upload-single-sarif-code-quality` step" - if: "contains(matrix.analysis-kinds, 'code-quality') && !(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality)" + if: contains(matrix.analysis-kinds, 'code-quality') && 
!(fromJSON(steps.upload-single-sarif-code-quality.outputs.sarif-ids).code-quality) run: exit 1 - name: Change SARIF file extension - if: "contains(matrix.analysis-kinds, 'code-scanning')" + if: contains(matrix.analysis-kinds, 'code-scanning') run: mv ${{ runner.temp }}/results/javascript.sarif ${{ runner.temp }}/results/javascript.sarif.json - name: Upload single non-`.sarif` file uses: ./../action/upload-sarif id: upload-single-non-sarif - if: "contains(matrix.analysis-kinds, 'code-scanning')" + if: contains(matrix.analysis-kinds, 'code-scanning') with: ref: 'refs/heads/main' sha: '5e235361806c361d4d3f8859e3c897658025a9a2' @@ -79,5 +78,5 @@ steps: category: | ${{ github.workflow }}:upload-sarif/analysis-kinds:${{ matrix.analysis-kinds }}/os:${{ matrix.os }}/version:${{ matrix.version }}/test:non-sarif/ - name: "Fail for missing output from `upload-single-non-sarif` step" - if: "contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning)" + if: contains(matrix.analysis-kinds, 'code-scanning') && !(fromJSON(steps.upload-single-non-sarif.outputs.sarif-ids).code-scanning) run: exit 1 diff --git a/pr-checks/checks/with-checkout-path.yml b/pr-checks/checks/with-checkout-path.yml index 230e342e3..6f69d45d8 100644 --- a/pr-checks/checks/with-checkout-path.yml +++ b/pr-checks/checks/with-checkout-path.yml @@ -2,7 +2,6 @@ name: "Use a custom `checkout_path`" description: "Checks that a custom `checkout_path` will find the proper commit_oid" versions: ["linked"] installGo: true -installPython: true installDotNet: true steps: # This ensures we don't accidentally use the original checkout for any part of the test. 
diff --git a/pr-checks/package-lock.json b/pr-checks/package-lock.json new file mode 100644 index 000000000..2facb7322 --- /dev/null +++ b/pr-checks/package-lock.json @@ -0,0 +1,605 @@ +{ + "name": "pr-checks", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "yaml": "^2.8.2" + }, + "devDependencies": { + "@types/node": "^20.19.9", + "tsx": "^4.21.0", + "typescript": "^5.9.3" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": 
"sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + 
}, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": "sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "cpu": [ + "x64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@types/node": { + "version": "20.19.35", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.35.tgz", + "integrity": "sha512-Uarfe6J91b9HAUXxjvSOdiO2UPOKLm07Q1oh0JHxoZ1y8HoqxDAu3gVrsrOHeiio0kSsoVBt4wFrKOm0dKxVPQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/esbuild": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + 
"esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + "@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.6", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz", + "integrity": "sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/yaml": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" + } + } + } +} diff --git a/pr-checks/package.json b/pr-checks/package.json new file mode 100644 index 000000000..b323b98b8 --- 
/dev/null +++ b/pr-checks/package.json @@ -0,0 +1,12 @@ +{ + "private": true, + "description": "Dependencies for the sync.ts", + "dependencies": { + "yaml": "^2.8.2" + }, + "devDependencies": { + "@types/node": "^20.19.9", + "tsx": "^4.21.0", + "typescript": "^5.9.3" + } +} diff --git a/pr-checks/readme.md b/pr-checks/readme.md index 283ed3599..81eff0cda 100644 --- a/pr-checks/readme.md +++ b/pr-checks/readme.md @@ -6,9 +6,9 @@ to one of the files in this directory. ## Updating workflows +Run `./sync.sh` to invoke the workflow generator and re-generate the workflow files in `.github/workflows/` based on the templates in `pr-checks/checks/`. + +Alternatively, you can use `just`: + 1. Install https://github.com/casey/just by whichever way you prefer. 2. Run `just update-pr-checks` in your terminal. - -### If you don't want to install `just` - -Manually run each step in the `justfile`. diff --git a/pr-checks/sync.py b/pr-checks/sync.py deleted file mode 100755 index ded79c60a..000000000 --- a/pr-checks/sync.py +++ /dev/null @@ -1,402 +0,0 @@ -#!/usr/bin/env python - -import ruamel.yaml -from ruamel.yaml.scalarstring import SingleQuotedScalarString, LiteralScalarString -import pathlib -import os - -# The default set of CodeQL Bundle versions to use for the PR checks. -defaultTestVersions = [ - # The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts` - "stable-v2.17.6", - # The last CodeQL release in the 2.18 series. - "stable-v2.18.4", - # The last CodeQL release in the 2.19 series. - "stable-v2.19.4", - # The last CodeQL release in the 2.20 series. - "stable-v2.20.7", - # The last CodeQL release in the 2.21 series. - "stable-v2.21.4", - # The last CodeQL release in the 2.22 series. - "stable-v2.22.4", - # The default version of CodeQL for Dotcom, as determined by feature flags. - "default", - # The version of CodeQL shipped with the Action in `defaults.json`. 
During the release process - # for a new CodeQL release, there will be a period of time during which this will be newer than - # the default version on Dotcom. - "linked", - # A nightly build directly from the our private repo, built in the last 24 hours. - "nightly-latest" -] - -# When updating the ruamel.yaml version here, update the PR check in -# `.github/workflows/pr-checks.yml` too. -header = """# Warning: This file is generated automatically, and should not be modified. -# Instead, please modify the template in the pr-checks directory and run: -# pr-checks/sync.sh -# to regenerate this file. - -""" - - -def is_truthy(value): - if isinstance(value, str): - return value.lower() == 'true' - return bool(value) - - - -class NonAliasingRTRepresenter(ruamel.yaml.representer.RoundTripRepresenter): - def ignore_aliases(self, data): - return True - - -def writeHeader(checkStream): - checkStream.write(header) - - -yaml = ruamel.yaml.YAML() -yaml.Representer = NonAliasingRTRepresenter -yaml.indent(mapping=2, sequence=4, offset=2) - -this_dir = pathlib.Path(__file__).resolve().parent - -allJobs = {} -collections = {} -for file in sorted((this_dir / 'checks').glob('*.yml')): - with open(file, 'r') as checkStream: - checkSpecification = yaml.load(checkStream) - matrix = [] - - workflowInputs = {} - if 'inputs' in checkSpecification: - workflowInputs = checkSpecification['inputs'] - - for version in checkSpecification.get('versions', defaultTestVersions): - if version == "latest": - raise ValueError('Did not recognize "version: latest". 
Did you mean "version: linked"?') - - runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"] - operatingSystems = checkSpecification.get('operatingSystems', ["ubuntu"]) - - for operatingSystem in operatingSystems: - runnerImagesForOs = [image for image in runnerImages if image.startswith(operatingSystem)] - - for runnerImage in runnerImagesForOs: - matrix.append({ - 'os': runnerImage, - 'version': version - }) - - useAllPlatformBundle = "false" # Default to false - if checkSpecification.get('useAllPlatformBundle'): - useAllPlatformBundle = checkSpecification['useAllPlatformBundle'] - - - if 'analysisKinds' in checkSpecification: - newMatrix = [] - for matrixInclude in matrix: - for analysisKind in checkSpecification.get('analysisKinds'): - newMatrix.append( - matrixInclude | - { 'analysis-kinds': analysisKind } - ) - matrix = newMatrix - - # Construct the workflow steps needed for this check. - steps = [ - { - 'name': 'Check out repository', - 'uses': 'actions/checkout@v6' - }, - ] - - installNode = is_truthy(checkSpecification.get('installNode', '')) - - if installNode: - steps.extend([ - { - 'name': 'Install Node.js', - 'uses': 'actions/setup-node@v6', - 'with': { - 'node-version': '20.x', - 'cache': 'npm', - }, - }, - { - 'name': 'Install dependencies', - 'run': 'npm ci', - }, - ]) - - steps.append({ - 'name': 'Prepare test', - 'id': 'prepare-test', - 'uses': './.github/actions/prepare-test', - 'with': { - 'version': '${{ matrix.version }}', - 'use-all-platform-bundle': useAllPlatformBundle, - # If the action is being run from a container, then do not setup kotlin. - # This is because the kotlin binaries cannot be downloaded from the container. 
- 'setup-kotlin': str(not 'container' in checkSpecification).lower(), - } - }) - - installGo = is_truthy(checkSpecification.get('installGo', '')) - - if installGo: - baseGoVersionExpr = '>=1.21.0' - workflowInputs['go-version'] = { - 'type': 'string', - 'description': 'The version of Go to install', - 'required': False, - 'default': baseGoVersionExpr, - } - - steps.append({ - 'name': 'Install Go', - 'uses': 'actions/setup-go@v6', - 'with': { - 'go-version': '${{ inputs.go-version || \'' + baseGoVersionExpr + '\' }}', - # to avoid potentially misleading autobuilder results where we expect it to download - # dependencies successfully, but they actually come from a warm cache - 'cache': False - } - }) - - installJava = is_truthy(checkSpecification.get('installJava', '')) - - if installJava: - baseJavaVersionExpr = '17' - workflowInputs['java-version'] = { - 'type': 'string', - 'description': 'The version of Java to install', - 'required': False, - 'default': baseJavaVersionExpr, - } - - steps.append({ - 'name': 'Install Java', - 'uses': 'actions/setup-java@v5', - 'with': { - 'java-version': '${{ inputs.java-version || \'' + baseJavaVersionExpr + '\' }}', - 'distribution': 'temurin' - } - }) - - installPython = is_truthy(checkSpecification.get('installPython', '')) - - if installPython: - basePythonVersionExpr = '3.13' - workflowInputs['python-version'] = { - 'type': 'string', - 'description': 'The version of Python to install', - 'required': False, - 'default': basePythonVersionExpr, - } - - steps.append({ - 'name': 'Install Python', - 'if': 'matrix.version != \'nightly-latest\'', - 'uses': 'actions/setup-python@v6', - 'with': { - 'python-version': '${{ inputs.python-version || \'' + basePythonVersionExpr + '\' }}' - } - }) - - installDotNet = is_truthy(checkSpecification.get('installDotNet', '')) - - if installDotNet: - baseDotNetVersionExpr = '9.x' - workflowInputs['dotnet-version'] = { - 'type': 'string', - 'description': 'The version of .NET to install', - 
'required': False, - 'default': baseDotNetVersionExpr, - } - - steps.append({ - 'name': 'Install .NET', - 'uses': 'actions/setup-dotnet@v5', - 'with': { - 'dotnet-version': '${{ inputs.dotnet-version || \'' + baseDotNetVersionExpr + '\' }}' - } - }) - - installYq = is_truthy(checkSpecification.get('installYq', '')) - - if installYq: - steps.append({ - 'name': 'Install yq', - 'if': "runner.os == 'Windows'", - 'env': { - 'YQ_PATH': '${{ runner.temp }}/yq', - # This is essentially an arbitrary version of `yq`, which happened to be the one that - # `choco` fetched when we moved away from using that here. - # See https://github.com/github/codeql-action/pull/3423 - 'YQ_VERSION': 'v4.50.1' - }, - 'run': LiteralScalarString( - 'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' - 'echo "$YQ_PATH" >> "$GITHUB_PATH"' - ), - }) - - # If container initialisation steps are present in the check specification, - # make sure to execute them first. 
- if 'container' in checkSpecification and 'container-init-steps' in checkSpecification: - steps.insert(0, checkSpecification['container-init-steps']) - - - steps.extend(checkSpecification['steps']) - - checkJob = { - 'strategy': { - 'fail-fast': False, - 'matrix': { - 'include': matrix - } - }, - 'name': checkSpecification['name'], - 'if': 'github.triggering_actor != \'dependabot[bot]\'', - 'permissions': { - 'contents': 'read', - 'security-events': 'read' - }, - 'timeout-minutes': 45, - 'runs-on': '${{ matrix.os }}', - 'steps': steps, - } - if 'permissions' in checkSpecification: - checkJob['permissions'] = checkSpecification['permissions'] - - for key in ["env", "container", "services"]: - if key in checkSpecification: - checkJob[key] = checkSpecification[key] - - checkJob['env'] = checkJob.get('env', {}) - if 'CODEQL_ACTION_TEST_MODE' not in checkJob['env']: - checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True - checkName = file.stem - - # If this check belongs to a named collection, record it. - if 'collection' in checkSpecification: - collection_name = checkSpecification['collection'] - collections.setdefault(collection_name, []).append({ - 'specification': checkSpecification, - 'checkName': checkName, - 'inputs': workflowInputs - }) - - raw_file = this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml.raw" - with open(raw_file, 'w', newline='\n') as output_stream: - extraGroupName = "" - for inputName in workflowInputs.keys(): - extraGroupName += "-${{inputs." 
+ inputName + "}}" - - writeHeader(output_stream) - yaml.dump({ - 'name': f"PR Check - {checkSpecification['name']}", - 'env': { - 'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}', - 'GO111MODULE': 'auto' - }, - 'on': { - 'push': { - 'branches': ['main', 'releases/v*'] - }, - 'pull_request': { - 'types': ["opened", "synchronize", "reopened", "ready_for_review"] - }, - 'merge_group': { - 'types': ['checks_requested'] - }, - 'schedule': [{'cron': SingleQuotedScalarString('0 5 * * *')}], - 'workflow_dispatch': { - 'inputs': workflowInputs - }, - 'workflow_call': { - 'inputs': workflowInputs - } - }, - 'defaults': { - 'run': { - 'shell': 'bash', - }, - }, - 'concurrency': { - # Cancel in-progress workflows in the same 'group' for pull_request events, - # but not other event types. This should have the effect that workflows on PRs - # get cancelled if there is a newer workflow in the same concurrency group. - # For other events, the new workflows should wait until earlier ones have finished. - # This should help reduce the number of concurrent workflows on the repo, and - # consequently the number of concurrent API requests. - # Note, the `|| false` is intentional to rule out that this somehow ends up being - # `true` since we observed workflows for non-`pull_request` events getting cancelled. - 'cancel-in-progress': "${{ github.event_name == 'pull_request' || false }}", - # The group is determined by the workflow name, the ref, and the input values. - # The base name is hard-coded to avoid issues when the workflow is triggered by - # a `workflow_call` event (where `github.workflow` would be the name of the caller). - # The input values are added, since they may result in different behaviour for a - # given workflow on the same ref. 
- 'group': checkName + "-${{github.ref}}" + extraGroupName - }, - 'jobs': { - checkName: checkJob - } - }, output_stream) - - with open(raw_file, 'r') as input_stream: - with open(this_dir.parent / ".github" / "workflows" / f"__{checkName}.yml", 'w', newline='\n') as output_stream: - content = input_stream.read() - output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+[''])) - os.remove(raw_file) - -# write workflow files for collections -for collection_name in collections: - jobs = {} - combinedInputs = {} - - for check in collections[collection_name]: - checkName = check['checkName'] - checkSpecification = check['specification'] - checkInputs = check['inputs'] - checkWith = {} - - combinedInputs |= checkInputs - - for inputName in checkInputs.keys(): - checkWith[inputName] = "${{ inputs." + inputName + " }}" - - jobs[checkName] = { - 'name': checkSpecification['name'], - 'permissions': { - 'contents': 'read', - 'security-events': 'read' - }, - 'uses': "./.github/workflows/" + f"__{checkName}.yml", - 'with': checkWith - } - - raw_file = this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml.raw" - with open(raw_file, 'w') as output_stream: - writeHeader(output_stream) - yaml.dump({ - 'name': f"Manual Check - {collection_name}", - 'env': { - 'GITHUB_TOKEN': '${{ secrets.GITHUB_TOKEN }}', - 'GO111MODULE': 'auto' - }, - 'on': { - 'workflow_dispatch': { - 'inputs': combinedInputs - }, - }, - 'jobs': jobs - }, output_stream) - - with open(raw_file, 'r') as input_stream: - with open(this_dir.parent / ".github" / "workflows" / f"__{collection_name}.yml", 'w', newline='\n') as output_stream: - content = input_stream.read() - output_stream.write("\n".join(list(map(lambda x:x.rstrip(), content.splitlines()))+[''])) - os.remove(raw_file) diff --git a/pr-checks/sync.sh b/pr-checks/sync.sh index 85df3272c..c05959449 100755 --- a/pr-checks/sync.sh +++ b/pr-checks/sync.sh @@ -2,8 +2,14 @@ set -e cd "$(dirname "$0")" -python3 -m venv 
env -source env/*/activate -pip3 install ruamel.yaml==0.17.31 -python3 sync.py +# Run `npm ci` in CI or `npm install` otherwise. +if [ "$GITHUB_ACTIONS" = "true" ]; then + echo "In Actions, running 'npm ci' for 'sync.ts'..." + npm ci +else + echo "Running 'npm install' for 'sync.ts'..." + npm install --no-audit --no-fund +fi + +npx tsx sync.ts diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts new file mode 100755 index 000000000..ca2b069cb --- /dev/null +++ b/pr-checks/sync.ts @@ -0,0 +1,525 @@ +#!/usr/bin/env npx tsx + +import * as fs from "fs"; +import * as path from "path"; + +import * as yaml from "yaml"; + +/** Known workflow input names. */ +enum KnownInputName { + GoVersion = "go-version", + JavaVersion = "java-version", + PythonVersion = "python-version", + DotnetVersion = "dotnet-version", +} + +/** + * Represents workflow input definitions. + */ +interface WorkflowInput { + type: string; + description: string; + required: boolean; + default: string; +} + +/** A partial mapping from known input names to input definitions. */ +type WorkflowInputs = Partial>; + +/** + * Represents PR check specifications. + */ +interface Specification { + /** The display name for the check. */ + name: string; + /** The workflow steps specific to this check. */ + steps: any[]; + /** Workflow-level input definitions forwarded to `workflow_dispatch`/`workflow_call`. */ + inputs?: Record; + /** CodeQL bundle versions to test against. Defaults to `DEFAULT_TEST_VERSIONS`. */ + versions?: string[]; + /** Operating system prefixes used to select runner images (e.g. `["ubuntu", "macos"]`). */ + operatingSystems?: string[]; + /** Whether to use the all-platform CodeQL bundle. */ + useAllPlatformBundle?: string; + /** Values for the `analysis-kinds` matrix dimension. 
*/ + analysisKinds?: string[]; + + installNode?: boolean; + installGo?: boolean; + installJava?: boolean; + installPython?: boolean; + installDotNet?: boolean; + installYq?: boolean; + + /** Container image configuration for the job. */ + container?: any; + /** Service containers for the job. */ + services?: any; + + /** Custom permissions override for the job. */ + permissions?: Record; + /** Extra environment variables for the job. */ + env?: Record; + + /** If set, this check is part of a named collection that gets its own caller workflow. */ + collection?: string; +} + +// The default set of CodeQL Bundle versions to use for the PR checks. +const defaultTestVersions = [ + // The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts` + "stable-v2.17.6", + // The last CodeQL release in the 2.18 series. + "stable-v2.18.4", + // The last CodeQL release in the 2.19 series. + "stable-v2.19.4", + // The last CodeQL release in the 2.20 series. + "stable-v2.20.7", + // The last CodeQL release in the 2.21 series. + "stable-v2.21.4", + // The last CodeQL release in the 2.22 series. + "stable-v2.22.4", + // The default version of CodeQL for Dotcom, as determined by feature flags. + "default", + // The version of CodeQL shipped with the Action in `defaults.json`. During the release process + // for a new CodeQL release, there will be a period of time during which this will be newer than + // the default version on Dotcom. + "linked", + // A nightly build directly from our private repo, built in the last 24 hours. + "nightly-latest", +]; + +const THIS_DIR = __dirname; +const CHECKS_DIR = path.join(THIS_DIR, "checks"); +const OUTPUT_DIR = path.join(THIS_DIR, "..", ".github", "workflows"); + +/** + * Loads and parses a YAML file.
+ */ +function loadYaml(filePath: string): yaml.Document { + const content = fs.readFileSync(filePath, "utf8"); + return yaml.parseDocument(content); +} + +/** + * Serialize a value to YAML and write it to a file, prepended with the + * standard header comment. + */ +function writeYaml(filePath: string, workflow: any): void { + const header = `# Warning: This file is generated automatically, and should not be modified. +# Instead, please modify the template in the pr-checks directory and run: +# pr-checks/sync.sh +# to regenerate this file. + +`; + const workflowDoc = new yaml.Document(workflow, { + aliasDuplicateObjects: false, + }); + const yamlStr = yaml.stringify(workflowDoc, { + aliasDuplicateObjects: false, + singleQuote: true, + lineWidth: 0, + }); + fs.writeFileSync(filePath, stripTrailingWhitespace(header + yamlStr), "utf8"); +} + +/** + * Strip trailing whitespace from each line. + */ +function stripTrailingWhitespace(content: string): string { + return content + .split("\n") + .map((line) => line.trimEnd()) + .join("\n"); +} + +/** + * Main entry point for the sync script. + */ +function main(): void { + // Ensure the output directory exists. + fs.mkdirSync(OUTPUT_DIR, { recursive: true }); + + // Discover and sort all check specification files. 
+ const checkFiles = fs + .readdirSync(CHECKS_DIR) + .filter((f) => f.endsWith(".yml")) + .sort() + .map((f) => path.join(CHECKS_DIR, f)); + + console.log(`Found ${checkFiles.length} check specification(s).`); + + const collections: Record< + string, + Array<{ + specification: Specification; + checkName: string; + inputs: Record; + }> + > = {}; + + for (const file of checkFiles) { + const checkName = path.basename(file, ".yml"); + const specDocument = loadYaml(file); + const checkSpecification = specDocument.toJS() as Specification; + + console.log(`Processing: ${checkName} — "${checkSpecification.name}"`); + + const workflowInputs: WorkflowInputs = {}; + let matrix: Array> = []; + + for (const version of checkSpecification.versions ?? defaultTestVersions) { + if (version === "latest") { + throw new Error( + 'Did not recognise "version: latest". Did you mean "version: linked"?', + ); + } + + const runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]; + const operatingSystems = checkSpecification.operatingSystems ?? [ + "ubuntu", + ]; + + for (const operatingSystem of operatingSystems) { + const runnerImagesForOs = runnerImages.filter((image) => + image.startsWith(operatingSystem), + ); + + for (const runnerImage of runnerImagesForOs) { + matrix.push({ + os: runnerImage, + version, + }); + } + } + } + + const useAllPlatformBundle = checkSpecification.useAllPlatformBundle + ? checkSpecification.useAllPlatformBundle + : "false"; + + if (checkSpecification.analysisKinds) { + const newMatrix: Array> = []; + for (const matrixInclude of matrix) { + for (const analysisKind of checkSpecification.analysisKinds) { + newMatrix.push({ + ...matrixInclude, + "analysis-kinds": analysisKind, + }); + } + } + matrix = newMatrix; + } + + // Construct the workflow steps needed for this check. 
+ const steps: any[] = [ + { + name: "Check out repository", + uses: "actions/checkout@v6", + }, + ]; + + const installNode = checkSpecification.installNode; + + if (installNode) { + steps.push( + { + name: "Install Node.js", + uses: "actions/setup-node@v6", + with: { + "node-version": "20.x", + cache: "npm", + }, + }, + { + name: "Install dependencies", + run: "npm ci", + }, + ); + } + + steps.push({ + name: "Prepare test", + id: "prepare-test", + uses: "./.github/actions/prepare-test", + with: { + version: "${{ matrix.version }}", + "use-all-platform-bundle": useAllPlatformBundle, + // If the action is being run from a container, then do not setup kotlin. + // This is because the kotlin binaries cannot be downloaded from the container. + "setup-kotlin": "container" in checkSpecification ? "false" : "true", + }, + }); + + const installGo = checkSpecification.installGo; + + if (installGo) { + const baseGoVersionExpr = ">=1.21.0"; + workflowInputs[KnownInputName.GoVersion] = { + type: "string", + description: "The version of Go to install", + required: false, + default: baseGoVersionExpr, + }; + + steps.push({ + name: "Install Go", + uses: "actions/setup-go@v6", + with: { + "go-version": + "${{ inputs.go-version || '" + baseGoVersionExpr + "' }}", + // to avoid potentially misleading autobuilder results where we expect it to download + // dependencies successfully, but they actually come from a warm cache + cache: false, + }, + }); + } + + const installJava = checkSpecification.installJava; + + if (installJava) { + const baseJavaVersionExpr = "17"; + workflowInputs[KnownInputName.JavaVersion] = { + type: "string", + description: "The version of Java to install", + required: false, + default: baseJavaVersionExpr, + }; + + steps.push({ + name: "Install Java", + uses: "actions/setup-java@v5", + with: { + "java-version": + "${{ inputs.java-version || '" + baseJavaVersionExpr + "' }}", + distribution: "temurin", + }, + }); + } + + const installPython = 
checkSpecification.installPython; + + if (installPython) { + const basePythonVersionExpr = "3.13"; + workflowInputs[KnownInputName.PythonVersion] = { + type: "string", + description: "The version of Python to install", + required: false, + default: basePythonVersionExpr, + }; + + steps.push({ + name: "Install Python", + if: "matrix.version != 'nightly-latest'", + uses: "actions/setup-python@v6", + with: { + "python-version": + "${{ inputs.python-version || '" + basePythonVersionExpr + "' }}", + }, + }); + } + + const installDotNet = checkSpecification.installDotNet; + + if (installDotNet) { + const baseDotNetVersionExpr = "9.x"; + workflowInputs[KnownInputName.DotnetVersion] = { + type: "string", + description: "The version of .NET to install", + required: false, + default: baseDotNetVersionExpr, + }; + + steps.push({ + name: "Install .NET", + uses: "actions/setup-dotnet@v5", + with: { + "dotnet-version": + "${{ inputs.dotnet-version || '" + baseDotNetVersionExpr + "' }}", + }, + }); + } + + const installYq = checkSpecification.installYq; + + if (installYq) { + steps.push({ + name: "Install yq", + if: "runner.os == 'Windows'", + env: { + YQ_PATH: "${{ runner.temp }}/yq", + // This is essentially an arbitrary version of `yq`, which happened to be the one that + // `choco` fetched when we moved away from using that here. + // See https://github.com/github/codeql-action/pull/3423 + YQ_VERSION: "v4.50.1", + }, + run: + 'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' + + 'echo "$YQ_PATH" >> "$GITHUB_PATH"', + }); + } + + // Extract the sequence of steps from the YAML document to persist as much formatting as possible. + const specSteps = specDocument.get("steps") as yaml.YAMLSeq; + + // A handful of workflow specifications use double quotes for values, while we generally use single quotes. + // This replaces double quotes with single quotes for consistency. 
+ yaml.visit(specSteps, { + Scalar(_key, node) { + if (node.type === "QUOTE_DOUBLE") { + node.type = "QUOTE_SINGLE"; + } + }, + }); + + // Add the generated steps in front of the ones from the specification. + specSteps.items.unshift(...steps); + + const checkJob: Record = { + strategy: { + "fail-fast": false, + matrix: { + include: matrix, + }, + }, + name: checkSpecification.name, + if: "github.triggering_actor != 'dependabot[bot]'", + permissions: { + contents: "read", + "security-events": "read", + }, + "timeout-minutes": 45, + "runs-on": "${{ matrix.os }}", + steps: specSteps, + }; + + if (checkSpecification.permissions) { + checkJob.permissions = checkSpecification.permissions; + } + + for (const key of ["env", "container", "services"] as const) { + if (checkSpecification[key] !== undefined) { + checkJob[key] = checkSpecification[key]; + } + } + + checkJob.env = checkJob.env ?? {}; + if (!("CODEQL_ACTION_TEST_MODE" in checkJob.env)) { + checkJob.env.CODEQL_ACTION_TEST_MODE = true; + } + + // If this check belongs to a named collection, record it. + if (checkSpecification.collection) { + const collectionName = checkSpecification.collection; + if (!collections[collectionName]) { + collections[collectionName] = []; + } + collections[collectionName].push({ + specification: checkSpecification, + checkName, + inputs: workflowInputs, + }); + } + + let extraGroupName = ""; + for (const inputName of Object.keys(workflowInputs)) { + extraGroupName += "-${{inputs." 
+ inputName + "}}"; + } + + const cron = new yaml.Scalar("0 5 * * *"); + cron.type = yaml.Scalar.QUOTE_SINGLE; + + const workflow = { + name: `PR Check - ${checkSpecification.name}`, + env: { + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}", + GO111MODULE: "auto", + }, + on: { + push: { + branches: ["main", "releases/v*"], + }, + pull_request: { + types: ["opened", "synchronize", "reopened", "ready_for_review"], + }, + merge_group: { + types: ["checks_requested"], + }, + schedule: [{ cron }], + workflow_dispatch: { + inputs: workflowInputs, + }, + workflow_call: { + inputs: workflowInputs, + }, + }, + defaults: { + run: { + shell: "bash", + }, + }, + concurrency: { + "cancel-in-progress": + "${{ github.event_name == 'pull_request' || false }}", + group: checkName + "-${{github.ref}}" + extraGroupName, + }, + jobs: { + [checkName]: checkJob, + }, + }; + + const outputPath = path.join(OUTPUT_DIR, `__${checkName}.yml`); + writeYaml(outputPath, workflow); + } + + // Write workflow files for collections. + for (const collectionName of Object.keys(collections)) { + const jobs: Record = {}; + let combinedInputs: Record = {}; + + for (const check of collections[collectionName]) { + const { checkName, specification, inputs: checkInputs } = check; + const checkWith: Record = {}; + + combinedInputs = { ...combinedInputs, ...checkInputs }; + + for (const inputName of Object.keys(checkInputs)) { + checkWith[inputName] = "${{ inputs." 
+ inputName + " }}"; + } + + jobs[checkName] = { + name: specification.name, + permissions: { + contents: "read", + "security-events": "read", + }, + uses: `./.github/workflows/__${checkName}.yml`, + with: checkWith, + }; + } + + const collectionWorkflow = { + name: `Manual Check - ${collectionName}`, + env: { + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}", + GO111MODULE: "auto", + }, + on: { + workflow_dispatch: { + inputs: combinedInputs, + }, + }, + jobs, + }; + + const outputPath = path.join(OUTPUT_DIR, `__${collectionName}.yml`); + writeYaml(outputPath, collectionWorkflow); + } + + console.log( + `\nDone. Wrote ${checkFiles.length} workflow file(s) to ${OUTPUT_DIR}`, + ); +} + +main(); diff --git a/pr-checks/sync_back.py b/pr-checks/sync_back.py deleted file mode 100755 index 1474b455e..000000000 --- a/pr-checks/sync_back.py +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env python3 -""" -Sync-back script to automatically update action versions in source templates -from the generated workflow files after Dependabot updates. - -This script scans the generated workflow files (.github/workflows/__*.yml) to find -all external action versions used, then updates: -1. Hardcoded action versions in pr-checks/sync.py -2. Action version references in template files in pr-checks/checks/ - -The script automatically detects all actions used in generated workflows and -preserves version comments (e.g., # v1.2.3) when syncing versions. - -This ensures that when Dependabot updates action versions in generated workflows, -those changes are properly synced back to the source templates. Regular workflow -files are updated directly by Dependabot and don't need sync-back. -""" - -import os -import re -import glob -import argparse -import sys -from pathlib import Path -from typing import Dict, List - - -def scan_generated_workflows(workflow_dir: str) -> Dict[str, str]: - """ - Scan generated workflow files to extract the latest action versions. 
- - Args: - workflow_dir: Path to .github/workflows directory - - Returns: - Dictionary mapping action names to their latest versions (including comments) - """ - action_versions = {} - generated_files = glob.glob(os.path.join(workflow_dir, "__*.yml")) - - for file_path in generated_files: - with open(file_path, 'r') as f: - content = f.read() - - # Find all action uses in the file, including potential comments - # This pattern captures: action_name@version_with_possible_comment - pattern = r'uses:\s+([^/\s]+/[^@\s]+)@([^@\n]+)' - matches = re.findall(pattern, content) - - for action_name, version_with_comment in matches: - # Only track non-local actions (those with / but not starting with ./) - if not action_name.startswith('./'): - # Assume that version numbers are consistent (this should be the case on a Dependabot update PR) - action_versions[action_name] = version_with_comment.rstrip() - - return action_versions - - -def update_sync_py(sync_py_path: str, action_versions: Dict[str, str]) -> bool: - """ - Update hardcoded action versions in pr-checks/sync.py - - Args: - sync_py_path: Path to sync.py file - action_versions: Dictionary of action names to versions (may include comments) - - Returns: - True if file was modified, False otherwise - """ - if not os.path.exists(sync_py_path): - raise FileNotFoundError(f"Could not find {sync_py_path}") - - with open(sync_py_path, 'r') as f: - content = f.read() - - original_content = content - - # Update hardcoded action versions - for action_name, version_with_comment in action_versions.items(): - # Extract just the version part (before any comment) for sync.py - version = version_with_comment.split('#')[0].strip() if '#' in version_with_comment else version_with_comment.strip() - - # Look for patterns like 'uses': 'actions/setup-node@v4' - # Note that this will break if we store an Action uses reference in a - # variable - that's a risk we're happy to take since in that case the - # PR checks will just fail. 
- pattern = rf"('uses':\s*'){re.escape(action_name)}@(?:[^']+)(')" - replacement = rf"\1{action_name}@{version}\2" - content = re.sub(pattern, replacement, content) - - if content != original_content: - with open(sync_py_path, 'w') as f: - f.write(content) - print(f"Updated {sync_py_path}") - return True - else: - print(f"No changes needed in {sync_py_path}") - return False - - -def update_template_files(checks_dir: str, action_versions: Dict[str, str]) -> List[str]: - """ - Update action versions in template files in pr-checks/checks/ - - Args: - checks_dir: Path to pr-checks/checks directory - action_versions: Dictionary of action names to versions (may include comments) - - Returns: - List of files that were modified - """ - modified_files = [] - template_files = glob.glob(os.path.join(checks_dir, "*.yml")) - - for file_path in template_files: - with open(file_path, 'r') as f: - content = f.read() - - original_content = content - - # Update action versions - for action_name, version_with_comment in action_versions.items(): - # Look for patterns like 'uses: actions/setup-node@v4' or 'uses: actions/setup-node@sha # comment' - pattern = rf"(uses:\s+{re.escape(action_name)})@(?:[^@\n]+)" - replacement = rf"\1@{version_with_comment}" - content = re.sub(pattern, replacement, content) - - if content != original_content: - with open(file_path, 'w') as f: - f.write(content) - modified_files.append(file_path) - print(f"Updated {file_path}") - - return modified_files - - -def main(): - parser = argparse.ArgumentParser(description="Sync action versions from generated workflows back to templates") - parser.add_argument("--verbose", "-v", action="store_true", help="Enable verbose output") - args = parser.parse_args() - - # Get the repository root (assuming script is in pr-checks/) - script_dir = Path(__file__).parent - repo_root = script_dir.parent - - workflow_dir = repo_root / ".github" / "workflows" - checks_dir = script_dir / "checks" - sync_py_path = script_dir / 
"sync.py" - - print("Scanning generated workflows for latest action versions...") - action_versions = scan_generated_workflows(str(workflow_dir)) - - if args.verbose: - print("Found action versions:") - for action, version in action_versions.items(): - print(f" {action}@{version}") - - if not action_versions: - print("No action versions found in generated workflows") - return 1 - - # Update files - print("\nUpdating source files...") - modified_files = [] - - # Update sync.py - if update_sync_py(str(sync_py_path), action_versions): - modified_files.append(str(sync_py_path)) - - # Update template files - template_modified = update_template_files(str(checks_dir), action_versions) - modified_files.extend(template_modified) - - if modified_files: - print(f"\nSync completed. Modified {len(modified_files)} files:") - for file_path in modified_files: - print(f" {file_path}") - else: - print("\nNo files needed updating - all action versions are already in sync") - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file diff --git a/pr-checks/sync_back.test.ts b/pr-checks/sync_back.test.ts new file mode 100755 index 000000000..316d2b730 --- /dev/null +++ b/pr-checks/sync_back.test.ts @@ -0,0 +1,250 @@ +#!/usr/bin/env npx tsx + +/* +Tests for the sync_back.ts script +*/ + +import * as assert from "node:assert/strict"; +import * as fs from "node:fs"; +import * as os from "node:os"; +import * as path from "node:path"; +import { afterEach, beforeEach, describe, it } from "node:test"; + +import { + scanGeneratedWorkflows, + updateSyncTs, + updateTemplateFiles, +} from "./sync_back"; + +let testDir: string; +let workflowDir: string; +let checksDir: string; +let syncTsPath: string; + +beforeEach(() => { + /** Set up temporary directories and files for testing */ + testDir = fs.mkdtempSync(path.join(os.tmpdir(), "sync-back-test-")); + workflowDir = path.join(testDir, ".github", "workflows"); + checksDir = path.join(testDir, "pr-checks", "checks"); 
+ fs.mkdirSync(workflowDir, { recursive: true }); + fs.mkdirSync(checksDir, { recursive: true }); + + // Create sync.ts file path + syncTsPath = path.join(testDir, "pr-checks", "sync.ts"); +}); + +afterEach(() => { + /** Clean up temporary directories */ + fs.rmSync(testDir, { recursive: true, force: true }); +}); + +describe("scanGeneratedWorkflows", () => { + it("basic workflow scanning", () => { + /** Test basic workflow scanning functionality */ + const workflowContent = ` +name: Test Workflow +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v5 + - uses: actions/setup-go@v6 +`; + + fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent); + + const result = scanGeneratedWorkflows(workflowDir); + + assert.equal(result["actions/checkout"], "v4"); + assert.equal(result["actions/setup-node"], "v5"); + assert.equal(result["actions/setup-go"], "v6"); + }); + + it("scanning workflows with version comments", () => { + /** Test scanning workflows with version comments */ + const workflowContent = ` +name: Test Workflow +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0 + - uses: actions/setup-python@v6 # Latest Python +`; + + fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent); + + const result = scanGeneratedWorkflows(workflowDir); + + assert.equal(result["actions/checkout"], "v4"); + assert.equal( + result["ruby/setup-ruby"], + "44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", + ); + assert.equal(result["actions/setup-python"], "v6 # Latest Python"); + }); + + it("ignores local actions", () => { + /** Test that local actions (starting with ./) are ignored */ + const workflowContent = ` +name: Test Workflow +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: ./.github/actions/local-action + - uses: ./another-local-action@v1 +`; + + 
fs.writeFileSync(path.join(workflowDir, "__test.yml"), workflowContent); + + const result = scanGeneratedWorkflows(workflowDir); + + assert.equal(result["actions/checkout"], "v4"); + assert.equal("./.github/actions/local-action" in result, false); + assert.equal("./another-local-action" in result, false); + }); +}); + +describe("updateSyncTs", () => { + it("updates sync.ts file", () => { + /** Test updating sync.ts file */ + const syncTsContent = ` +const steps = [ + { + uses: "actions/setup-node@v4", + with: { "node-version": "16" }, + }, + { + uses: "actions/setup-go@v5", + with: { "go-version": "1.19" }, + }, +]; +`; + + fs.writeFileSync(syncTsPath, syncTsContent); + + const actionVersions = { + "actions/setup-node": "v5", + "actions/setup-go": "v6", + }; + + const result = updateSyncTs(syncTsPath, actionVersions); + assert.equal(result, true); + + const updatedContent = fs.readFileSync(syncTsPath, "utf8"); + + assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"')); + assert.ok(updatedContent.includes('uses: "actions/setup-go@v6"')); + }); + + it("strips comments from versions", () => { + /** Test updating sync.ts file when versions have comments */ + const syncTsContent = ` +const steps = [ + { + uses: "actions/setup-node@v4", + with: { "node-version": "16" }, + }, +]; +`; + + fs.writeFileSync(syncTsPath, syncTsContent); + + const actionVersions = { + "actions/setup-node": "v5 # Latest version", + }; + + const result = updateSyncTs(syncTsPath, actionVersions); + assert.equal(result, true); + + const updatedContent = fs.readFileSync(syncTsPath, "utf8"); + + // sync.ts should get the version without comment + assert.ok(updatedContent.includes('uses: "actions/setup-node@v5"')); + assert.ok(!updatedContent.includes("# Latest version")); + }); + + it("returns false when no changes are needed", () => { + /** Test that updateSyncTs returns false when no changes are needed */ + const syncTsContent = ` +const steps = [ + { + uses: "actions/setup-node@v5", + 
with: { "node-version": "16" }, + }, +]; +`; + + fs.writeFileSync(syncTsPath, syncTsContent); + + const actionVersions = { + "actions/setup-node": "v5", + }; + + const result = updateSyncTs(syncTsPath, actionVersions); + assert.equal(result, false); + }); +}); + +describe("updateTemplateFiles", () => { + it("updates template files", () => { + /** Test updating template files */ + const templateContent = ` +name: Test Template +steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v4 + with: + node-version: 16 +`; + + const templatePath = path.join(checksDir, "test.yml"); + fs.writeFileSync(templatePath, templateContent); + + const actionVersions = { + "actions/checkout": "v4", + "actions/setup-node": "v5 # Latest", + }; + + const result = updateTemplateFiles(checksDir, actionVersions); + assert.equal(result.length, 1); + assert.ok(result.includes(templatePath)); + + const updatedContent = fs.readFileSync(templatePath, "utf8"); + + assert.ok(updatedContent.includes("uses: actions/checkout@v4")); + assert.ok(updatedContent.includes("uses: actions/setup-node@v5 # Latest")); + }); + + it("preserves version comments", () => { + /** Test that updating template files preserves version comments */ + const templateContent = ` +name: Test Template +steps: + - uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0 +`; + + const templatePath = path.join(checksDir, "test.yml"); + fs.writeFileSync(templatePath, templateContent); + + const actionVersions = { + "ruby/setup-ruby": + "55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", + }; + + const result = updateTemplateFiles(checksDir, actionVersions); + assert.equal(result.length, 1); + + const updatedContent = fs.readFileSync(templatePath, "utf8"); + + assert.ok( + updatedContent.includes( + "uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", + ), + ); + }); +}); diff --git a/pr-checks/sync_back.ts b/pr-checks/sync_back.ts new file mode 100755 index 000000000..7e1375580 
--- /dev/null +++ b/pr-checks/sync_back.ts @@ -0,0 +1,220 @@ +#!/usr/bin/env npx tsx + +/* +Sync-back script to automatically update action versions in source templates +from the generated workflow files after Dependabot updates. + +This script scans the generated workflow files (.github/workflows/__*.yml) to find +all external action versions used, then updates: +1. Hardcoded action versions in pr-checks/sync.ts +2. Action version references in template files in pr-checks/checks/ + +The script automatically detects all actions used in generated workflows and +preserves version comments (e.g., # v1.2.3) when syncing versions. + +This ensures that when Dependabot updates action versions in generated workflows, +those changes are properly synced back to the source templates. Regular workflow +files are updated directly by Dependabot and don't need sync-back. +*/ + +import { parseArgs } from "node:util"; + +import * as fs from "fs"; +import * as path from "path"; + +const THIS_DIR = __dirname; +const CHECKS_DIR = path.join(THIS_DIR, "checks"); +const WORKFLOW_DIR = path.join(THIS_DIR, "..", ".github", "workflows"); +const SYNC_TS_PATH = path.join(THIS_DIR, "sync.ts"); + +/** + * Scan generated workflow files to extract the latest action versions. 
+ *
+ * @param workflowDir - Path to .github/workflows directory
+ * @returns Map from action names to their latest versions (including comments)
+ */
+export function scanGeneratedWorkflows(workflowDir: string): Record<string, string> {
+  const actionVersions: Record<string, string> = {};
+
+  const generatedFiles = fs
+    .readdirSync(workflowDir)
+    .filter((f) => f.startsWith("__") && f.endsWith(".yml"))
+    .map((f) => path.join(workflowDir, f));
+
+  for (const filePath of generatedFiles) {
+    const content = fs.readFileSync(filePath, "utf8");
+
+    // Find all action uses in the file, including potential comments
+    // This pattern captures: action_name@version_with_possible_comment
+    const pattern = /uses:\s+([^/\s]+\/[^@\s]+)@([^@\n]+)/g;
+    let match: RegExpExecArray | null;
+
+    while ((match = pattern.exec(content)) !== null) {
+      const actionName = match[1];
+      const versionWithComment = match[2].trimEnd();
+
+      // Only track non-local actions (those with / but not starting with ./)
+      if (!actionName.startsWith("./")) {
+        // Assume that version numbers are consistent (this should be the case on a Dependabot update PR)
+        actionVersions[actionName] = versionWithComment;
+      }
+    }
+  }
+
+  return actionVersions;
+}
+
+/**
+ * Update hardcoded action versions in pr-checks/sync.ts
+ *
+ * @param syncTsPath - Path to sync.ts file
+ * @param actionVersions - Map of action names to versions (may include comments)
+ * @returns True if the file was modified, false otherwise
+ */
+export function updateSyncTs(
+  syncTsPath: string,
+  actionVersions: Record<string, string>,
+): boolean {
+  if (!fs.existsSync(syncTsPath)) {
+    throw new Error(`Could not find ${syncTsPath}`);
+  }
+
+  let content = fs.readFileSync(syncTsPath, "utf8");
+  const originalContent = content;
+
+  // Update hardcoded action versions
+  for (const [actionName, versionWithComment] of Object.entries(
+    actionVersions,
+  )) {
+    // Extract just the version part (before any comment) for sync.ts
+    const version = versionWithComment.includes("#")
+      ? versionWithComment.split("#")[0].trim()
+      : versionWithComment.trim();
+
+    // Look for patterns like uses: "actions/setup-node@v4"
+    // Note that this will break if we store an Action uses reference in a
+    // variable - that's a risk we're happy to take since in that case the
+    // PR checks will just fail.
+    const escaped = actionName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+    const pattern = new RegExp(
+      `(uses:\\s*")${escaped}@(?:[^"]+)(")`,
+      "g",
+    );
+    content = content.replace(pattern, `$1${actionName}@${version}$2`);
+  }
+
+  if (content !== originalContent) {
+    fs.writeFileSync(syncTsPath, content, "utf8");
+    console.info(`Updated ${syncTsPath}`);
+    return true;
+  } else {
+    console.info(`No changes needed in ${syncTsPath}`);
+    return false;
+  }
+}
+
+/**
+ * Update action versions in template files in pr-checks/checks/
+ *
+ * @param checksDir - Path to pr-checks/checks directory
+ * @param actionVersions - Map of action names to versions (may include comments)
+ * @returns List of files that were modified
+ */
+export function updateTemplateFiles(
+  checksDir: string,
+  actionVersions: Record<string, string>,
+): string[] {
+  const modifiedFiles: string[] = [];
+
+  const templateFiles = fs
+    .readdirSync(checksDir)
+    .filter((f) => f.endsWith(".yml"))
+    .map((f) => path.join(checksDir, f));
+
+  for (const filePath of templateFiles) {
+    let content = fs.readFileSync(filePath, "utf8");
+    const originalContent = content;
+
+    // Update action versions
+    for (const [actionName, versionWithComment] of Object.entries(
+      actionVersions,
+    )) {
+      // Look for patterns like 'uses: actions/setup-node@v4' or 'uses: actions/setup-node@sha # comment'
+      const escaped = actionName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
+      const pattern = new RegExp(
+        `(uses:\\s+${escaped})@(?:[^@\n]+)`,
+        "g",
+      );
+      content = content.replace(pattern, `$1@${versionWithComment}`);
+    }
+
+    if (content !== originalContent) {
+      fs.writeFileSync(filePath, content, "utf8");
+      modifiedFiles.push(filePath);
+      console.info(`Updated ${filePath}`);
+    }
+  }
+
+  return modifiedFiles;
+}
+
+function main(): number {
+  const { values } = parseArgs({
+    options: {
+      verbose: {
+        type: "boolean",
+        short: "v",
+        default: false,
+      },
+    },
+    strict: true,
+  });
+
+  const verbose = values.verbose ?? false;
+
+  console.info("Scanning generated workflows for latest action versions...");
+  const actionVersions = scanGeneratedWorkflows(WORKFLOW_DIR);
+
+  if (verbose) {
+    console.info("Found action versions:");
+    for (const [action, version] of Object.entries(actionVersions)) {
+      console.info(`  ${action}@${version}`);
+    }
+  }
+
+  if (Object.keys(actionVersions).length === 0) {
+    console.error("No action versions found in generated workflows");
+    return 1;
+  }
+
+  // Update files
+  console.info("\nUpdating source files...");
+  const modifiedFiles: string[] = [];
+
+  // Update sync.ts
+  if (updateSyncTs(SYNC_TS_PATH, actionVersions)) {
+    modifiedFiles.push(SYNC_TS_PATH);
+  }
+
+  // Update template files
+  const templateModified = updateTemplateFiles(CHECKS_DIR, actionVersions);
+  modifiedFiles.push(...templateModified);
+
+  if (modifiedFiles.length > 0) {
+    console.info(`\nSync completed. Modified ${modifiedFiles.length} files:`);
+    for (const filePath of modifiedFiles) {
+      console.info(`  ${filePath}`);
+    }
+  } else {
+    console.info(
+      "\nNo files needed updating - all action versions are already in sync",
+    );
+  }
+
+  return 0;
+}
+
+// Only call `main` if this script was run directly.
+if (require.main === module) { + process.exit(main()); +} diff --git a/pr-checks/test_sync_back.py b/pr-checks/test_sync_back.py deleted file mode 100644 index de2e42d73..000000000 --- a/pr-checks/test_sync_back.py +++ /dev/null @@ -1,237 +0,0 @@ -#!/usr/bin/env python3 -""" -Tests for the sync_back.py script -""" - -import os -import shutil -import tempfile -import unittest - -import sync_back - - -class TestSyncBack(unittest.TestCase): - - def setUp(self): - """Set up temporary directories and files for testing""" - self.test_dir = tempfile.mkdtemp() - self.workflow_dir = os.path.join(self.test_dir, ".github", "workflows") - self.checks_dir = os.path.join(self.test_dir, "pr-checks", "checks") - os.makedirs(self.workflow_dir) - os.makedirs(self.checks_dir) - - # Create sync.py file - self.sync_py_path = os.path.join(self.test_dir, "pr-checks", "sync.py") - - def tearDown(self): - """Clean up temporary directories""" - shutil.rmtree(self.test_dir) - - def test_scan_generated_workflows_basic(self): - """Test basic workflow scanning functionality""" - # Create a test generated workflow file - workflow_content = """ -name: Test Workflow -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v5 - - uses: actions/setup-go@v6 - """ - - with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f: - f.write(workflow_content) - - result = sync_back.scan_generated_workflows(self.workflow_dir) - - self.assertEqual(result['actions/checkout'], 'v4') - self.assertEqual(result['actions/setup-node'], 'v5') - self.assertEqual(result['actions/setup-go'], 'v6') - - def test_scan_generated_workflows_with_comments(self): - """Test scanning workflows with version comments""" - workflow_content = """ -name: Test Workflow -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0 - - uses: actions/setup-python@v6 # Latest Python - 
""" - - with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f: - f.write(workflow_content) - - result = sync_back.scan_generated_workflows(self.workflow_dir) - - self.assertEqual(result['actions/checkout'], 'v4') - self.assertEqual(result['ruby/setup-ruby'], '44511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0') - self.assertEqual(result['actions/setup-python'], 'v6 # Latest Python') - - def test_scan_generated_workflows_ignores_local_actions(self): - """Test that local actions (starting with ./) are ignored""" - workflow_content = """ -name: Test Workflow -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: ./.github/actions/local-action - - uses: ./another-local-action@v1 - """ - - with open(os.path.join(self.workflow_dir, "__test.yml"), 'w') as f: - f.write(workflow_content) - - result = sync_back.scan_generated_workflows(self.workflow_dir) - - self.assertEqual(result['actions/checkout'], 'v4') - self.assertNotIn('./.github/actions/local-action', result) - self.assertNotIn('./another-local-action', result) - - - def test_update_sync_py(self): - """Test updating sync.py file""" - sync_py_content = """ -steps = [ - { - 'uses': 'actions/setup-node@v4', - 'with': {'node-version': '16'} - }, - { - 'uses': 'actions/setup-go@v5', - 'with': {'go-version': '1.19'} - } -] - """ - - with open(self.sync_py_path, 'w') as f: - f.write(sync_py_content) - - action_versions = { - 'actions/setup-node': 'v5', - 'actions/setup-go': 'v6' - } - - result = sync_back.update_sync_py(self.sync_py_path, action_versions) - self.assertTrue(result) - - with open(self.sync_py_path, 'r') as f: - updated_content = f.read() - - self.assertIn("'uses': 'actions/setup-node@v5'", updated_content) - self.assertIn("'uses': 'actions/setup-go@v6'", updated_content) - - def test_update_sync_py_with_comments(self): - """Test updating sync.py file when versions have comments""" - sync_py_content = """ -steps = [ - { - 'uses': 'actions/setup-node@v4', - 
'with': {'node-version': '16'} - } -] - """ - - with open(self.sync_py_path, 'w') as f: - f.write(sync_py_content) - - action_versions = { - 'actions/setup-node': 'v5 # Latest version' - } - - result = sync_back.update_sync_py(self.sync_py_path, action_versions) - self.assertTrue(result) - - with open(self.sync_py_path, 'r') as f: - updated_content = f.read() - - # sync.py should get the version without comment - self.assertIn("'uses': 'actions/setup-node@v5'", updated_content) - self.assertNotIn("# Latest version", updated_content) - - def test_update_template_files(self): - """Test updating template files""" - template_content = """ -name: Test Template -steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v4 - with: - node-version: 16 - """ - - template_path = os.path.join(self.checks_dir, "test.yml") - with open(template_path, 'w') as f: - f.write(template_content) - - action_versions = { - 'actions/checkout': 'v4', - 'actions/setup-node': 'v5 # Latest' - } - - result = sync_back.update_template_files(self.checks_dir, action_versions) - self.assertEqual(len(result), 1) - self.assertIn(template_path, result) - - with open(template_path, 'r') as f: - updated_content = f.read() - - self.assertIn("uses: actions/checkout@v4", updated_content) - self.assertIn("uses: actions/setup-node@v5 # Latest", updated_content) - - def test_update_template_files_preserves_comments(self): - """Test that updating template files preserves version comments""" - template_content = """ -name: Test Template -steps: - - uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb # v1.256.0 - """ - - template_path = os.path.join(self.checks_dir, "test.yml") - with open(template_path, 'w') as f: - f.write(template_content) - - action_versions = { - 'ruby/setup-ruby': '55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0' - } - - result = sync_back.update_template_files(self.checks_dir, action_versions) - self.assertEqual(len(result), 1) - - with open(template_path, 'r') as 
f: - updated_content = f.read() - - self.assertIn("uses: ruby/setup-ruby@55511735964dcb71245e7e55f72539531f7bc0eb # v1.257.0", updated_content) - - def test_no_changes_needed(self): - """Test that functions return False/empty when no changes are needed""" - # Test sync.py with no changes needed - sync_py_content = """ -steps = [ - { - 'uses': 'actions/setup-node@v5', - 'with': {'node-version': '16'} - } -] - """ - - with open(self.sync_py_path, 'w') as f: - f.write(sync_py_content) - - action_versions = { - 'actions/setup-node': 'v5' - } - - result = sync_back.update_sync_py(self.sync_py_path, action_versions) - self.assertFalse(result) - - -if __name__ == '__main__': - unittest.main() diff --git a/src/actions-util.test.ts b/src/actions-util.test.ts index 68b5c6319..3940cf755 100644 --- a/src/actions-util.test.ts +++ b/src/actions-util.test.ts @@ -100,7 +100,7 @@ test("computeAutomationID()", async (t) => { ); }); -test("getPullRequestBranches() with pull request context", (t) => { +test.serial("getPullRequestBranches() with pull request context", (t) => { withMockedContext( { pull_request: { @@ -119,89 +119,104 @@ test("getPullRequestBranches() with pull request context", (t) => { ); }); -test("getPullRequestBranches() returns undefined with push context", (t) => { - withMockedContext( - { - push: { - ref: "refs/heads/main", - }, - }, - () => { - t.is(getPullRequestBranches(), undefined); - t.is(isAnalyzingPullRequest(), false); - }, - ); -}); - -test("getPullRequestBranches() with Default Setup environment variables", (t) => { - withMockedContext({}, () => { - withMockedEnv( +test.serial( + "getPullRequestBranches() returns undefined with push context", + (t) => { + withMockedContext( { - CODE_SCANNING_REF: "refs/heads/feature-branch", - CODE_SCANNING_BASE_BRANCH: "main", - }, - () => { - t.deepEqual(getPullRequestBranches(), { - base: "main", - head: "refs/heads/feature-branch", - }); - t.is(isAnalyzingPullRequest(), true); - }, - ); - }); -}); - 
-test("getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set", (t) => { - withMockedContext({}, () => { - withMockedEnv( - { - CODE_SCANNING_REF: "refs/heads/feature-branch", - CODE_SCANNING_BASE_BRANCH: undefined, + push: { + ref: "refs/heads/main", + }, }, () => { t.is(getPullRequestBranches(), undefined); t.is(isAnalyzingPullRequest(), false); }, ); - }); -}); + }, +); -test("getPullRequestBranches() returns undefined when only CODE_SCANNING_BASE_BRANCH is set", (t) => { - withMockedContext({}, () => { - withMockedEnv( - { - CODE_SCANNING_REF: undefined, - CODE_SCANNING_BASE_BRANCH: "main", - }, - () => { - t.is(getPullRequestBranches(), undefined); - t.is(isAnalyzingPullRequest(), false); - }, - ); - }); -}); +test.serial( + "getPullRequestBranches() with Default Setup environment variables", + (t) => { + withMockedContext({}, () => { + withMockedEnv( + { + CODE_SCANNING_REF: "refs/heads/feature-branch", + CODE_SCANNING_BASE_BRANCH: "main", + }, + () => { + t.deepEqual(getPullRequestBranches(), { + base: "main", + head: "refs/heads/feature-branch", + }); + t.is(isAnalyzingPullRequest(), true); + }, + ); + }); + }, +); -test("getPullRequestBranches() returns undefined when no PR context", (t) => { - withMockedContext({}, () => { - withMockedEnv( - { - CODE_SCANNING_REF: undefined, - CODE_SCANNING_BASE_BRANCH: undefined, - }, - () => { - t.is(getPullRequestBranches(), undefined); - t.is(isAnalyzingPullRequest(), false); - }, - ); - }); -}); +test.serial( + "getPullRequestBranches() returns undefined when only CODE_SCANNING_REF is set", + (t) => { + withMockedContext({}, () => { + withMockedEnv( + { + CODE_SCANNING_REF: "refs/heads/feature-branch", + CODE_SCANNING_BASE_BRANCH: undefined, + }, + () => { + t.is(getPullRequestBranches(), undefined); + t.is(isAnalyzingPullRequest(), false); + }, + ); + }); + }, +); -test("initializeEnvironment", (t) => { +test.serial( + "getPullRequestBranches() returns undefined when only 
CODE_SCANNING_BASE_BRANCH is set", + (t) => { + withMockedContext({}, () => { + withMockedEnv( + { + CODE_SCANNING_REF: undefined, + CODE_SCANNING_BASE_BRANCH: "main", + }, + () => { + t.is(getPullRequestBranches(), undefined); + t.is(isAnalyzingPullRequest(), false); + }, + ); + }); + }, +); + +test.serial( + "getPullRequestBranches() returns undefined when no PR context", + (t) => { + withMockedContext({}, () => { + withMockedEnv( + { + CODE_SCANNING_REF: undefined, + CODE_SCANNING_BASE_BRANCH: undefined, + }, + () => { + t.is(getPullRequestBranches(), undefined); + t.is(isAnalyzingPullRequest(), false); + }, + ); + }); + }, +); + +test.serial("initializeEnvironment", (t) => { initializeEnvironment("1.2.3"); t.deepEqual(process.env[EnvVar.VERSION], "1.2.3"); }); -test("fixCodeQualityCategory", (t) => { +test.serial("fixCodeQualityCategory", (t) => { withMockedEnv( { GITHUB_EVENT_NAME: "dynamic", @@ -249,14 +264,17 @@ test("fixCodeQualityCategory", (t) => { ); }); -test("isDynamicWorkflow() returns true if event name is `dynamic`", (t) => { - process.env.GITHUB_EVENT_NAME = "dynamic"; - t.assert(isDynamicWorkflow()); - process.env.GITHUB_EVENT_NAME = "push"; - t.false(isDynamicWorkflow()); -}); +test.serial( + "isDynamicWorkflow() returns true if event name is `dynamic`", + (t) => { + process.env.GITHUB_EVENT_NAME = "dynamic"; + t.assert(isDynamicWorkflow()); + process.env.GITHUB_EVENT_NAME = "push"; + t.false(isDynamicWorkflow()); + }, +); -test("isDefaultSetup() returns true when expected", (t) => { +test.serial("isDefaultSetup() returns true when expected", (t) => { process.env.GITHUB_EVENT_NAME = "dynamic"; process.env[EnvVar.ANALYSIS_KEY] = "dynamic/github-code-scanning"; t.assert(isDefaultSetup()); diff --git a/src/analyses.test.ts b/src/analyses.test.ts index 36d3d316f..293b4be6d 100644 --- a/src/analyses.test.ts +++ b/src/analyses.test.ts @@ -50,31 +50,40 @@ test("Parsing analysis kinds requires at least one analysis kind", async (t) => }); }); 
-test("getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", async (t) => { - const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); - requiredInputStub - .withArgs("analysis-kinds") - .returns("code-scanning,code-quality"); - const result = await getAnalysisKinds(getRunnerLogger(true), true); - t.assert(result.includes(AnalysisKind.CodeScanning)); - t.assert(result.includes(AnalysisKind.CodeQuality)); -}); +test.serial( + "getAnalysisKinds - returns expected analysis kinds for `analysis-kinds` input", + async (t) => { + const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); + requiredInputStub + .withArgs("analysis-kinds") + .returns("code-scanning,code-quality"); + const result = await getAnalysisKinds(getRunnerLogger(true), true); + t.assert(result.includes(AnalysisKind.CodeScanning)); + t.assert(result.includes(AnalysisKind.CodeQuality)); + }, +); -test("getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", async (t) => { - const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); - requiredInputStub.withArgs("analysis-kinds").returns("code-scanning"); - const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput"); - optionalInputStub.withArgs("quality-queries").returns("code-quality"); - const result = await getAnalysisKinds(getRunnerLogger(true), true); - t.assert(result.includes(AnalysisKind.CodeScanning)); - t.assert(result.includes(AnalysisKind.CodeQuality)); -}); +test.serial( + "getAnalysisKinds - includes `code-quality` when deprecated `quality-queries` input is used", + async (t) => { + const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); + requiredInputStub.withArgs("analysis-kinds").returns("code-scanning"); + const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput"); + optionalInputStub.withArgs("quality-queries").returns("code-quality"); + const result = await getAnalysisKinds(getRunnerLogger(true), 
true); + t.assert(result.includes(AnalysisKind.CodeScanning)); + t.assert(result.includes(AnalysisKind.CodeQuality)); + }, +); -test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t) => { - const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); - requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing"); - await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true)); -}); +test.serial( + "getAnalysisKinds - throws if `analysis-kinds` input is invalid", + async (t) => { + const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); + requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing"); + await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true)); + }, +); // Test the compatibility matrix by looping through all analysis kinds. const analysisKinds = Object.values(AnalysisKind); @@ -86,25 +95,31 @@ for (let i = 0; i < analysisKinds.length; i++) { if (analysisKind === otherAnalysis) continue; if (compatibilityMatrix[analysisKind].has(otherAnalysis)) { - test(`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, async (t) => { - const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); - requiredInputStub - .withArgs("analysis-kinds") - .returns([analysisKind, otherAnalysis].join(",")); - const result = await getAnalysisKinds(getRunnerLogger(true), true); - t.is(result.length, 2); - }); + test.serial( + `getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, + async (t) => { + const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); + requiredInputStub + .withArgs("analysis-kinds") + .returns([analysisKind, otherAnalysis].join(",")); + const result = await getAnalysisKinds(getRunnerLogger(true), true); + t.is(result.length, 2); + }, + ); } else { - test(`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, async (t) => { - const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); - 
requiredInputStub - .withArgs("analysis-kinds") - .returns([analysisKind, otherAnalysis].join(",")); - await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), { - instanceOf: ConfigurationError, - message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`, - }); - }); + test.serial( + `getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, + async (t) => { + const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput"); + requiredInputStub + .withArgs("analysis-kinds") + .returns([analysisKind, otherAnalysis].join(",")); + await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), { + instanceOf: ConfigurationError, + message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`, + }); + }, + ); } } } @@ -122,44 +137,50 @@ test("Code Scanning configuration does not accept other SARIF extensions", (t) = } }); -test("Risk Assessment configuration transforms SARIF upload payload", (t) => { - process.env[EnvVar.RISK_ASSESSMENT_ID] = "1"; - const payload = RiskAssessment.transformPayload({ - commit_oid: "abc", - sarif: "sarif", - ref: "ref", - workflow_run_attempt: 1, - workflow_run_id: 1, - checkout_uri: "uri", - tool_names: [], - }) as AssessmentPayload; +test.serial( + "Risk Assessment configuration transforms SARIF upload payload", + (t) => { + process.env[EnvVar.RISK_ASSESSMENT_ID] = "1"; + const payload = RiskAssessment.transformPayload({ + commit_oid: "abc", + sarif: "sarif", + ref: "ref", + workflow_run_attempt: 1, + workflow_run_id: 1, + checkout_uri: "uri", + tool_names: [], + }) as AssessmentPayload; - const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 }; - t.deepEqual(expected, payload); -}); + const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 }; + t.deepEqual(expected, payload); + }, +); -test("Risk Assessment configuration throws for negative assessment IDs", (t) => { - process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1"; - 
t.throws( - () => - RiskAssessment.transformPayload({ - commit_oid: "abc", - sarif: "sarif", - ref: "ref", - workflow_run_attempt: 1, - workflow_run_id: 1, - checkout_uri: "uri", - tool_names: [], - }), - { - instanceOf: Error, - message: (msg) => - msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `), - }, - ); -}); +test.serial( + "Risk Assessment configuration throws for negative assessment IDs", + (t) => { + process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1"; + t.throws( + () => + RiskAssessment.transformPayload({ + commit_oid: "abc", + sarif: "sarif", + ref: "ref", + workflow_run_attempt: 1, + workflow_run_id: 1, + checkout_uri: "uri", + tool_names: [], + }), + { + instanceOf: Error, + message: (msg) => + msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `), + }, + ); + }, +); -test("Risk Assessment configuration throws for invalid IDs", (t) => { +test.serial("Risk Assessment configuration throws for invalid IDs", (t) => { process.env[EnvVar.RISK_ASSESSMENT_ID] = "foo"; t.throws( () => diff --git a/src/analyze-action-env.test.ts b/src/analyze-action-env.test.ts index aecbae4b0..e1538bf4b 100644 --- a/src/analyze-action-env.test.ts +++ b/src/analyze-action-env.test.ts @@ -28,9 +28,7 @@ test("analyze action with RAM & threads from environment variables", async (t) = // it a bit to 20s. 
t.timeout(1000 * 20); await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL; - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["GITHUB_API_URL"] = "https://api.github.com"; + setupActionsVars(tmpDir, tmpDir); sinon .stub(statusReport, "createStatusReportBase") .resolves({} as statusReport.StatusReportBase); @@ -54,7 +52,6 @@ test("analyze action with RAM & threads from environment variables", async (t) = const optionalInputStub = sinon.stub(actionsUtil, "getOptionalInput"); optionalInputStub.withArgs("expect-error").returns("false"); sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion); - setupActionsVars(tmpDir, tmpDir); mockFeatureFlagApiEndpoint(200, {}); // When there are no action inputs for RAM and threads, the action uses diff --git a/src/analyze-action-input.test.ts b/src/analyze-action-input.test.ts index 74c03923d..b2c56e119 100644 --- a/src/analyze-action-input.test.ts +++ b/src/analyze-action-input.test.ts @@ -26,9 +26,7 @@ setupTests(test); test("analyze action with RAM & threads from action inputs", async (t) => { t.timeout(1000 * 20); await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL; - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["GITHUB_API_URL"] = "https://api.github.com"; + setupActionsVars(tmpDir, tmpDir); sinon .stub(statusReport, "createStatusReportBase") .resolves({} as statusReport.StatusReportBase); @@ -51,7 +49,6 @@ test("analyze action with RAM & threads from action inputs", async (t) => { optionalInputStub.withArgs("expect-error").returns("false"); sinon.stub(api, "getGitHubVersion").resolves(gitHubVersion); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - setupActionsVars(tmpDir, tmpDir); mockFeatureFlagApiEndpoint(200, {}); process.env["CODEQL_THREADS"] = "1"; diff --git a/src/analyze.test.ts b/src/analyze.test.ts index 
a5ab7a34d..664c23853 100644 --- a/src/analyze.test.ts +++ b/src/analyze.test.ts @@ -32,7 +32,7 @@ setupTests(test); * - Checks that the duration fields are populated for the correct language. * - Checks that the QA telemetry status report fields are populated when the QA feature flag is enabled. */ -test("status report fields", async (t) => { +test.serial("status report fields", async (t) => { return await util.withTmpDir(async (tmpDir) => { setupActionsVars(tmpDir, tmpDir); diff --git a/src/analyze.ts b/src/analyze.ts index 6f5693044..c7af4fec9 100644 --- a/src/analyze.ts +++ b/src/analyze.ts @@ -25,6 +25,7 @@ import { FeatureEnablement, Feature } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; import { OverlayDatabaseMode } from "./overlay"; +import type * as sarif from "./sarif"; import { DatabaseCreationTimings, EventReport } from "./status-report"; import { endTracingForCluster } from "./tracer-config"; import * as util from "./util"; @@ -594,7 +595,7 @@ export async function runQueries( function getPerQueryAlertCounts(sarifPath: string): Record { const sarifObject = JSON.parse( fs.readFileSync(sarifPath, "utf8"), - ) as util.SarifFile; + ) as sarif.Log; // We do not need to compute fingerprints because we are not sending data based off of locations. 
// Generate the query: alert count object diff --git a/src/api-client.test.ts b/src/api-client.test.ts index 3af9ae282..d0311d0dc 100644 --- a/src/api-client.test.ts +++ b/src/api-client.test.ts @@ -14,7 +14,7 @@ test.beforeEach(() => { util.initializeEnvironment(actionsUtil.getActionVersion()); }); -test("getApiClient", async (t) => { +test.serial("getApiClient", async (t) => { const pluginStub: sinon.SinonStub = sinon.stub(githubUtils.GitHub, "plugin"); const githubStub: sinon.SinonStub = sinon.stub(); pluginStub.returns(githubStub); @@ -61,7 +61,7 @@ function mockGetMetaVersionHeader( return spyGetContents; } -test("getGitHubVersion for Dotcom", async (t) => { +test.serial("getGitHubVersion for Dotcom", async (t) => { const apiDetails = { auth: "", url: "https://github.com", @@ -75,7 +75,7 @@ test("getGitHubVersion for Dotcom", async (t) => { t.deepEqual(util.GitHubVariant.DOTCOM, v.type); }); -test("getGitHubVersion for GHES", async (t) => { +test.serial("getGitHubVersion for GHES", async (t) => { mockGetMetaVersionHeader("2.0"); const v2 = await api.getGitHubVersionFromApi(api.getApiClient(), { auth: "", @@ -88,7 +88,7 @@ test("getGitHubVersion for GHES", async (t) => { ); }); -test("getGitHubVersion for different domain", async (t) => { +test.serial("getGitHubVersion for different domain", async (t) => { mockGetMetaVersionHeader(undefined); const v3 = await api.getGitHubVersionFromApi(api.getApiClient(), { auth: "", @@ -98,7 +98,7 @@ test("getGitHubVersion for different domain", async (t) => { t.deepEqual({ type: util.GitHubVariant.DOTCOM }, v3); }); -test("getGitHubVersion for GHEC-DR", async (t) => { +test.serial("getGitHubVersion for GHEC-DR", async (t) => { mockGetMetaVersionHeader("ghe.com"); const gheDotcom = await api.getGitHubVersionFromApi(api.getApiClient(), { auth: "", @@ -108,96 +108,99 @@ test("getGitHubVersion for GHEC-DR", async (t) => { t.deepEqual({ type: util.GitHubVariant.GHEC_DR }, gheDotcom); }); -test("wrapApiConfigurationError correctly 
wraps specific configuration errors", (t) => { - // We don't reclassify arbitrary errors - const arbitraryError = new Error("arbitrary error"); - let res = api.wrapApiConfigurationError(arbitraryError); - t.is(res, arbitraryError); +test.serial( + "wrapApiConfigurationError correctly wraps specific configuration errors", + (t) => { + // We don't reclassify arbitrary errors + const arbitraryError = new Error("arbitrary error"); + let res = api.wrapApiConfigurationError(arbitraryError); + t.is(res, arbitraryError); - // Same goes for arbitrary errors - const configError = new util.ConfigurationError("arbitrary error"); - res = api.wrapApiConfigurationError(configError); - t.is(res, configError); + // Same goes for arbitrary errors + const configError = new util.ConfigurationError("arbitrary error"); + res = api.wrapApiConfigurationError(configError); + t.is(res, configError); - // If an HTTP error doesn't contain a specific error message, we don't - // wrap is an an API error. - const httpError = new util.HTTPError("arbitrary HTTP error", 456); - res = api.wrapApiConfigurationError(httpError); - t.is(res, httpError); + // If an HTTP error doesn't contain a specific error message, we don't + // wrap is an an API error. + const httpError = new util.HTTPError("arbitrary HTTP error", 456); + res = api.wrapApiConfigurationError(httpError); + t.is(res, httpError); - // For other HTTP errors, we wrap them as Configuration errors if they contain - // specific error messages. - const httpNotFoundError = new util.HTTPError("commit not found", 404); - res = api.wrapApiConfigurationError(httpNotFoundError); - t.deepEqual(res, new util.ConfigurationError("commit not found")); + // For other HTTP errors, we wrap them as Configuration errors if they contain + // specific error messages. 
+ const httpNotFoundError = new util.HTTPError("commit not found", 404); + res = api.wrapApiConfigurationError(httpNotFoundError); + t.deepEqual(res, new util.ConfigurationError("commit not found")); - const refNotFoundError = new util.HTTPError( - "ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest", - 404, - ); - res = api.wrapApiConfigurationError(refNotFoundError); - t.deepEqual( - res, - new util.ConfigurationError( + const refNotFoundError = new util.HTTPError( "ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest", - ), - ); + 404, + ); + res = api.wrapApiConfigurationError(refNotFoundError); + t.deepEqual( + res, + new util.ConfigurationError( + "ref 'refs/heads/jitsi' not found in this repository - https://docs.github.com/rest", + ), + ); - const apiRateLimitError = new util.HTTPError( - "API rate limit exceeded for installation", - 403, - ); - res = api.wrapApiConfigurationError(apiRateLimitError); - t.deepEqual( - res, - new util.ConfigurationError("API rate limit exceeded for installation"), - ); + const apiRateLimitError = new util.HTTPError( + "API rate limit exceeded for installation", + 403, + ); + res = api.wrapApiConfigurationError(apiRateLimitError); + t.deepEqual( + res, + new util.ConfigurationError("API rate limit exceeded for installation"), + ); - const tokenSuggestionMessage = - "Please check that your token is valid and has the required permissions: contents: read, security-events: write"; - const badCredentialsError = new util.HTTPError("Bad credentials", 401); - res = api.wrapApiConfigurationError(badCredentialsError); - t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage)); + const tokenSuggestionMessage = + "Please check that your token is valid and has the required permissions: contents: read, security-events: write"; + const badCredentialsError = new util.HTTPError("Bad credentials", 401); + res = api.wrapApiConfigurationError(badCredentialsError); + 
t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage)); - const notFoundError = new util.HTTPError("Not Found", 404); - res = api.wrapApiConfigurationError(notFoundError); - t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage)); + const notFoundError = new util.HTTPError("Not Found", 404); + res = api.wrapApiConfigurationError(notFoundError); + t.deepEqual(res, new util.ConfigurationError(tokenSuggestionMessage)); - const resourceNotAccessibleError = new util.HTTPError( - "Resource not accessible by integration", - 403, - ); - res = api.wrapApiConfigurationError(resourceNotAccessibleError); - t.deepEqual( - res, - new util.ConfigurationError("Resource not accessible by integration"), - ); + const resourceNotAccessibleError = new util.HTTPError( + "Resource not accessible by integration", + 403, + ); + res = api.wrapApiConfigurationError(resourceNotAccessibleError); + t.deepEqual( + res, + new util.ConfigurationError("Resource not accessible by integration"), + ); - // Enablement errors. - const enablementErrorMessages = [ - "Code Security must be enabled for this repository to use code scanning", - "Advanced Security must be enabled for this repository to use code scanning", - "Code Scanning is not enabled for this repository. Please enable code scanning in the repository settings.", - ]; - const transforms = [ - (msg: string) => msg, - (msg: string) => msg.toLowerCase(), - (msg: string) => msg.toLocaleUpperCase(), - ]; + // Enablement errors. + const enablementErrorMessages = [ + "Code Security must be enabled for this repository to use code scanning", + "Advanced Security must be enabled for this repository to use code scanning", + "Code Scanning is not enabled for this repository. 
Please enable code scanning in the repository settings.", + ]; + const transforms = [ + (msg: string) => msg, + (msg: string) => msg.toLowerCase(), + (msg: string) => msg.toLocaleUpperCase(), + ]; - for (const enablementErrorMessage of enablementErrorMessages) { - for (const transform of transforms) { - const enablementError = new util.HTTPError( - transform(enablementErrorMessage), - 403, - ); - res = api.wrapApiConfigurationError(enablementError); - t.deepEqual( - res, - new util.ConfigurationError( - api.getFeatureEnablementError(enablementError.message), - ), - ); + for (const enablementErrorMessage of enablementErrorMessages) { + for (const transform of transforms) { + const enablementError = new util.HTTPError( + transform(enablementErrorMessage), + 403, + ); + res = api.wrapApiConfigurationError(enablementError); + t.deepEqual( + res, + new util.ConfigurationError( + api.getFeatureEnablementError(enablementError.message), + ), + ); + } } - } -}); + }, +); diff --git a/src/api-compatibility.json b/src/api-compatibility.json index b61bbd26d..2e55b9ad7 100644 --- a/src/api-compatibility.json +++ b/src/api-compatibility.json @@ -1 +1 @@ -{"maximumVersion": "3.20", "minimumVersion": "3.14"} +{"maximumVersion": "3.21", "minimumVersion": "3.14"} diff --git a/src/cli-errors.test.ts b/src/cli-errors.test.ts index 58ebfa2c4..7a3ed892b 100644 --- a/src/cli-errors.test.ts +++ b/src/cli-errors.test.ts @@ -131,27 +131,30 @@ for (const [platform, arch] of [ ["linux", "arm64"], ["win32", "arm64"], ]) { - test(`wrapCliConfigurationError - ${platform}/${arch} unsupported`, (t) => { - sinon.stub(process, "platform").value(platform); - sinon.stub(process, "arch").value(arch); - const commandError = new CommandInvocationError( - "codeql", - ["version"], - 1, - "Some error", - ); - const cliError = new CliError(commandError); + test.serial( + `wrapCliConfigurationError - ${platform}/${arch} unsupported`, + (t) => { + sinon.stub(process, "platform").value(platform); + 
sinon.stub(process, "arch").value(arch); + const commandError = new CommandInvocationError( + "codeql", + ["version"], + 1, + "Some error", + ); + const cliError = new CliError(commandError); - const wrappedError = wrapCliConfigurationError(cliError); + const wrappedError = wrapCliConfigurationError(cliError); - t.true(wrappedError instanceof ConfigurationError); - t.true( - wrappedError.message.includes( - "CodeQL CLI does not support the platform/architecture combination", - ), - ); - t.true(wrappedError.message.includes(`${platform}/${arch}`)); - }); + t.true(wrappedError instanceof ConfigurationError); + t.true( + wrappedError.message.includes( + "CodeQL CLI does not support the platform/architecture combination", + ), + ); + t.true(wrappedError.message.includes(`${platform}/${arch}`)); + }, + ); } test("wrapCliConfigurationError - supported platform", (t) => { diff --git a/src/codeql.test.ts b/src/codeql.test.ts index eb1ea9b34..cfbddf4f7 100644 --- a/src/codeql.test.ts +++ b/src/codeql.test.ts @@ -120,19 +120,53 @@ async function stubCodeql(): Promise { return codeqlObject; } -test("downloads and caches explicitly requested bundles that aren't in the toolcache", async (t) => { - const features = createFeatures([]); +test.serial( + "downloads and caches explicitly requested bundles that aren't in the toolcache", + async (t) => { + const features = createFeatures([]); - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); + await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); - const versions = ["20200601", "20200610"]; + const versions = ["20200601", "20200610"]; - for (let i = 0; i < versions.length; i++) { - const version = versions[i]; + for (let i = 0; i < versions.length; i++) { + const version = versions[i]; + const url = mockBundleDownloadApi({ + tagName: `codeql-bundle-${version}`, + isPinned: false, + }); + const result = await codeql.setupCodeQL( + url, + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + 
util.GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + getRunnerLogger(true), + false, + ); + + t.assert(toolcache.find("CodeQL", `0.0.0-${version}`)); + t.is(result.toolsVersion, `0.0.0-${version}`); + t.is(result.toolsSource, ToolsSource.Download); + } + + t.is(toolcache.findAllVersions("CodeQL").length, 2); + }); + }, +); + +test.serial( + "caches semantically versioned bundles using their semantic version number", + async (t) => { + const features = createFeatures([]); + + await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); const url = mockBundleDownloadApi({ - tagName: `codeql-bundle-${version}`, + tagName: `codeql-bundle-v2.15.0`, isPinned: false, }); const result = await codeql.setupCodeQL( @@ -146,78 +180,53 @@ test("downloads and caches explicitly requested bundles that aren't in the toolc false, ); - t.assert(toolcache.find("CodeQL", `0.0.0-${version}`)); - t.is(result.toolsVersion, `0.0.0-${version}`); + t.is(toolcache.findAllVersions("CodeQL").length, 1); + t.assert(toolcache.find("CodeQL", `2.15.0`)); + t.is(result.toolsVersion, `2.15.0`); t.is(result.toolsSource, ToolsSource.Download); - } - - t.is(toolcache.findAllVersions("CodeQL").length, 2); - }); -}); - -test("caches semantically versioned bundles using their semantic version number", async (t) => { - const features = createFeatures([]); - - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const url = mockBundleDownloadApi({ - tagName: `codeql-bundle-v2.15.0`, - isPinned: false, + if (result.toolsDownloadStatusReport) { + assertDurationsInteger(t, result.toolsDownloadStatusReport); + } }); - const result = await codeql.setupCodeQL( - url, - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - getRunnerLogger(true), - false, - ); + }, +); - t.is(toolcache.findAllVersions("CodeQL").length, 1); - t.assert(toolcache.find("CodeQL", `2.15.0`)); - t.is(result.toolsVersion, 
`2.15.0`); - t.is(result.toolsSource, ToolsSource.Download); - if (result.toolsDownloadStatusReport) { - assertDurationsInteger(t, result.toolsDownloadStatusReport); - } - }); -}); +test.serial( + "downloads an explicitly requested bundle even if a different version is cached", + async (t) => { + const features = createFeatures([]); -test("downloads an explicitly requested bundle even if a different version is cached", async (t) => { - const features = createFeatures([]); + await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); + await installIntoToolcache({ + tagName: "codeql-bundle-20200601", + isPinned: true, + tmpDir, + }); - await installIntoToolcache({ - tagName: "codeql-bundle-20200601", - isPinned: true, - tmpDir, + const url = mockBundleDownloadApi({ + tagName: "codeql-bundle-20200610", + }); + const result = await codeql.setupCodeQL( + url, + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + util.GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + getRunnerLogger(true), + false, + ); + t.assert(toolcache.find("CodeQL", "0.0.0-20200610")); + t.deepEqual(result.toolsVersion, "0.0.0-20200610"); + t.is(result.toolsSource, ToolsSource.Download); + if (result.toolsDownloadStatusReport) { + assertDurationsInteger(t, result.toolsDownloadStatusReport); + } }); - - const url = mockBundleDownloadApi({ - tagName: "codeql-bundle-20200610", - }); - const result = await codeql.setupCodeQL( - url, - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - getRunnerLogger(true), - false, - ); - t.assert(toolcache.find("CodeQL", "0.0.0-20200610")); - t.deepEqual(result.toolsVersion, "0.0.0-20200610"); - t.is(result.toolsSource, ToolsSource.Download); - if (result.toolsDownloadStatusReport) { - assertDurationsInteger(t, result.toolsDownloadStatusReport); - } - }); -}); + }, +); const 
EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES = [ { @@ -234,37 +243,42 @@ for (const { tagName, expectedToolcacheVersion, } of EXPLICITLY_REQUESTED_BUNDLE_TEST_CASES) { - test(`caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`, async (t) => { - const features = createFeatures([]); + test.serial( + `caches explicitly requested bundle ${tagName} as ${expectedToolcacheVersion}`, + async (t) => { + const features = createFeatures([]); - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); + await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); - mockApiDetails(SAMPLE_DOTCOM_API_DETAILS); - sinon.stub(actionsUtil, "isRunningLocalAction").returns(true); + mockApiDetails(SAMPLE_DOTCOM_API_DETAILS); + sinon.stub(actionsUtil, "isRunningLocalAction").returns(true); - const url = mockBundleDownloadApi({ - tagName, + const url = mockBundleDownloadApi({ + tagName, + }); + + const result = await codeql.setupCodeQL( + url, + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + util.GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + getRunnerLogger(true), + false, + ); + t.assert(toolcache.find("CodeQL", expectedToolcacheVersion)); + t.deepEqual(result.toolsVersion, expectedToolcacheVersion); + t.is(result.toolsSource, ToolsSource.Download); + t.assert( + Number.isInteger( + result.toolsDownloadStatusReport?.downloadDurationMs, + ), + ); }); - - const result = await codeql.setupCodeQL( - url, - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - getRunnerLogger(true), - false, - ); - t.assert(toolcache.find("CodeQL", expectedToolcacheVersion)); - t.deepEqual(result.toolsVersion, expectedToolcacheVersion); - t.is(result.toolsSource, ToolsSource.Download); - t.assert( - Number.isInteger(result.toolsDownloadStatusReport?.downloadDurationMs), - ); - }); - }); + }, + ); } for (const toolcacheVersion of [ @@ -273,7 +287,7 @@ for (const toolcacheVersion of [ 
SAMPLE_DEFAULT_CLI_VERSION.cliVersion, `${SAMPLE_DEFAULT_CLI_VERSION.cliVersion}-20230101`, ]) { - test( + test.serial( `uses tools from toolcache when ${SAMPLE_DEFAULT_CLI_VERSION.cliVersion} is requested and ` + `${toolcacheVersion} is installed`, async (t) => { @@ -308,158 +322,170 @@ for (const toolcacheVersion of [ ); } -test(`uses a cached bundle when no tools input is given on GHES`, async (t) => { - const features = createFeatures([]); +test.serial( + `uses a cached bundle when no tools input is given on GHES`, + async (t) => { + const features = createFeatures([]); - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); + await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); - await installIntoToolcache({ - tagName: "codeql-bundle-20200601", - isPinned: true, - tmpDir, + await installIntoToolcache({ + tagName: "codeql-bundle-20200601", + isPinned: true, + tmpDir, + }); + + const result = await codeql.setupCodeQL( + undefined, + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + util.GitHubVariant.GHES, + { + cliVersion: defaults.cliVersion, + tagName: defaults.bundleVersion, + }, + features, + getRunnerLogger(true), + false, + ); + t.deepEqual(result.toolsVersion, "0.0.0-20200601"); + t.is(result.toolsSource, ToolsSource.Toolcache); + t.is(result.toolsDownloadStatusReport?.combinedDurationMs, undefined); + t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined); + t.is(result.toolsDownloadStatusReport?.extractionDurationMs, undefined); + + const cachedVersions = toolcache.findAllVersions("CodeQL"); + t.is(cachedVersions.length, 1); }); + }, +); - const result = await codeql.setupCodeQL( - undefined, - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.GHES, - { - cliVersion: defaults.cliVersion, +test.serial( + `downloads bundle if only an unpinned version is cached on GHES`, + async (t) => { + const features = createFeatures([]); + + await util.withTmpDir(async (tmpDir) => { + 
setupActionsVars(tmpDir, tmpDir); + + await installIntoToolcache({ + tagName: "codeql-bundle-20200601", + isPinned: false, + tmpDir, + }); + + mockBundleDownloadApi({ tagName: defaults.bundleVersion, - }, - features, - getRunnerLogger(true), - false, - ); - t.deepEqual(result.toolsVersion, "0.0.0-20200601"); - t.is(result.toolsSource, ToolsSource.Toolcache); - t.is(result.toolsDownloadStatusReport?.combinedDurationMs, undefined); - t.is(result.toolsDownloadStatusReport?.downloadDurationMs, undefined); - t.is(result.toolsDownloadStatusReport?.extractionDurationMs, undefined); + }); + const result = await codeql.setupCodeQL( + undefined, + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + util.GitHubVariant.GHES, + { + cliVersion: defaults.cliVersion, + tagName: defaults.bundleVersion, + }, + features, + getRunnerLogger(true), + false, + ); + t.deepEqual(result.toolsVersion, defaults.cliVersion); + t.is(result.toolsSource, ToolsSource.Download); + if (result.toolsDownloadStatusReport) { + assertDurationsInteger(t, result.toolsDownloadStatusReport); + } - const cachedVersions = toolcache.findAllVersions("CodeQL"); - t.is(cachedVersions.length, 1); - }); -}); - -test(`downloads bundle if only an unpinned version is cached on GHES`, async (t) => { - const features = createFeatures([]); - - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - - await installIntoToolcache({ - tagName: "codeql-bundle-20200601", - isPinned: false, - tmpDir, + const cachedVersions = toolcache.findAllVersions("CodeQL"); + t.is(cachedVersions.length, 2); }); + }, +); - mockBundleDownloadApi({ - tagName: defaults.bundleVersion, - }); - const result = await codeql.setupCodeQL( - undefined, - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.GHES, - { - cliVersion: defaults.cliVersion, +test.serial( + 'downloads bundle if "latest" tools specified but not cached', + async (t) => { + const features = createFeatures([]); + + await util.withTmpDir(async (tmpDir) => { + 
setupActionsVars(tmpDir, tmpDir); + + await installIntoToolcache({ + tagName: "codeql-bundle-20200601", + isPinned: true, + tmpDir, + }); + + mockBundleDownloadApi({ tagName: defaults.bundleVersion, - }, - features, - getRunnerLogger(true), - false, - ); - t.deepEqual(result.toolsVersion, defaults.cliVersion); - t.is(result.toolsSource, ToolsSource.Download); - if (result.toolsDownloadStatusReport) { - assertDurationsInteger(t, result.toolsDownloadStatusReport); - } + }); + const result = await codeql.setupCodeQL( + "latest", + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + util.GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + getRunnerLogger(true), + false, + ); + t.deepEqual(result.toolsVersion, defaults.cliVersion); + t.is(result.toolsSource, ToolsSource.Download); + if (result.toolsDownloadStatusReport) { + assertDurationsInteger(t, result.toolsDownloadStatusReport); + } - const cachedVersions = toolcache.findAllVersions("CodeQL"); - t.is(cachedVersions.length, 2); - }); -}); - -test('downloads bundle if "latest" tools specified but not cached', async (t) => { - const features = createFeatures([]); - - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - - await installIntoToolcache({ - tagName: "codeql-bundle-20200601", - isPinned: true, - tmpDir, + const cachedVersions = toolcache.findAllVersions("CodeQL"); + t.is(cachedVersions.length, 2); }); + }, +); - mockBundleDownloadApi({ - tagName: defaults.bundleVersion, +test.serial( + "bundle URL from another repo is cached as 0.0.0-bundleVersion", + async (t) => { + const features = createFeatures([]); + + await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + + mockApiDetails(SAMPLE_DOTCOM_API_DETAILS); + sinon.stub(actionsUtil, "isRunningLocalAction").returns(true); + const releasesApiMock = mockReleaseApi({ + assetNames: ["cli-version-2.14.6.txt"], + tagName: "codeql-bundle-20230203", + }); + mockBundleDownloadApi({ + repo: 
"codeql-testing/codeql-cli-nightlies", + platformSpecific: false, + tagName: "codeql-bundle-20230203", + }); + const result = await codeql.setupCodeQL( + "https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", + SAMPLE_DOTCOM_API_DETAILS, + tmpDir, + util.GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + getRunnerLogger(true), + false, + ); + + t.is(result.toolsVersion, "0.0.0-20230203"); + t.is(result.toolsSource, ToolsSource.Download); + if (result.toolsDownloadStatusReport) { + assertDurationsInteger(t, result.toolsDownloadStatusReport); + } + + const cachedVersions = toolcache.findAllVersions("CodeQL"); + t.is(cachedVersions.length, 1); + t.is(cachedVersions[0], "0.0.0-20230203"); + + t.false(releasesApiMock.isDone()); }); - const result = await codeql.setupCodeQL( - "latest", - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - getRunnerLogger(true), - false, - ); - t.deepEqual(result.toolsVersion, defaults.cliVersion); - t.is(result.toolsSource, ToolsSource.Download); - if (result.toolsDownloadStatusReport) { - assertDurationsInteger(t, result.toolsDownloadStatusReport); - } - - const cachedVersions = toolcache.findAllVersions("CodeQL"); - t.is(cachedVersions.length, 2); - }); -}); - -test("bundle URL from another repo is cached as 0.0.0-bundleVersion", async (t) => { - const features = createFeatures([]); - - await util.withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - - mockApiDetails(SAMPLE_DOTCOM_API_DETAILS); - sinon.stub(actionsUtil, "isRunningLocalAction").returns(true); - const releasesApiMock = mockReleaseApi({ - assetNames: ["cli-version-2.14.6.txt"], - tagName: "codeql-bundle-20230203", - }); - mockBundleDownloadApi({ - repo: "codeql-testing/codeql-cli-nightlies", - platformSpecific: false, - tagName: "codeql-bundle-20230203", - }); - const result = await codeql.setupCodeQL( - 
"https://github.com/codeql-testing/codeql-cli-nightlies/releases/download/codeql-bundle-20230203/codeql-bundle.tar.gz", - SAMPLE_DOTCOM_API_DETAILS, - tmpDir, - util.GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - getRunnerLogger(true), - false, - ); - - t.is(result.toolsVersion, "0.0.0-20230203"); - t.is(result.toolsSource, ToolsSource.Download); - if (result.toolsDownloadStatusReport) { - assertDurationsInteger(t, result.toolsDownloadStatusReport); - } - - const cachedVersions = toolcache.findAllVersions("CodeQL"); - t.is(cachedVersions.length, 1); - t.is(cachedVersions[0], "0.0.0-20230203"); - - t.false(releasesApiMock.isDone()); - }); -}); + }, +); function assertDurationsInteger( t: ExecutionContext, @@ -472,7 +498,7 @@ function assertDurationsInteger( } } -test("getExtraOptions works for explicit paths", (t) => { +test.serial("getExtraOptions works for explicit paths", (t) => { t.deepEqual(codeql.getExtraOptions({}, ["foo"], []), []); t.deepEqual(codeql.getExtraOptions({ foo: [42] }, ["foo"], []), ["42"]); @@ -483,11 +509,11 @@ test("getExtraOptions works for explicit paths", (t) => { ); }); -test("getExtraOptions works for wildcards", (t) => { +test.serial("getExtraOptions works for wildcards", (t) => { t.deepEqual(codeql.getExtraOptions({ "*": [42] }, ["foo"], []), ["42"]); }); -test("getExtraOptions works for wildcards and explicit paths", (t) => { +test.serial("getExtraOptions works for wildcards and explicit paths", (t) => { const o1 = { "*": [42], foo: [87] }; t.deepEqual(codeql.getExtraOptions(o1, ["foo"], []), ["42", "87"]); @@ -499,7 +525,7 @@ test("getExtraOptions works for wildcards and explicit paths", (t) => { t.deepEqual(codeql.getExtraOptions(o3, p, []), ["42", "87", "99"]); }); -test("getExtraOptions throws for bad content", (t) => { +test.serial("getExtraOptions throws for bad content", (t) => { t.throws(() => codeql.getExtraOptions({ "*": 42 }, ["foo"], [])); t.throws(() => codeql.getExtraOptions({ foo: 87 }, ["foo"], [])); 
@@ -564,7 +590,7 @@ const injectedConfigMacro = test.macro({ `databaseInitCluster() injected config: ${providedTitle}`, }); -test( +test.serial( "basic", injectedConfigMacro, { @@ -574,7 +600,7 @@ test( {}, ); -test( +test.serial( "injected packs from input", injectedConfigMacro, { @@ -587,7 +613,7 @@ test( }, ); -test( +test.serial( "injected packs from input with existing packs combines", injectedConfigMacro, { @@ -609,7 +635,7 @@ test( }, ); -test( +test.serial( "injected packs from input with existing packs overrides", injectedConfigMacro, { @@ -629,7 +655,7 @@ test( ); // similar, but with queries -test( +test.serial( "injected queries from input", injectedConfigMacro, { @@ -649,7 +675,7 @@ test( }, ); -test( +test.serial( "injected queries from input overrides", injectedConfigMacro, { @@ -673,7 +699,7 @@ test( }, ); -test( +test.serial( "injected queries from input combines", injectedConfigMacro, { @@ -701,7 +727,7 @@ test( }, ); -test( +test.serial( "injected queries from input combines 2", injectedConfigMacro, { @@ -723,7 +749,7 @@ test( }, ); -test( +test.serial( "injected queries and packs, but empty", injectedConfigMacro, { @@ -742,7 +768,7 @@ test( {}, ); -test( +test.serial( "repo property queries have the highest precedence", injectedConfigMacro, { @@ -764,7 +790,7 @@ test( }, ); -test( +test.serial( "repo property queries combines with queries input", injectedConfigMacro, { @@ -791,7 +817,7 @@ test( }, ); -test( +test.serial( "repo property queries combines everything else", injectedConfigMacro, { @@ -820,55 +846,61 @@ test( }, ); -test("passes a code scanning config AND qlconfig to the CLI", async (t: ExecutionContext) => { - await util.withTmpDir(async (tempDir) => { - const runnerConstructorStub = stubToolRunnerConstructor(); - const codeqlObject = await stubCodeql(); - await codeqlObject.databaseInitCluster( - { ...stubConfig, tempDir }, - "", - undefined, - "/path/to/qlconfig.yml", - getRunnerLogger(true), - ); +test.serial( + "passes a code 
scanning config AND qlconfig to the CLI", + async (t: ExecutionContext) => { + await util.withTmpDir(async (tempDir) => { + const runnerConstructorStub = stubToolRunnerConstructor(); + const codeqlObject = await stubCodeql(); + await codeqlObject.databaseInitCluster( + { ...stubConfig, tempDir }, + "", + undefined, + "/path/to/qlconfig.yml", + getRunnerLogger(true), + ); - const args = runnerConstructorStub.firstCall.args[1] as string[]; - // should have used a config file - const hasCodeScanningConfigArg = args.some((arg: string) => - arg.startsWith("--codescanning-config="), - ); - t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig"); + const args = runnerConstructorStub.firstCall.args[1] as string[]; + // should have used a config file + const hasCodeScanningConfigArg = args.some((arg: string) => + arg.startsWith("--codescanning-config="), + ); + t.true(hasCodeScanningConfigArg, "Should have injected a qlconfig"); - // should have passed a qlconfig file - const hasQlconfigArg = args.some((arg: string) => - arg.startsWith("--qlconfig-file="), - ); - t.truthy(hasQlconfigArg, "Should have injected a codescanning config"); - }); -}); + // should have passed a qlconfig file + const hasQlconfigArg = args.some((arg: string) => + arg.startsWith("--qlconfig-file="), + ); + t.truthy(hasQlconfigArg, "Should have injected a codescanning config"); + }); + }, +); -test("does not pass a qlconfig to the CLI when it is undefined", async (t: ExecutionContext) => { - await util.withTmpDir(async (tempDir) => { - const runnerConstructorStub = stubToolRunnerConstructor(); - const codeqlObject = await stubCodeql(); +test.serial( + "does not pass a qlconfig to the CLI when it is undefined", + async (t: ExecutionContext) => { + await util.withTmpDir(async (tempDir) => { + const runnerConstructorStub = stubToolRunnerConstructor(); + const codeqlObject = await stubCodeql(); - await codeqlObject.databaseInitCluster( - { ...stubConfig, tempDir }, - "", - undefined, - 
undefined, // undefined qlconfigFile - getRunnerLogger(true), - ); + await codeqlObject.databaseInitCluster( + { ...stubConfig, tempDir }, + "", + undefined, + undefined, // undefined qlconfigFile + getRunnerLogger(true), + ); - const args = runnerConstructorStub.firstCall.args[1] as any[]; - const hasQlconfigArg = args.some((arg: string) => - arg.startsWith("--qlconfig-file="), - ); - t.false(hasQlconfigArg, "should NOT have injected a qlconfig"); - }); -}); + const args = runnerConstructorStub.firstCall.args[1] as any[]; + const hasQlconfigArg = args.some((arg: string) => + arg.startsWith("--qlconfig-file="), + ); + t.false(hasQlconfigArg, "should NOT have injected a qlconfig"); + }); + }, +); -test("runTool summarizes several fatal errors", async (t) => { +test.serial("runTool summarizes several fatal errors", async (t) => { const heapError = "A fatal error occurred: Evaluator heap must be at least 384.00 MiB"; const datasetImportError = @@ -905,7 +937,7 @@ test("runTool summarizes several fatal errors", async (t) => { ); }); -test("runTool summarizes autobuilder errors", async (t) => { +test.serial("runTool summarizes autobuilder errors", async (t) => { const stderr = ` [2019-09-18 12:00:00] [autobuild] A non-error message [2019-09-18 12:00:00] Untagged message @@ -938,7 +970,7 @@ test("runTool summarizes autobuilder errors", async (t) => { ); }); -test("runTool truncates long autobuilder errors", async (t) => { +test.serial("runTool truncates long autobuilder errors", async (t) => { const stderr = Array.from( { length: 20 }, (_, i) => `[2019-09-18 12:00:00] [autobuild] [ERROR] line${i + 1}`, @@ -964,7 +996,7 @@ test("runTool truncates long autobuilder errors", async (t) => { ); }); -test("runTool recognizes fatal internal errors", async (t) => { +test.serial("runTool recognizes fatal internal errors", async (t) => { const stderr = ` [11/31 eval 8m19s] Evaluation done; writing results to codeql/go-queries/Security/CWE-020/MissingRegexpAnchor.bqrs. Oops! 
A fatal internal error occurred. Details: @@ -989,64 +1021,70 @@ test("runTool recognizes fatal internal errors", async (t) => { ); }); -test("runTool outputs last line of stderr if fatal error could not be found", async (t) => { - const cliStderr = "line1\nline2\nline3\nline4\nline5"; - stubToolRunnerConstructor(32, cliStderr); - const codeqlObject = await stubCodeql(); - // io throws because of the test CodeQL object. - sinon.stub(io, "which").resolves(""); +test.serial( + "runTool outputs last line of stderr if fatal error could not be found", + async (t) => { + const cliStderr = "line1\nline2\nline3\nline4\nline5"; + stubToolRunnerConstructor(32, cliStderr); + const codeqlObject = await stubCodeql(); + // io throws because of the test CodeQL object. + sinon.stub(io, "which").resolves(""); - await t.throwsAsync( - async () => - await codeqlObject.finalizeDatabase( - "db", - "--threads=2", - "--ram=2048", - false, - ), - { - instanceOf: util.ConfigurationError, - message: new RegExp( - 'Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' + - "Exit code was 32 and last log line was: line5\\. See the logs for more details\\.", - ), - }, - ); -}); + await t.throwsAsync( + async () => + await codeqlObject.finalizeDatabase( + "db", + "--threads=2", + "--ram=2048", + false, + ), + { + instanceOf: util.ConfigurationError, + message: new RegExp( + 'Encountered a fatal error while running \\"codeql-for-testing database finalize --finalize-dataset --threads=2 --ram=2048 db\\"\\. ' + + "Exit code was 32 and last log line was: line5\\. See the logs for more details\\.", + ), + }, + ); + }, +); -test("Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS", async (t) => { - const runnerConstructorStub = stubToolRunnerConstructor(); - const codeqlObject = await stubCodeql(); - // io throws because of the test CodeQL object. 
- sinon.stub(io, "which").resolves(""); +test.serial( + "Avoids duplicating --overwrite flag if specified in CODEQL_ACTION_EXTRA_OPTIONS", + async (t) => { + const runnerConstructorStub = stubToolRunnerConstructor(); + const codeqlObject = await stubCodeql(); + // io throws because of the test CodeQL object. + sinon.stub(io, "which").resolves(""); - process.env["CODEQL_ACTION_EXTRA_OPTIONS"] = - '{ "database": { "init": ["--overwrite"] } }'; + process.env["CODEQL_ACTION_EXTRA_OPTIONS"] = + '{ "database": { "init": ["--overwrite"] } }'; - await codeqlObject.databaseInitCluster( - stubConfig, - "sourceRoot", - undefined, - undefined, - getRunnerLogger(false), - ); + await codeqlObject.databaseInitCluster( + stubConfig, + "sourceRoot", + undefined, + undefined, + getRunnerLogger(false), + ); - t.true(runnerConstructorStub.calledOnce); - const args = runnerConstructorStub.firstCall.args[1] as string[]; - t.is( - args.filter((option: string) => option === "--overwrite").length, - 1, - "--overwrite should only be passed once", - ); + t.true(runnerConstructorStub.calledOnce); + const args = runnerConstructorStub.firstCall.args[1] as string[]; + t.is( + args.filter((option: string) => option === "--overwrite").length, + 1, + "--overwrite should only be passed once", + ); - // Clean up - const configArg = args.find((arg: string) => - arg.startsWith("--codescanning-config="), - ); - t.truthy(configArg, "Should have injected a codescanning config"); - const configFile = configArg!.split("=")[1]; - await fs.promises.rm(configFile, { force: true }); -}); + // Clean up + const configArg = args.find((arg: string) => + arg.startsWith("--codescanning-config="), + ); + t.truthy(configArg, "Should have injected a codescanning config"); + const configFile = configArg!.split("=")[1]; + await fs.promises.rm(configFile, { force: true }); + }, +); export function stubToolRunnerConstructor( exitCode: number = 0, diff --git a/src/config-utils.test.ts b/src/config-utils.test.ts index 
7624f1a2d..06994c0ed 100644 --- a/src/config-utils.test.ts +++ b/src/config-utils.test.ts @@ -40,6 +40,8 @@ import { withTmpDir, BuildMode, DiskUsage, + Success, + Failure, } from "./util"; import * as util from "./util"; @@ -137,7 +139,7 @@ function mockListLanguages(languages: string[]) { sinon.stub(api, "getApiClient").value(() => client); } -test("load empty config", async (t) => { +test.serial("load empty config", async (t) => { return await withTmpDir(async (tempDir) => { const logger = getRunnerLogger(true); const languages = "javascript,python"; @@ -178,7 +180,7 @@ test("load empty config", async (t) => { }); }); -test("load code quality config", async (t) => { +test.serial("load code quality config", async (t) => { return await withTmpDir(async (tempDir) => { const logger = getRunnerLogger(true); const languages = "actions"; @@ -228,65 +230,68 @@ test("load code quality config", async (t) => { }); }); -test("initActionState doesn't throw if there are queries configured in the repository properties", async (t) => { - return await withTmpDir(async (tempDir) => { - const logger = getRunnerLogger(true); - const languages = "javascript"; +test.serial( + "initActionState doesn't throw if there are queries configured in the repository properties", + async (t) => { + return await withTmpDir(async (tempDir) => { + const logger = getRunnerLogger(true); + const languages = "javascript"; - const codeql = createStubCodeQL({ - async betterResolveLanguages() { - return { - extractors: { - javascript: [{ extractor_root: "" }], - }, - }; - }, + const codeql = createStubCodeQL({ + async betterResolveLanguages() { + return { + extractors: { + javascript: [{ extractor_root: "" }], + }, + }; + }, + }); + + // This should be ignored and no error should be thrown. + const repositoryProperties = { + "github-codeql-extra-queries": "+foo", + }; + + // Expected configuration for a CQ-only analysis. 
+ const computedConfig: UserConfig = { + "disable-default-queries": true, + queries: [{ uses: "code-quality" }], + "query-filters": [], + }; + + const expectedConfig = createTestConfig({ + analysisKinds: [AnalysisKind.CodeQuality], + languages: [KnownLanguage.javascript], + codeQLCmd: codeql.getPath(), + computedConfig, + dbLocation: path.resolve(tempDir, "codeql_databases"), + debugArtifactName: "", + debugDatabaseName: "", + tempDir, + repositoryProperties, + }); + + await t.notThrowsAsync(async () => { + const config = await configUtils.initConfig( + createFeatures([]), + createTestInitConfigInputs({ + analysisKinds: [AnalysisKind.CodeQuality], + languagesInput: languages, + repository: { owner: "github", repo: "example" }, + tempDir, + codeql, + repositoryProperties, + logger, + }), + ); + + t.deepEqual(config, expectedConfig); + }); }); + }, +); - // This should be ignored and no error should be thrown. - const repositoryProperties = { - "github-codeql-extra-queries": "+foo", - }; - - // Expected configuration for a CQ-only analysis. 
- const computedConfig: UserConfig = { - "disable-default-queries": true, - queries: [{ uses: "code-quality" }], - "query-filters": [], - }; - - const expectedConfig = createTestConfig({ - analysisKinds: [AnalysisKind.CodeQuality], - languages: [KnownLanguage.javascript], - codeQLCmd: codeql.getPath(), - computedConfig, - dbLocation: path.resolve(tempDir, "codeql_databases"), - debugArtifactName: "", - debugDatabaseName: "", - tempDir, - repositoryProperties, - }); - - await t.notThrowsAsync(async () => { - const config = await configUtils.initConfig( - createFeatures([]), - createTestInitConfigInputs({ - analysisKinds: [AnalysisKind.CodeQuality], - languagesInput: languages, - repository: { owner: "github", repo: "example" }, - tempDir, - codeql, - repositoryProperties, - logger, - }), - ); - - t.deepEqual(config, expectedConfig); - }); - }); -}); - -test("loading a saved config produces the same config", async (t) => { +test.serial("loading a saved config produces the same config", async (t) => { return await withTmpDir(async (tempDir) => { const logger = getRunnerLogger(true); @@ -333,7 +338,7 @@ test("loading a saved config produces the same config", async (t) => { }); }); -test("loading config with version mismatch throws", async (t) => { +test.serial("loading config with version mismatch throws", async (t) => { return await withTmpDir(async (tempDir) => { const logger = getRunnerLogger(true); @@ -385,7 +390,7 @@ test("loading config with version mismatch throws", async (t) => { }); }); -test("load input outside of workspace", async (t) => { +test.serial("load input outside of workspace", async (t) => { return await withTmpDir(async (tempDir) => { try { await configUtils.initConfig( @@ -410,7 +415,7 @@ test("load input outside of workspace", async (t) => { }); }); -test("load non-local input with invalid repo syntax", async (t) => { +test.serial("load non-local input with invalid repo syntax", async (t) => { return await withTmpDir(async (tempDir) => { // no 
filename given, just a repo const configFile = "octo-org/codeql-config@main"; @@ -438,7 +443,7 @@ test("load non-local input with invalid repo syntax", async (t) => { }); }); -test("load non-existent input", async (t) => { +test.serial("load non-existent input", async (t) => { return await withTmpDir(async (tempDir) => { const languagesInput = "javascript"; const configFile = "input"; @@ -468,7 +473,7 @@ test("load non-existent input", async (t) => { }); }); -test("load non-empty input", async (t) => { +test.serial("load non-empty input", async (t) => { return await withTmpDir(async (tempDir) => { const codeql = createStubCodeQL({ async betterResolveLanguages() { @@ -539,18 +544,20 @@ test("load non-empty input", async (t) => { }); }); -test("Using config input and file together, config input should be used.", async (t) => { - return await withTmpDir(async (tempDir) => { - process.env["RUNNER_TEMP"] = tempDir; - process.env["GITHUB_WORKSPACE"] = tempDir; +test.serial( + "Using config input and file together, config input should be used.", + async (t) => { + return await withTmpDir(async (tempDir) => { + process.env["RUNNER_TEMP"] = tempDir; + process.env["GITHUB_WORKSPACE"] = tempDir; - const inputFileContents = ` + const inputFileContents = ` name: my config queries: - uses: ./foo_file`; - const configFilePath = createConfigFile(inputFileContents, tempDir); + const configFilePath = createConfigFile(inputFileContents, tempDir); - const configInput = ` + const configInput = ` name: my config queries: - uses: ./foo @@ -561,39 +568,40 @@ test("Using config input and file together, config input should be used.", async - c/d@1.2.3 `; - fs.mkdirSync(path.join(tempDir, "foo")); + fs.mkdirSync(path.join(tempDir, "foo")); - const codeql = createStubCodeQL({ - async betterResolveLanguages() { - return { - extractors: { - javascript: [{ extractor_root: "" }], - python: [{ extractor_root: "" }], - }, - }; - }, + const codeql = createStubCodeQL({ + async 
betterResolveLanguages() { + return { + extractors: { + javascript: [{ extractor_root: "" }], + python: [{ extractor_root: "" }], + }, + }; + }, + }); + + // Only JS, python packs will be ignored + const languagesInput = "javascript"; + + const config = await configUtils.initConfig( + createFeatures([]), + createTestInitConfigInputs({ + languagesInput, + configFile: configFilePath, + configInput, + tempDir, + codeql, + workspacePath: tempDir, + }), + ); + + t.deepEqual(config.originalUserInput, yaml.load(configInput)); }); + }, +); - // Only JS, python packs will be ignored - const languagesInput = "javascript"; - - const config = await configUtils.initConfig( - createFeatures([]), - createTestInitConfigInputs({ - languagesInput, - configFile: configFilePath, - configInput, - tempDir, - codeql, - workspacePath: tempDir, - }), - ); - - t.deepEqual(config.originalUserInput, yaml.load(configInput)); - }); -}); - -test("API client used when reading remote config", async (t) => { +test.serial("API client used when reading remote config", async (t) => { return await withTmpDir(async (tempDir) => { const codeql = createStubCodeQL({ async betterResolveLanguages() { @@ -642,34 +650,37 @@ test("API client used when reading remote config", async (t) => { }); }); -test("Remote config handles the case where a directory is provided", async (t) => { - return await withTmpDir(async (tempDir) => { - const dummyResponse = []; // directories are returned as arrays - mockGetContents(dummyResponse); +test.serial( + "Remote config handles the case where a directory is provided", + async (t) => { + return await withTmpDir(async (tempDir) => { + const dummyResponse = []; // directories are returned as arrays + mockGetContents(dummyResponse); - const repoReference = "octo-org/codeql-config/config.yaml@main"; - try { - await configUtils.initConfig( - createFeatures([]), - createTestInitConfigInputs({ - configFile: repoReference, - tempDir, - workspacePath: tempDir, - }), - ); - throw new 
Error("initConfig did not throw error"); - } catch (err) { - t.deepEqual( - err, - new ConfigurationError( - errorMessages.getConfigFileDirectoryGivenMessage(repoReference), - ), - ); - } - }); -}); + const repoReference = "octo-org/codeql-config/config.yaml@main"; + try { + await configUtils.initConfig( + createFeatures([]), + createTestInitConfigInputs({ + configFile: repoReference, + tempDir, + workspacePath: tempDir, + }), + ); + throw new Error("initConfig did not throw error"); + } catch (err) { + t.deepEqual( + err, + new ConfigurationError( + errorMessages.getConfigFileDirectoryGivenMessage(repoReference), + ), + ); + } + }); + }, +); -test("Invalid format of remote config handled correctly", async (t) => { +test.serial("Invalid format of remote config handled correctly", async (t) => { return await withTmpDir(async (tempDir) => { const dummyResponse = { // note no "content" property here @@ -698,7 +709,7 @@ test("Invalid format of remote config handled correctly", async (t) => { }); }); -test("No detected languages", async (t) => { +test.serial("No detected languages", async (t) => { return await withTmpDir(async (tempDir) => { mockListLanguages([]); const codeql = createStubCodeQL({ @@ -726,7 +737,7 @@ test("No detected languages", async (t) => { }); }); -test("Unknown languages", async (t) => { +test.serial("Unknown languages", async (t) => { return await withTmpDir(async (tempDir) => { const languagesInput = "rubbish,english"; @@ -753,7 +764,7 @@ test("Unknown languages", async (t) => { const mockLogger = getRunnerLogger(true); -test("no generateRegistries when registries is undefined", async (t) => { +test.serial("no generateRegistries when registries is undefined", async (t) => { return await withTmpDir(async (tmpDir) => { const registriesInput = undefined; const logger = getRunnerLogger(true); @@ -765,24 +776,27 @@ test("no generateRegistries when registries is undefined", async (t) => { }); }); -test("generateRegistries prefers original 
CODEQL_REGISTRIES_AUTH", async (t) => { - return await withTmpDir(async (tmpDir) => { - process.env.CODEQL_REGISTRIES_AUTH = "original"; - const registriesInput = yaml.dump([ - { - url: "http://ghcr.io", - packages: ["codeql/*", "codeql-testing/*"], - token: "not-a-token", - }, - ]); - const logger = getRunnerLogger(true); - const { registriesAuthTokens, qlconfigFile } = - await configUtils.generateRegistries(registriesInput, tmpDir, logger); +test.serial( + "generateRegistries prefers original CODEQL_REGISTRIES_AUTH", + async (t) => { + return await withTmpDir(async (tmpDir) => { + process.env.CODEQL_REGISTRIES_AUTH = "original"; + const registriesInput = yaml.dump([ + { + url: "http://ghcr.io", + packages: ["codeql/*", "codeql-testing/*"], + token: "not-a-token", + }, + ]); + const logger = getRunnerLogger(true); + const { registriesAuthTokens, qlconfigFile } = + await configUtils.generateRegistries(registriesInput, tmpDir, logger); - t.is(registriesAuthTokens, "original"); - t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml")); - }); -}); + t.is(registriesAuthTokens, "original"); + t.is(qlconfigFile, path.join(tmpDir, "qlconfig.yml")); + }); + }, +); // getLanguages @@ -860,7 +874,7 @@ const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); expectedLanguages: ["javascript"], }, ].forEach((args) => { - test(`getLanguages: ${args.name}`, async (t) => { + test.serial(`getLanguages: ${args.name}`, async (t) => { const mockRequest = mockLanguagesInRepo(args.languagesInRepository); const stubExtractorEntry = { extractor_root: "", @@ -1012,16 +1026,19 @@ const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = { repositoryProperties: {}, }; -const getOverlayDatabaseModeMacro = test.macro({ +const checkOverlayEnablementMacro = test.macro({ exec: async ( t: ExecutionContext, _title: string, setupOverrides: Partial, - expected: { - overlayDatabaseMode: OverlayDatabaseMode; - useOverlayDatabaseCaching: boolean; - disabledReason?: 
OverlayDisabledReason; - }, + expected: + | { + overlayDatabaseMode: OverlayDatabaseMode; + useOverlayDatabaseCaching: boolean; + } + | { + disabledReason: OverlayDisabledReason; + }, ) => { return await withTmpDir(async (tempDir) => { const messages: LoggedMessage[] = []; @@ -1079,7 +1096,7 @@ const getOverlayDatabaseModeMacro = test.macro({ .stub(gitUtils, "isAnalyzingDefaultBranch") .resolves(setup.isDefaultBranch); - const result = await configUtils.getOverlayDatabaseMode( + const result = await configUtils.checkOverlayEnablement( codeql, features, setup.languages, @@ -1092,22 +1109,22 @@ const getOverlayDatabaseModeMacro = test.macro({ logger, ); - if (!("disabledReason" in expected)) { - expected.disabledReason = undefined; + if ("disabledReason" in expected) { + t.deepEqual(result, new Failure(expected.disabledReason)); + } else { + t.deepEqual(result, new Success(expected)); } - - t.deepEqual(result, expected); } finally { // Restore the original environment process.env = originalEnv; } }); }, - title: (_, title) => `getOverlayDatabaseMode: ${title}`, + title: (_, title) => `checkOverlayEnablement: ${title}`, }); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Environment variable override - Overlay", { overlayDatabaseEnvVar: "overlay", @@ -1118,8 +1135,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Environment variable override - OverlayBase", { overlayDatabaseEnvVar: "overlay-base", @@ -1130,46 +1147,42 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Environment variable override - None", { overlayDatabaseEnvVar: "none", }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, + disabledReason: OverlayDisabledReason.DisabledByEnvironmentVariable, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Ignore invalid environment variable", { 
overlayDatabaseEnvVar: "invalid-mode", }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.OverallFeatureNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Ignore feature flag when analyzing non-default branch", { languages: [KnownLanguage.javascript], features: [Feature.OverlayAnalysis, Feature.OverlayAnalysisJavascript], }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, + disabledReason: OverlayDisabledReason.NotPullRequestOrDefaultBranch, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay-base database on default branch when feature enabled", { languages: [KnownLanguage.javascript], @@ -1182,8 +1195,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay-base database on default branch when feature enabled with custom analysis", { languages: [KnownLanguage.javascript], @@ -1199,8 +1212,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay-base database on default branch when code-scanning feature enabled", { languages: [KnownLanguage.javascript], @@ -1216,8 +1229,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch if runner disk space is too low", { languages: [KnownLanguage.javascript], @@ -1232,14 +1245,12 @@ test( }, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.InsufficientDiskSpace, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch if we can't determine runner disk 
space", { languages: [KnownLanguage.javascript], @@ -1251,14 +1262,12 @@ test( diskUsage: undefined, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.UnableToDetermineDiskUsage, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay-base database on default branch if runner disk space is too low and skip resource checks flag is enabled", { languages: [KnownLanguage.javascript], @@ -1279,8 +1288,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch if runner disk space is below v2 limit and v2 resource checks enabled", { languages: [KnownLanguage.javascript], @@ -1296,14 +1305,12 @@ test( }, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.InsufficientDiskSpace, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay-base database on default branch if runner disk space is between v2 and v1 limits and v2 resource checks enabled", { languages: [KnownLanguage.javascript], @@ -1324,8 +1331,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch if runner disk space is between v2 and v1 limits and v2 resource checks not enabled", { languages: [KnownLanguage.javascript], @@ -1340,14 +1347,12 @@ test( }, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.InsufficientDiskSpace, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on 
default branch if memory flag is too low", { languages: [KnownLanguage.javascript], @@ -1359,14 +1364,31 @@ test( memoryFlagValue: 3072, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.InsufficientMemory, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, + "Overlay-base database on default branch if memory flag is too low but CodeQL >= 2.24.3", + { + languages: [KnownLanguage.javascript], + features: [ + Feature.OverlayAnalysis, + Feature.OverlayAnalysisCodeScanningJavascript, + ], + isDefaultBranch: true, + memoryFlagValue: 3072, + codeqlVersion: "2.24.3", + }, + { + overlayDatabaseMode: OverlayDatabaseMode.OverlayBase, + useOverlayDatabaseCaching: true, + }, +); + +test.serial( + checkOverlayEnablementMacro, "Overlay-base database on default branch if memory flag is too low and skip resource checks flag is enabled", { languages: [KnownLanguage.javascript], @@ -1384,8 +1406,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when cached status indicates previous failure", { languages: [KnownLanguage.javascript], @@ -1398,14 +1420,12 @@ test( shouldSkipOverlayAnalysisDueToCachedStatus: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.SkippedDueToCachedStatus, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when cached status indicates previous failure", { languages: [KnownLanguage.javascript], @@ -1418,14 +1438,12 @@ test( shouldSkipOverlayAnalysisDueToCachedStatus: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.SkippedDueToCachedStatus, }, ); -test( - 
getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when code-scanning feature enabled with disable-default-queries", { languages: [KnownLanguage.javascript], @@ -1439,14 +1457,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when code-scanning feature enabled with packs", { languages: [KnownLanguage.javascript], @@ -1460,14 +1476,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when code-scanning feature enabled with queries", { languages: [KnownLanguage.javascript], @@ -1481,14 +1495,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when code-scanning feature enabled with query-filters", { languages: [KnownLanguage.javascript], @@ -1502,14 +1514,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, 
+test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when only language-specific feature enabled", { languages: [KnownLanguage.javascript], @@ -1517,14 +1527,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.OverallFeatureNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when only code-scanning feature enabled", { languages: [KnownLanguage.javascript], @@ -1532,14 +1540,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.OverallFeatureNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay-base database on default branch when language-specific feature disabled", { languages: [KnownLanguage.javascript], @@ -1547,14 +1553,12 @@ test( isDefaultBranch: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.LanguageNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay analysis on PR when feature enabled", { languages: [KnownLanguage.javascript], @@ -1567,8 +1571,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay analysis on PR when feature enabled with custom analysis", { languages: [KnownLanguage.javascript], @@ -1584,8 +1588,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay analysis on PR when code-scanning feature enabled", { languages: 
[KnownLanguage.javascript], @@ -1601,8 +1605,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR if runner disk space is too low", { languages: [KnownLanguage.javascript], @@ -1617,14 +1621,12 @@ test( }, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.InsufficientDiskSpace, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay analysis on PR if runner disk space is too low and skip resource checks flag is enabled", { languages: [KnownLanguage.javascript], @@ -1645,8 +1647,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR if we can't determine runner disk space", { languages: [KnownLanguage.javascript], @@ -1658,14 +1660,12 @@ test( diskUsage: undefined, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.UnableToDetermineDiskUsage, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR if memory flag is too low", { languages: [KnownLanguage.javascript], @@ -1677,14 +1677,31 @@ test( memoryFlagValue: 3072, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.InsufficientResources, + disabledReason: OverlayDisabledReason.InsufficientMemory, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, + "Overlay analysis on PR if memory flag is too low but CodeQL >= 2.24.3", + { + languages: [KnownLanguage.javascript], + features: [ + Feature.OverlayAnalysis, + Feature.OverlayAnalysisCodeScanningJavascript, + ], + isPullRequest: 
true, + memoryFlagValue: 3072, + codeqlVersion: "2.24.3", + }, + { + overlayDatabaseMode: OverlayDatabaseMode.Overlay, + useOverlayDatabaseCaching: true, + }, +); + +test.serial( + checkOverlayEnablementMacro, "Overlay analysis on PR if memory flag is too low and skip resource checks flag is enabled", { languages: [KnownLanguage.javascript], @@ -1702,8 +1719,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when code-scanning feature enabled with disable-default-queries", { languages: [KnownLanguage.javascript], @@ -1717,14 +1734,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when code-scanning feature enabled with packs", { languages: [KnownLanguage.javascript], @@ -1738,14 +1753,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when code-scanning feature enabled with queries", { languages: [KnownLanguage.javascript], @@ -1759,14 +1772,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when code-scanning feature enabled with query-filters", { languages: [KnownLanguage.javascript], @@ 
-1780,14 +1791,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.NonDefaultQueries, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when only language-specific feature enabled", { languages: [KnownLanguage.javascript], @@ -1795,14 +1804,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.OverallFeatureNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when only code-scanning feature enabled", { languages: [KnownLanguage.javascript], @@ -1810,14 +1817,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.OverallFeatureNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay analysis on PR when language-specific feature disabled", { languages: [KnownLanguage.javascript], @@ -1825,14 +1830,12 @@ test( isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.LanguageNotEnabled, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay PR analysis by env", { overlayDatabaseEnvVar: "overlay", @@ -1843,8 +1846,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay PR analysis by env on a runner with low disk space", { 
overlayDatabaseEnvVar: "overlay", @@ -1856,8 +1859,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay PR analysis by feature flag", { languages: [KnownLanguage.javascript], @@ -1870,8 +1873,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Fallback due to autobuild with traced language", { overlayDatabaseEnvVar: "overlay", @@ -1879,14 +1882,12 @@ test( languages: [KnownLanguage.java], }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.IncompatibleBuildMode, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Fallback due to no build mode with traced language", { overlayDatabaseEnvVar: "overlay", @@ -1894,70 +1895,60 @@ test( languages: [KnownLanguage.java], }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.IncompatibleBuildMode, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Fallback due to old CodeQL version", { overlayDatabaseEnvVar: "overlay", codeqlVersion: "2.14.0", }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.IncompatibleCodeQl, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Fallback due to missing git root", { overlayDatabaseEnvVar: "overlay", gitRoot: undefined, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.NoGitRoot, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Fallback due to old git version", { overlayDatabaseEnvVar: "overlay", gitVersion: new GitVersionInfo("2.30.0", "2.30.0"), // Version below required 2.38.0 }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - 
useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.IncompatibleGit, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Fallback when git version cannot be determined", { overlayDatabaseEnvVar: "overlay", gitVersion: undefined, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.IncompatibleGit, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "No overlay when disabled via repository property", { languages: [KnownLanguage.javascript], @@ -1968,14 +1959,12 @@ test( }, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, disabledReason: OverlayDisabledReason.DisabledByRepositoryProperty, }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Overlay not disabled when repository property is false", { languages: [KnownLanguage.javascript], @@ -1991,8 +1980,8 @@ test( }, ); -test( - getOverlayDatabaseModeMacro, +test.serial( + checkOverlayEnablementMacro, "Environment variable override takes precedence over repository property", { overlayDatabaseEnvVar: "overlay", @@ -2008,8 +1997,8 @@ test( // Exercise language-specific overlay analysis features code paths for (const language in KnownLanguage) { - test( - getOverlayDatabaseModeMacro, + test.serial( + checkOverlayEnablementMacro, `Check default overlay analysis feature for ${language}`, { languages: [language], @@ -2017,20 +2006,21 @@ for (const language in KnownLanguage) { isPullRequest: true, }, { - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: OverlayDisabledReason.FeatureNotEnabled, + disabledReason: OverlayDisabledReason.LanguageNotEnabled, }, ); } -test("hasActionsWorkflows doesn't throw if workflows folder doesn't exist", async (t) => { - return withTmpDir(async (tmpDir) => { - t.notThrows(() => 
configUtils.hasActionsWorkflows(tmpDir)); - }); -}); +test.serial( + "hasActionsWorkflows doesn't throw if workflows folder doesn't exist", + async (t) => { + return withTmpDir(async (tmpDir) => { + t.notThrows(() => configUtils.hasActionsWorkflows(tmpDir)); + }); + }, +); -test("getPrimaryAnalysisConfig - single analysis kind", (t) => { +test.serial("getPrimaryAnalysisConfig - single analysis kind", (t) => { // If only one analysis kind is configured, we expect to get the matching configuration. for (const analysisKind of supportedAnalysisKinds) { const singleKind = createTestConfig({ analysisKinds: [analysisKind] }); @@ -2038,7 +2028,7 @@ test("getPrimaryAnalysisConfig - single analysis kind", (t) => { } }); -test("getPrimaryAnalysisConfig - Code Scanning + Code Quality", (t) => { +test.serial("getPrimaryAnalysisConfig - Code Scanning + Code Quality", (t) => { // For CS+CQ, we expect to get the Code Scanning configuration. const codeScanningAndCodeQuality = createTestConfig({ analysisKinds: [AnalysisKind.CodeScanning, AnalysisKind.CodeQuality], diff --git a/src/config-utils.ts b/src/config-utils.ts index 86f784e3a..f04ae78eb 100644 --- a/src/config-utils.ts +++ b/src/config-utils.ts @@ -69,6 +69,9 @@ import { isInTestMode, joinAtMost, DiskUsage, + Result, + Success, + Failure, } from "./util"; /** @@ -92,13 +95,23 @@ const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1_000_000; /** - * The minimum memory (in MB) that must be available for CodeQL to perform overlay - * analysis. If CodeQL will be given less memory than this threshold, then the - * action will not perform overlay analysis unless overlay analysis has been - * explicitly enabled via environment variable. + * The minimum memory (in MB) that must be available for CodeQL to perform overlay analysis. 
If + * CodeQL will be given less memory than this threshold, then the action will not perform overlay + * analysis unless overlay analysis has been explicitly enabled via environment variable. + * + * This check is not performed for CodeQL >= `CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE` since + * improved memory usage in that version makes the check unnecessary. */ const OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; +/** + * Versions 2.24.3+ of CodeQL reduce overlay analysis's peak RAM usage. + * + * In particular, RAM usage with overlay analysis enabled should generally be no higher than it is + * without overlay analysis for these versions. + */ +const CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE = "2.24.3"; + export type RegistryConfigWithCredentials = RegistryConfigNoCredentials & { // Token to use when downloading packs from this registry. token: string; @@ -643,14 +656,18 @@ const OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES: Record = { swift: Feature.OverlayAnalysisCodeScanningSwift, }; -async function isOverlayAnalysisFeatureEnabled( +/** + * Checks whether the overlay analysis feature is enabled for the given + * languages and configuration. + */ +async function checkOverlayAnalysisFeatureEnabled( features: FeatureEnablement, codeql: CodeQL, languages: Language[], codeScanningConfig: UserConfig, -): Promise { +): Promise> { if (!(await features.getValue(Feature.OverlayAnalysis, codeql))) { - return false; + return new Failure(OverlayDisabledReason.OverallFeatureNotEnabled); } let enableForCodeScanningOnly = false; for (const language of languages) { @@ -667,43 +684,35 @@ async function isOverlayAnalysisFeatureEnabled( enableForCodeScanningOnly = true; continue; } - return false; + return new Failure(OverlayDisabledReason.LanguageNotEnabled); } if (enableForCodeScanningOnly) { // A code-scanning configuration runs only the (default) code-scanning suite // if the default queries are not disabled, and no packs, queries, or // query-filters are specified. 
- return ( + const usesDefaultQueriesOnly = codeScanningConfig["disable-default-queries"] !== true && codeScanningConfig.packs === undefined && codeScanningConfig.queries === undefined && - codeScanningConfig["query-filters"] === undefined - ); + codeScanningConfig["query-filters"] === undefined; + if (!usesDefaultQueriesOnly) { + return new Failure(OverlayDisabledReason.NonDefaultQueries); + } } - return true; + return new Success(undefined); } -/** - * Checks if the runner supports overlay analysis based on available disk space - * and the maximum memory CodeQL will be allowed to use. - */ -async function runnerSupportsOverlayAnalysis( - diskUsage: DiskUsage | undefined, - ramInput: string | undefined, +/** Checks if the runner has enough disk space for overlay analysis. */ +function runnerHasSufficientDiskSpace( + diskUsage: DiskUsage, logger: Logger, useV2ResourceChecks: boolean, -): Promise { +): boolean { const minimumDiskSpaceBytes = useV2ResourceChecks ? OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES : OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES; - if ( - diskUsage === undefined || - diskUsage.numAvailableBytes < minimumDiskSpaceBytes - ) { - const diskSpaceMb = - diskUsage === undefined - ? 0 - : Math.round(diskUsage.numAvailableBytes / 1_000_000); + if (diskUsage.numAvailableBytes < minimumDiskSpaceBytes) { + const diskSpaceMb = Math.round(diskUsage.numAvailableBytes / 1_000_000); const minimumDiskSpaceMb = Math.round(minimumDiskSpaceBytes / 1_000_000); logger.info( `Setting overlay database mode to ${OverlayDatabaseMode.None} ` + @@ -711,6 +720,26 @@ async function runnerSupportsOverlayAnalysis( ); return false; } + return true; +} + +/** Checks if the runner has enough memory for overlay analysis. 
*/ +async function runnerHasSufficientMemory( + codeql: CodeQL, + ramInput: string | undefined, + logger: Logger, +): Promise { + if ( + await codeQlVersionAtLeast( + codeql, + CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE, + ) + ) { + logger.debug( + `Skipping memory check for overlay analysis because CodeQL version is at least ${CODEQL_VERSION_REDUCED_OVERLAY_MEMORY_USAGE}.`, + ); + return true; + } const memoryFlagValue = getCodeQLMemoryLimit(ramInput, logger); if (memoryFlagValue < OVERLAY_MINIMUM_MEMORY_MB) { @@ -721,9 +750,37 @@ async function runnerSupportsOverlayAnalysis( return false; } + logger.debug( + `Memory available for CodeQL analysis is ${memoryFlagValue} MB, which is above the minimum of ${OVERLAY_MINIMUM_MEMORY_MB} MB.`, + ); return true; } +/** + * Checks if the runner has sufficient disk space and memory for overlay + * analysis. + */ +async function checkRunnerResources( + codeql: CodeQL, + diskUsage: DiskUsage, + ramInput: string | undefined, + logger: Logger, + useV2ResourceChecks: boolean, +): Promise> { + if (!runnerHasSufficientDiskSpace(diskUsage, logger, useV2ResourceChecks)) { + return new Failure(OverlayDisabledReason.InsufficientDiskSpace); + } + if (!(await runnerHasSufficientMemory(codeql, ramInput, logger))) { + return new Failure(OverlayDisabledReason.InsufficientMemory); + } + return new Success(undefined); +} + +interface EnabledOverlayConfig { + overlayDatabaseMode: Exclude; + useOverlayDatabaseCaching: boolean; +} + /** * Calculate and validate the overlay database mode and caching to use. * @@ -742,10 +799,11 @@ async function runnerSupportsOverlayAnalysis( * For `Overlay` and `OverlayBase`, the function performs further checks and * reverts to `None` if any check should fail. * - * @returns An object containing the overlay database mode and whether the - * action should perform overlay-base database caching. 
+ * @returns A `Success` containing the overlay database mode and whether the + * action should perform overlay-base database caching, or a `Failure` + * containing the reason why overlay analysis is disabled. */ -export async function getOverlayDatabaseMode( +export async function checkOverlayEnablement( codeql: CodeQL, features: FeatureEnablement, languages: Language[], @@ -756,15 +814,7 @@ export async function getOverlayDatabaseMode( repositoryProperties: RepositoryProperties, gitVersion: GitVersionInfo | undefined, logger: Logger, -): Promise<{ - overlayDatabaseMode: OverlayDatabaseMode; - useOverlayDatabaseCaching: boolean; - disabledReason: OverlayDisabledReason | undefined; -}> { - let overlayDatabaseMode = OverlayDatabaseMode.None; - let useOverlayDatabaseCaching = false; - let disabledReason: OverlayDisabledReason | undefined; - +): Promise> { const modeEnv = process.env.CODEQL_OVERLAY_DATABASE_MODE; // Any unrecognized CODEQL_OVERLAY_DATABASE_MODE value will be ignored and // treated as if the environment variable was not set. 
@@ -773,100 +823,132 @@ export async function getOverlayDatabaseMode( modeEnv === OverlayDatabaseMode.OverlayBase || modeEnv === OverlayDatabaseMode.None ) { - overlayDatabaseMode = modeEnv; logger.info( - `Setting overlay database mode to ${overlayDatabaseMode} ` + + `Setting overlay database mode to ${modeEnv} ` + "from the CODEQL_OVERLAY_DATABASE_MODE environment variable.", ); - } else if ( - repositoryProperties[RepositoryPropertyName.DISABLE_OVERLAY] === true - ) { + if (modeEnv === OverlayDatabaseMode.None) { + return new Failure(OverlayDisabledReason.DisabledByEnvironmentVariable); + } + return validateOverlayDatabaseMode( + modeEnv, + false, + codeql, + languages, + sourceRoot, + buildMode, + gitVersion, + logger, + ); + } + + if (repositoryProperties[RepositoryPropertyName.DISABLE_OVERLAY] === true) { logger.info( `Setting overlay database mode to ${OverlayDatabaseMode.None} ` + `because the ${RepositoryPropertyName.DISABLE_OVERLAY} repository property is set to true.`, ); - overlayDatabaseMode = OverlayDatabaseMode.None; - disabledReason = OverlayDisabledReason.DisabledByRepositoryProperty; - } else if ( - await isOverlayAnalysisFeatureEnabled( - features, - codeql, - languages, - codeScanningConfig, - ) + return new Failure(OverlayDisabledReason.DisabledByRepositoryProperty); + } + + const featureResult = await checkOverlayAnalysisFeatureEnabled( + features, + codeql, + languages, + codeScanningConfig, + ); + if (featureResult.isFailure()) { + return featureResult; + } + + const performResourceChecks = !(await features.getValue( + Feature.OverlayAnalysisSkipResourceChecks, + codeql, + )); + const useV2ResourceChecks = await features.getValue( + Feature.OverlayAnalysisResourceChecksV2, + ); + const checkOverlayStatus = await features.getValue( + Feature.OverlayAnalysisStatusCheck, + ); + const needDiskUsage = performResourceChecks || checkOverlayStatus; + const diskUsage = needDiskUsage ? 
await checkDiskUsage(logger) : undefined; + if (needDiskUsage && diskUsage === undefined) { + logger.warning( + `Unable to determine disk usage, therefore setting overlay database mode to ${OverlayDatabaseMode.None}.`, + ); + return new Failure(OverlayDisabledReason.UnableToDetermineDiskUsage); + } + const resourceResult = + performResourceChecks && diskUsage !== undefined + ? await checkRunnerResources( + codeql, + diskUsage, + ramInput, + logger, + useV2ResourceChecks, + ) + : new Success(undefined); + if (resourceResult.isFailure()) { + return resourceResult; + } + if ( + checkOverlayStatus && + diskUsage !== undefined && + (await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger)) ) { - const performResourceChecks = !(await features.getValue( - Feature.OverlayAnalysisSkipResourceChecks, - codeql, - )); - const useV2ResourceChecks = await features.getValue( - Feature.OverlayAnalysisResourceChecksV2, + logger.info( + `Setting overlay database mode to ${OverlayDatabaseMode.None} ` + + "because overlay analysis previously failed with this combination of languages, " + + "disk space, and CodeQL version.", ); - const checkOverlayStatus = await features.getValue( - Feature.OverlayAnalysisStatusCheck, + return new Failure(OverlayDisabledReason.SkippedDueToCachedStatus); + } + + let overlayDatabaseMode: OverlayDatabaseMode; + if (isAnalyzingPullRequest()) { + overlayDatabaseMode = OverlayDatabaseMode.Overlay; + logger.info( + `Setting overlay database mode to ${overlayDatabaseMode} ` + + "with caching because we are analyzing a pull request.", + ); + } else if (await isAnalyzingDefaultBranch()) { + overlayDatabaseMode = OverlayDatabaseMode.OverlayBase; + logger.info( + `Setting overlay database mode to ${overlayDatabaseMode} ` + + "with caching because we are analyzing the default branch.", ); - const diskUsage = - performResourceChecks || checkOverlayStatus - ? 
await checkDiskUsage(logger) - : undefined; - if ( - performResourceChecks && - !(await runnerSupportsOverlayAnalysis( - diskUsage, - ramInput, - logger, - useV2ResourceChecks, - )) - ) { - overlayDatabaseMode = OverlayDatabaseMode.None; - disabledReason = OverlayDisabledReason.InsufficientResources; - } else if (checkOverlayStatus && diskUsage === undefined) { - logger.warning( - `Unable to determine disk usage, therefore setting overlay database mode to ${OverlayDatabaseMode.None}.`, - ); - overlayDatabaseMode = OverlayDatabaseMode.None; - disabledReason = OverlayDisabledReason.UnableToDetermineDiskUsage; - } else if ( - checkOverlayStatus && - diskUsage && - (await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger)) - ) { - logger.info( - `Setting overlay database mode to ${OverlayDatabaseMode.None} ` + - "because overlay analysis previously failed with this combination of languages, " + - "disk space, and CodeQL version.", - ); - overlayDatabaseMode = OverlayDatabaseMode.None; - disabledReason = OverlayDisabledReason.SkippedDueToCachedStatus; - } else if (isAnalyzingPullRequest()) { - overlayDatabaseMode = OverlayDatabaseMode.Overlay; - useOverlayDatabaseCaching = true; - logger.info( - `Setting overlay database mode to ${overlayDatabaseMode} ` + - "with caching because we are analyzing a pull request.", - ); - } else if (await isAnalyzingDefaultBranch()) { - overlayDatabaseMode = OverlayDatabaseMode.OverlayBase; - useOverlayDatabaseCaching = true; - logger.info( - `Setting overlay database mode to ${overlayDatabaseMode} ` + - "with caching because we are analyzing the default branch.", - ); - } } else { - disabledReason = OverlayDisabledReason.FeatureNotEnabled; + return new Failure(OverlayDisabledReason.NotPullRequestOrDefaultBranch); } - const disabledResult = (reason: OverlayDisabledReason | undefined) => ({ - overlayDatabaseMode: OverlayDatabaseMode.None, - useOverlayDatabaseCaching: false, - disabledReason: reason, - }); - - if 
(overlayDatabaseMode === OverlayDatabaseMode.None) { - return disabledResult(disabledReason); - } + return validateOverlayDatabaseMode( + overlayDatabaseMode, + true, + codeql, + languages, + sourceRoot, + buildMode, + gitVersion, + logger, + ); +} +/** + * Validates that the given overlay database mode is compatible with the current + * configuration (build mode, CodeQL version, git repository, git version). Returns + * the mode unchanged if all checks pass, or falls back to `None` with the + * appropriate disabled reason. + */ +async function validateOverlayDatabaseMode( + overlayDatabaseMode: Exclude, + useOverlayDatabaseCaching: boolean, + codeql: CodeQL, + languages: Language[], + sourceRoot: string, + buildMode: BuildMode | undefined, + gitVersion: GitVersionInfo | undefined, + logger: Logger, +): Promise> { if ( buildMode !== BuildMode.None && ( @@ -887,7 +969,7 @@ export async function getOverlayDatabaseMode( `build-mode is set to "${buildMode}" instead of "none". ` + "Falling back to creating a normal full database instead.", ); - return disabledResult(OverlayDisabledReason.IncompatibleBuildMode); + return new Failure(OverlayDisabledReason.IncompatibleBuildMode); } if (!(await codeQlVersionAtLeast(codeql, CODEQL_OVERLAY_MINIMUM_VERSION))) { logger.warning( @@ -895,7 +977,7 @@ export async function getOverlayDatabaseMode( `the CodeQL CLI is older than ${CODEQL_OVERLAY_MINIMUM_VERSION}. ` + "Falling back to creating a normal full database instead.", ); - return disabledResult(OverlayDisabledReason.IncompatibleCodeQl); + return new Failure(OverlayDisabledReason.IncompatibleCodeQl); } if ((await getGitRoot(sourceRoot)) === undefined) { logger.warning( @@ -903,7 +985,7 @@ export async function getOverlayDatabaseMode( `the source root "${sourceRoot}" is not inside a git repository. 
` + "Falling back to creating a normal full database instead.", ); - return disabledResult(OverlayDisabledReason.NoGitRoot); + return new Failure(OverlayDisabledReason.NoGitRoot); } if (gitVersion === undefined) { logger.warning( @@ -911,7 +993,7 @@ export async function getOverlayDatabaseMode( "the Git version could not be determined. " + "Falling back to creating a normal full database instead.", ); - return disabledResult(OverlayDisabledReason.IncompatibleGit); + return new Failure(OverlayDisabledReason.IncompatibleGit); } if (!gitVersion.isAtLeast(GIT_MINIMUM_VERSION_FOR_OVERLAY)) { logger.warning( @@ -919,14 +1001,13 @@ export async function getOverlayDatabaseMode( `the installed Git version is older than ${GIT_MINIMUM_VERSION_FOR_OVERLAY}. ` + "Falling back to creating a normal full database instead.", ); - return disabledResult(OverlayDisabledReason.IncompatibleGit); + return new Failure(OverlayDisabledReason.IncompatibleGit); } - return { + return new Success({ overlayDatabaseMode, useOverlayDatabaseCaching, - disabledReason, - }; + }); } function dbLocationOrDefault( @@ -1072,11 +1153,7 @@ export async function initConfig( // and queries, which in turn depends on the user config and the augmentation // properties. So we need to calculate the overlay database mode after the // rest of the config has been populated. - const { - overlayDatabaseMode, - useOverlayDatabaseCaching, - disabledReason: overlayDisabledReason, - } = await getOverlayDatabaseMode( + const overlayDatabaseModeResult = await checkOverlayEnablement( inputs.codeql, inputs.features, config.languages, @@ -1088,14 +1165,22 @@ export async function initConfig( gitVersion, logger, ); - logger.info( - `Using overlay database mode: ${overlayDatabaseMode} ` + - `${useOverlayDatabaseCaching ? 
"with" : "without"} caching.`, - ); - config.overlayDatabaseMode = overlayDatabaseMode; - config.useOverlayDatabaseCaching = useOverlayDatabaseCaching; - - if (overlayDisabledReason !== undefined) { + if (overlayDatabaseModeResult.isSuccess()) { + const { overlayDatabaseMode, useOverlayDatabaseCaching } = + overlayDatabaseModeResult.value; + logger.info( + `Using overlay database mode: ${overlayDatabaseMode} ` + + `${useOverlayDatabaseCaching ? "with" : "without"} caching.`, + ); + config.overlayDatabaseMode = overlayDatabaseMode; + config.useOverlayDatabaseCaching = useOverlayDatabaseCaching; + } else { + const overlayDisabledReason = overlayDatabaseModeResult.value; + logger.info( + `Using overlay database mode: ${OverlayDatabaseMode.None} without caching.`, + ); + config.overlayDatabaseMode = OverlayDatabaseMode.None; + config.useOverlayDatabaseCaching = false; await addOverlayDisablementDiagnostics( config, inputs.codeql, @@ -1104,7 +1189,7 @@ export async function initConfig( } if ( - overlayDatabaseMode === OverlayDatabaseMode.Overlay || + config.overlayDatabaseMode === OverlayDatabaseMode.Overlay || (await shouldPerformDiffInformedAnalysis( inputs.codeql, inputs.features, diff --git a/src/database-upload.test.ts b/src/database-upload.test.ts index 57d07adb2..3d8433d8b 100644 --- a/src/database-upload.test.ts +++ b/src/database-upload.test.ts @@ -82,70 +82,76 @@ function getCodeQL() { }); } -test("Abort database upload if 'upload-database' input set to false", async (t) => { - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - sinon - .stub(actionsUtil, "getRequiredInput") - .withArgs("upload-database") - .returns("false"); - sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); +test.serial( + "Abort database upload if 'upload-database' input set to false", + async (t) => { + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + sinon + .stub(actionsUtil, "getRequiredInput") + 
.withArgs("upload-database") + .returns("false"); + sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - const loggedMessages = []; - await cleanupAndUploadDatabases( - testRepoName, - getCodeQL(), - getTestConfig(tmpDir), - testApiDetails, - createFeatures([]), - getRecordingLogger(loggedMessages), - ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "debug" && - v.message === - "Database upload disabled in workflow. Skipping upload.", - ) !== undefined, - ); - }); -}); + const loggedMessages = []; + await cleanupAndUploadDatabases( + testRepoName, + getCodeQL(), + getTestConfig(tmpDir), + testApiDetails, + createFeatures([]), + getRecordingLogger(loggedMessages), + ); + t.assert( + loggedMessages.find( + (v: LoggedMessage) => + v.type === "debug" && + v.message === + "Database upload disabled in workflow. Skipping upload.", + ) !== undefined, + ); + }); + }, +); -test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => { - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - sinon - .stub(actionsUtil, "getRequiredInput") - .withArgs("upload-database") - .returns("true"); - sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); +test.serial( + "Abort database upload if 'analysis-kinds: code-scanning' is not enabled", + async (t) => { + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + sinon + .stub(actionsUtil, "getRequiredInput") + .withArgs("upload-database") + .returns("true"); + sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - await mockHttpRequests(201); + await mockHttpRequests(201); - const loggedMessages = []; - await cleanupAndUploadDatabases( - testRepoName, - getCodeQL(), - { - ...getTestConfig(tmpDir), - analysisKinds: [AnalysisKind.CodeQuality], - }, - testApiDetails, - createFeatures([]), - getRecordingLogger(loggedMessages), - ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === 
"debug" && - v.message === - "Not uploading database because 'analysis-kinds: code-scanning' is not enabled.", - ) !== undefined, - ); - }); -}); + const loggedMessages = []; + await cleanupAndUploadDatabases( + testRepoName, + getCodeQL(), + { + ...getTestConfig(tmpDir), + analysisKinds: [AnalysisKind.CodeQuality], + }, + testApiDetails, + createFeatures([]), + getRecordingLogger(loggedMessages), + ); + t.assert( + loggedMessages.find( + (v: LoggedMessage) => + v.type === "debug" && + v.message === + "Not uploading database because 'analysis-kinds: code-scanning' is not enabled.", + ) !== undefined, + ); + }); + }, +); -test("Abort database upload if running against GHES", async (t) => { +test.serial("Abort database upload if running against GHES", async (t) => { await withTmpDir(async (tmpDir) => { setupActionsVars(tmpDir, tmpDir); sinon @@ -177,35 +183,38 @@ test("Abort database upload if running against GHES", async (t) => { }); }); -test("Abort database upload if not analyzing default branch", async (t) => { - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - sinon - .stub(actionsUtil, "getRequiredInput") - .withArgs("upload-database") - .returns("true"); - sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false); +test.serial( + "Abort database upload if not analyzing default branch", + async (t) => { + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + sinon + .stub(actionsUtil, "getRequiredInput") + .withArgs("upload-database") + .returns("true"); + sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false); - const loggedMessages = []; - await cleanupAndUploadDatabases( - testRepoName, - getCodeQL(), - getTestConfig(tmpDir), - testApiDetails, - createFeatures([]), - getRecordingLogger(loggedMessages), - ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "debug" && - v.message === "Not analyzing default branch. 
Skipping upload.", - ) !== undefined, - ); - }); -}); + const loggedMessages = []; + await cleanupAndUploadDatabases( + testRepoName, + getCodeQL(), + getTestConfig(tmpDir), + testApiDetails, + createFeatures([]), + getRecordingLogger(loggedMessages), + ); + t.assert( + loggedMessages.find( + (v: LoggedMessage) => + v.type === "debug" && + v.message === "Not analyzing default branch. Skipping upload.", + ) !== undefined, + ); + }); + }, +); -test("Don't crash if uploading a database fails", async (t) => { +test.serial("Don't crash if uploading a database fails", async (t) => { await withTmpDir(async (tmpDir) => { setupActionsVars(tmpDir, tmpDir); sinon @@ -237,7 +246,7 @@ test("Don't crash if uploading a database fails", async (t) => { }); }); -test("Successfully uploading a database to github.com", async (t) => { +test.serial("Successfully uploading a database to github.com", async (t) => { await withTmpDir(async (tmpDir) => { setupActionsVars(tmpDir, tmpDir); sinon @@ -267,7 +276,7 @@ test("Successfully uploading a database to github.com", async (t) => { }); }); -test("Successfully uploading a database to GHEC-DR", async (t) => { +test.serial("Successfully uploading a database to GHEC-DR", async (t) => { await withTmpDir(async (tmpDir) => { setupActionsVars(tmpDir, tmpDir); sinon diff --git a/src/defaults.json b/src/defaults.json index 94988f4cf..9b6ec84bd 100644 --- a/src/defaults.json +++ b/src/defaults.json @@ -1,6 +1,6 @@ { - "bundleVersion": "codeql-bundle-v2.24.2", - "cliVersion": "2.24.2", - "priorBundleVersion": "codeql-bundle-v2.24.1", - "priorCliVersion": "2.24.1" + "bundleVersion": "codeql-bundle-v2.24.3", + "cliVersion": "2.24.3", + "priorBundleVersion": "codeql-bundle-v2.24.2", + "priorCliVersion": "2.24.2" } diff --git a/src/dependency-caching.test.ts b/src/dependency-caching.test.ts index c37d37b43..a2d75190d 100644 --- a/src/dependency-caching.test.ts +++ b/src/dependency-caching.test.ts @@ -44,27 +44,33 @@ function makeAbsolutePatterns(tmpDir: 
string, patterns: string[]): string[] { return patterns.map((pattern) => path.join(tmpDir, pattern)); } -test("getCsharpDependencyDirs - does not include BMN dir if FF is enabled", async (t) => { - await withTmpDir(async (tmpDir) => { - process.env["RUNNER_TEMP"] = tmpDir; - const codeql = createStubCodeQL({}); - const features = createFeatures([]); +test.serial( + "getCsharpDependencyDirs - does not include BMN dir if FF is disabled", + async (t) => { + await withTmpDir(async (tmpDir) => { + process.env["RUNNER_TEMP"] = tmpDir; + const codeql = createStubCodeQL({}); + const features = createFeatures([]); - const results = await getCsharpDependencyDirs(codeql, features); - t.false(results.includes(getCsharpTempDependencyDir())); - }); -}); + const results = await getCsharpDependencyDirs(codeql, features); + t.false(results.includes(getCsharpTempDependencyDir())); + }); + }, +); -test("getCsharpDependencyDirs - includes BMN dir if FF is enabled", async (t) => { - await withTmpDir(async (tmpDir) => { - process.env["RUNNER_TEMP"] = tmpDir; - const codeql = createStubCodeQL({}); - const features = createFeatures([Feature.CsharpCacheBuildModeNone]); +test.serial( + "getCsharpDependencyDirs - includes BMN dir if FF is enabled", + async (t) => { + await withTmpDir(async (tmpDir) => { + process.env["RUNNER_TEMP"] = tmpDir; + const codeql = createStubCodeQL({}); + const features = createFeatures([Feature.CsharpCacheBuildModeNone]); - const results = await getCsharpDependencyDirs(codeql, features); - t.assert(results.includes(getCsharpTempDependencyDir())); - }); -}); + const results = await getCsharpDependencyDirs(codeql, features); + t.assert(results.includes(getCsharpTempDependencyDir())); + }); + }, +); test("makePatternCheck - returns undefined if no patterns match", async (t) => { await withTmpDir(async (tmpDir) => { @@ -85,69 +91,81 @@ test("makePatternCheck - returns all patterns if any pattern matches", async (t) }); }); -test("getCsharpHashPatterns - returns base 
patterns if any pattern matches", async (t) => { - const codeql = createStubCodeQL({}); - const features = createFeatures([]); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); +test.serial( + "getCsharpHashPatterns - returns base patterns if any pattern matches", + async (t) => { + const codeql = createStubCodeQL({}); + const features = createFeatures([]); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).rejects(); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).rejects(); - await t.notThrowsAsync(async () => { - const result = await getCsharpHashPatterns(codeql, features); - t.deepEqual(result, CSHARP_BASE_PATTERNS); - }); -}); + await t.notThrowsAsync(async () => { + const result = await getCsharpHashPatterns(codeql, features); + t.deepEqual(result, CSHARP_BASE_PATTERNS); + }); + }, +); -test("getCsharpHashPatterns - returns base patterns if any base pattern matches and CsharpNewCacheKey is enabled", async (t) => { - const codeql = createStubCodeQL({}); - const features = createFeatures([Feature.CsharpNewCacheKey]); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); +test.serial( + "getCsharpHashPatterns - returns base patterns if any base pattern matches and CsharpNewCacheKey is enabled", + async (t) => { + const codeql = createStubCodeQL({}); + const features = createFeatures([Feature.CsharpNewCacheKey]); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - makePatternCheckStub - .withArgs(CSHARP_EXTRA_PATTERNS) - .resolves(CSHARP_EXTRA_PATTERNS); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + 
makePatternCheckStub + .withArgs(CSHARP_EXTRA_PATTERNS) + .resolves(CSHARP_EXTRA_PATTERNS); - await t.notThrowsAsync(async () => { - const result = await getCsharpHashPatterns(codeql, features); - t.deepEqual(result, CSHARP_BASE_PATTERNS); - }); -}); + await t.notThrowsAsync(async () => { + const result = await getCsharpHashPatterns(codeql, features); + t.deepEqual(result, CSHARP_BASE_PATTERNS); + }); + }, +); -test("getCsharpHashPatterns - returns extra patterns if any extra pattern matches and CsharpNewCacheKey is enabled", async (t) => { - const codeql = createStubCodeQL({}); - const features = createFeatures([Feature.CsharpNewCacheKey]); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); +test.serial( + "getCsharpHashPatterns - returns extra patterns if any extra pattern matches and CsharpNewCacheKey is enabled", + async (t) => { + const codeql = createStubCodeQL({}); + const features = createFeatures([Feature.CsharpNewCacheKey]); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined); - makePatternCheckStub - .withArgs(CSHARP_EXTRA_PATTERNS) - .resolves(CSHARP_EXTRA_PATTERNS); + makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined); + makePatternCheckStub + .withArgs(CSHARP_EXTRA_PATTERNS) + .resolves(CSHARP_EXTRA_PATTERNS); - await t.notThrowsAsync(async () => { - const result = await getCsharpHashPatterns(codeql, features); - t.deepEqual(result, CSHARP_EXTRA_PATTERNS); - }); -}); + await t.notThrowsAsync(async () => { + const result = await getCsharpHashPatterns(codeql, features); + t.deepEqual(result, CSHARP_EXTRA_PATTERNS); + }); + }, +); -test("getCsharpHashPatterns - returns undefined if neither base nor extra patterns match", async (t) => { - const codeql = createStubCodeQL({}); - const features = createFeatures([Feature.CsharpNewCacheKey]); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); 
+test.serial( + "getCsharpHashPatterns - returns undefined if neither base nor extra patterns match", + async (t) => { + const codeql = createStubCodeQL({}); + const features = createFeatures([Feature.CsharpNewCacheKey]); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined); - makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); + makePatternCheckStub.withArgs(CSHARP_BASE_PATTERNS).resolves(undefined); + makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); - await t.notThrowsAsync(async () => { - const result = await getCsharpHashPatterns(codeql, features); - t.deepEqual(result, undefined); - }); -}); + await t.notThrowsAsync(async () => { + const result = await getCsharpHashPatterns(codeql, features); + t.deepEqual(result, undefined); + }); + }, +); test("checkHashPatterns - logs when no patterns match", async (t) => { const codeql = createStubCodeQL({}); @@ -238,160 +256,169 @@ function makeMockCacheCheck(mockCacheKeys: string[]): RestoreCacheFunc { }; } -test("downloadDependencyCaches - does not restore caches with feature keys if no features are enabled", async (t) => { - process.env["RUNNER_OS"] = "Linux"; +test.serial( + "downloadDependencyCaches - does not restore caches with feature keys if no features are enabled", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); - sinon.stub(glob, "hashFiles").resolves("abcdef"); + sinon.stub(glob, "hashFiles").resolves("abcdef"); - const keyWithFeature = await cacheKey( - codeql, - createFeatures([Feature.CsharpNewCacheKey]), - KnownLanguage.csharp, - // Patterns don't matter here because we have stubbed `hashFiles` to always return a 
specific hash above. - [], - ); + const keyWithFeature = await cacheKey( + codeql, + createFeatures([Feature.CsharpNewCacheKey]), + KnownLanguage.csharp, + // Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above. + [], + ); - const restoreCacheStub = sinon - .stub(actionsCache, "restoreCache") - .callsFake(makeMockCacheCheck([keyWithFeature])); + const restoreCacheStub = sinon + .stub(actionsCache, "restoreCache") + .callsFake(makeMockCacheCheck([keyWithFeature])); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); - const result = await downloadDependencyCaches( - codeql, - createFeatures([]), - [KnownLanguage.csharp], - logger, - ); - const statusReport = result.statusReport; - t.is(statusReport.length, 1); - t.is(statusReport[0].language, KnownLanguage.csharp); - t.is(statusReport[0].hit_kind, CacheHitKind.Miss); - t.deepEqual(result.restoredKeys, []); - t.assert(restoreCacheStub.calledOnce); -}); + const result = await downloadDependencyCaches( + codeql, + createFeatures([]), + [KnownLanguage.csharp], + logger, + ); + const statusReport = result.statusReport; + t.is(statusReport.length, 1); + t.is(statusReport[0].language, KnownLanguage.csharp); + t.is(statusReport[0].hit_kind, CacheHitKind.Miss); + t.deepEqual(result.restoredKeys, []); + t.assert(restoreCacheStub.calledOnce); + }, +); -test("downloadDependencyCaches - restores caches with feature keys if features are enabled", async (t) => { - process.env["RUNNER_OS"] = "Linux"; +test.serial( + "downloadDependencyCaches - restores caches 
with feature keys if features are enabled", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([Feature.CsharpNewCacheKey]); + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([Feature.CsharpNewCacheKey]); - const mockHash = "abcdef"; - sinon.stub(glob, "hashFiles").resolves(mockHash); + const mockHash = "abcdef"; + sinon.stub(glob, "hashFiles").resolves(mockHash); - const keyWithFeature = await cacheKey( - codeql, - features, - KnownLanguage.csharp, - // Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above. - [], - ); + const keyWithFeature = await cacheKey( + codeql, + features, + KnownLanguage.csharp, + // Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above. 
+ [], + ); - const restoreCacheStub = sinon - .stub(actionsCache, "restoreCache") - .callsFake(makeMockCacheCheck([keyWithFeature])); + const restoreCacheStub = sinon + .stub(actionsCache, "restoreCache") + .callsFake(makeMockCacheCheck([keyWithFeature])); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); - const result = await downloadDependencyCaches( - codeql, - features, - [KnownLanguage.csharp], - logger, - ); + const result = await downloadDependencyCaches( + codeql, + features, + [KnownLanguage.csharp], + logger, + ); - // Check that the status report for telemetry indicates that one cache was restored with an exact match. - const statusReport = result.statusReport; - t.is(statusReport.length, 1); - t.is(statusReport[0].language, KnownLanguage.csharp); - t.is(statusReport[0].hit_kind, CacheHitKind.Exact); + // Check that the status report for telemetry indicates that one cache was restored with an exact match. + const statusReport = result.statusReport; + t.is(statusReport.length, 1); + t.is(statusReport[0].language, KnownLanguage.csharp); + t.is(statusReport[0].hit_kind, CacheHitKind.Exact); - // Check that the restored key has been returned. - const restoredKeys = result.restoredKeys; - t.is(restoredKeys.length, 1); - t.assert( - restoredKeys[0].endsWith(mockHash), - "Expected restored key to end with hash returned by `hashFiles`", - ); + // Check that the restored key has been returned. 
+ const restoredKeys = result.restoredKeys; + t.is(restoredKeys.length, 1); + t.assert( + restoredKeys[0].endsWith(mockHash), + "Expected restored key to end with hash returned by `hashFiles`", + ); - // `restoreCache` should have been called exactly once. - t.assert(restoreCacheStub.calledOnce); -}); + // `restoreCache` should have been called exactly once. + t.assert(restoreCacheStub.calledOnce); + }, +); -test("downloadDependencyCaches - restores caches with feature keys if features are enabled for partial matches", async (t) => { - process.env["RUNNER_OS"] = "Linux"; +test.serial( + "downloadDependencyCaches - restores caches with feature keys if features are enabled for partial matches", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([Feature.CsharpNewCacheKey]); + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([Feature.CsharpNewCacheKey]); - // We expect two calls to `hashFiles`: the first by the call to `cacheKey` below, - // and the second by `downloadDependencyCaches`. We use the result of the first - // call as part of the cache key that identifies a mock, existing cache. The result - // of the second call is for the primary restore key, which we don't want to match - // the first key so that we can test the restore keys logic. - const restoredHash = "abcdef"; - const hashFilesStub = sinon.stub(glob, "hashFiles"); - hashFilesStub.onFirstCall().resolves(restoredHash); - hashFilesStub.onSecondCall().resolves("123456"); + // We expect two calls to `hashFiles`: the first by the call to `cacheKey` below, + // and the second by `downloadDependencyCaches`. We use the result of the first + // call as part of the cache key that identifies a mock, existing cache. 
The result + // of the second call is for the primary restore key, which we don't want to match + // the first key so that we can test the restore keys logic. + const restoredHash = "abcdef"; + const hashFilesStub = sinon.stub(glob, "hashFiles"); + hashFilesStub.onFirstCall().resolves(restoredHash); + hashFilesStub.onSecondCall().resolves("123456"); - const keyWithFeature = await cacheKey( - codeql, - features, - KnownLanguage.csharp, - // Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above. - [], - ); + const keyWithFeature = await cacheKey( + codeql, + features, + KnownLanguage.csharp, + // Patterns don't matter here because we have stubbed `hashFiles` to always return a specific hash above. + [], + ); - const restoreCacheStub = sinon - .stub(actionsCache, "restoreCache") - .callsFake(makeMockCacheCheck([keyWithFeature])); + const restoreCacheStub = sinon + .stub(actionsCache, "restoreCache") + .callsFake(makeMockCacheCheck([keyWithFeature])); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined); - const result = await downloadDependencyCaches( - codeql, - features, - [KnownLanguage.csharp], - logger, - ); + const result = await downloadDependencyCaches( + codeql, + features, + [KnownLanguage.csharp], + logger, + ); - // Check that the status report for telemetry indicates that one cache was restored with a partial match. 
- const statusReport = result.statusReport; - t.is(statusReport.length, 1); - t.is(statusReport[0].language, KnownLanguage.csharp); - t.is(statusReport[0].hit_kind, CacheHitKind.Partial); + // Check that the status report for telemetry indicates that one cache was restored with a partial match. + const statusReport = result.statusReport; + t.is(statusReport.length, 1); + t.is(statusReport[0].language, KnownLanguage.csharp); + t.is(statusReport[0].hit_kind, CacheHitKind.Partial); - // Check that the restored key has been returned. - const restoredKeys = result.restoredKeys; - t.is(restoredKeys.length, 1); - t.assert( - restoredKeys[0].endsWith(restoredHash), - "Expected restored key to end with hash returned by `hashFiles`", - ); + // Check that the restored key has been returned. + const restoredKeys = result.restoredKeys; + t.is(restoredKeys.length, 1); + t.assert( + restoredKeys[0].endsWith(restoredHash), + "Expected restored key to end with hash returned by `hashFiles`", + ); - t.assert(restoreCacheStub.calledOnce); -}); + t.assert(restoreCacheStub.calledOnce); + }, +); test("uploadDependencyCaches - skips upload for a language with no cache config", async (t) => { const codeql = createStubCodeQL({}); @@ -409,148 +436,62 @@ test("uploadDependencyCaches - skips upload for a language with no cache config" ]); }); -test("uploadDependencyCaches - skips upload if no files for the hash exist", async (t) => { - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([]); - const config = createTestConfig({ - languages: [KnownLanguage.go], - }); +test.serial( + "uploadDependencyCaches - skips upload if no files for the hash exist", + async (t) => { + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([]); + const config = createTestConfig({ + languages: 
[KnownLanguage.go], + }); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub.resolves(undefined); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub.resolves(undefined); - const result = await uploadDependencyCaches(codeql, features, config, logger); - t.is(result.length, 1); - t.is(result[0].language, KnownLanguage.go); - t.is(result[0].result, CacheStoreResult.NoHash); -}); + const result = await uploadDependencyCaches( + codeql, + features, + config, + logger, + ); + t.is(result.length, 1); + t.is(result[0].language, KnownLanguage.go); + t.is(result[0].result, CacheStoreResult.NoHash); + }, +); -test("uploadDependencyCaches - skips upload if we know the cache already exists", async (t) => { - process.env["RUNNER_OS"] = "Linux"; +test.serial( + "uploadDependencyCaches - skips upload if we know the cache already exists", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([]); + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([]); - const mockHash = "abcdef"; - sinon.stub(glob, "hashFiles").resolves(mockHash); + const mockHash = "abcdef"; + sinon.stub(glob, "hashFiles").resolves(mockHash); - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); - const primaryCacheKey = await cacheKey( - codeql, - features, - KnownLanguage.csharp, - CSHARP_BASE_PATTERNS, - ); + const primaryCacheKey = await cacheKey( + codeql, + features, + 
KnownLanguage.csharp, + CSHARP_BASE_PATTERNS, + ); - const config = createTestConfig({ - languages: [KnownLanguage.csharp], - dependencyCachingRestoredKeys: [primaryCacheKey], - }); + const config = createTestConfig({ + languages: [KnownLanguage.csharp], + dependencyCachingRestoredKeys: [primaryCacheKey], + }); - const result = await uploadDependencyCaches(codeql, features, config, logger); - t.is(result.length, 1); - t.is(result[0].language, KnownLanguage.csharp); - t.is(result[0].result, CacheStoreResult.Duplicate); -}); - -test("uploadDependencyCaches - skips upload if cache size is 0", async (t) => { - process.env["RUNNER_OS"] = "Linux"; - - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([]); - - const mockHash = "abcdef"; - sinon.stub(glob, "hashFiles").resolves(mockHash); - - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - - sinon.stub(cachingUtils, "getTotalCacheSize").resolves(0); - - const config = createTestConfig({ - languages: [KnownLanguage.csharp], - }); - - const result = await uploadDependencyCaches(codeql, features, config, logger); - t.is(result.length, 1); - t.is(result[0].language, KnownLanguage.csharp); - t.is(result[0].result, CacheStoreResult.Empty); - - checkExpectedLogMessages(t, messages, [ - "Skipping upload of dependency cache", - ]); -}); - -test("uploadDependencyCaches - uploads caches when all requirements are met", async (t) => { - process.env["RUNNER_OS"] = "Linux"; - - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([]); - - const mockHash = "abcdef"; - sinon.stub(glob, "hashFiles").resolves(mockHash); - - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - 
.withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - - sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024); - sinon.stub(actionsCache, "saveCache").resolves(); - - const config = createTestConfig({ - languages: [KnownLanguage.csharp], - }); - - const result = await uploadDependencyCaches(codeql, features, config, logger); - t.is(result.length, 1); - t.is(result[0].language, KnownLanguage.csharp); - t.is(result[0].result, CacheStoreResult.Stored); - t.is(result[0].upload_size_bytes, 1024); - - checkExpectedLogMessages(t, messages, ["Uploading cache of size"]); -}); - -test("uploadDependencyCaches - catches `ReserveCacheError` exceptions", async (t) => { - process.env["RUNNER_OS"] = "Linux"; - - const codeql = createStubCodeQL({}); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - const features = createFeatures([]); - - const mockHash = "abcdef"; - sinon.stub(glob, "hashFiles").resolves(mockHash); - - const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); - makePatternCheckStub - .withArgs(CSHARP_BASE_PATTERNS) - .resolves(CSHARP_BASE_PATTERNS); - - sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024); - sinon - .stub(actionsCache, "saveCache") - .throws(new actionsCache.ReserveCacheError("Already in use")); - - const config = createTestConfig({ - languages: [KnownLanguage.csharp], - }); - - await t.notThrowsAsync(async () => { const result = await uploadDependencyCaches( codeql, features, @@ -560,12 +501,133 @@ test("uploadDependencyCaches - catches `ReserveCacheError` exceptions", async (t t.is(result.length, 1); t.is(result[0].language, KnownLanguage.csharp); t.is(result[0].result, CacheStoreResult.Duplicate); + }, +); - checkExpectedLogMessages(t, messages, ["Not uploading cache for"]); - }); -}); +test.serial( + "uploadDependencyCaches - skips upload if cache size is 0", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; -test("uploadDependencyCaches - throws other 
exceptions", async (t) => { + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([]); + + const mockHash = "abcdef"; + sinon.stub(glob, "hashFiles").resolves(mockHash); + + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + + sinon.stub(cachingUtils, "getTotalCacheSize").resolves(0); + + const config = createTestConfig({ + languages: [KnownLanguage.csharp], + }); + + const result = await uploadDependencyCaches( + codeql, + features, + config, + logger, + ); + t.is(result.length, 1); + t.is(result[0].language, KnownLanguage.csharp); + t.is(result[0].result, CacheStoreResult.Empty); + + checkExpectedLogMessages(t, messages, [ + "Skipping upload of dependency cache", + ]); + }, +); + +test.serial( + "uploadDependencyCaches - uploads caches when all requirements are met", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; + + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([]); + + const mockHash = "abcdef"; + sinon.stub(glob, "hashFiles").resolves(mockHash); + + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + + sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024); + sinon.stub(actionsCache, "saveCache").resolves(); + + const config = createTestConfig({ + languages: [KnownLanguage.csharp], + }); + + const result = await uploadDependencyCaches( + codeql, + features, + config, + logger, + ); + t.is(result.length, 1); + t.is(result[0].language, KnownLanguage.csharp); + t.is(result[0].result, CacheStoreResult.Stored); + t.is(result[0].upload_size_bytes, 1024); + + checkExpectedLogMessages(t, messages, ["Uploading cache of 
size"]); + }, +); + +test.serial( + "uploadDependencyCaches - catches `ReserveCacheError` exceptions", + async (t) => { + process.env["RUNNER_OS"] = "Linux"; + + const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + const features = createFeatures([]); + + const mockHash = "abcdef"; + sinon.stub(glob, "hashFiles").resolves(mockHash); + + const makePatternCheckStub = sinon.stub(internal, "makePatternCheck"); + makePatternCheckStub + .withArgs(CSHARP_BASE_PATTERNS) + .resolves(CSHARP_BASE_PATTERNS); + + sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024); + sinon + .stub(actionsCache, "saveCache") + .throws(new actionsCache.ReserveCacheError("Already in use")); + + const config = createTestConfig({ + languages: [KnownLanguage.csharp], + }); + + await t.notThrowsAsync(async () => { + const result = await uploadDependencyCaches( + codeql, + features, + config, + logger, + ); + t.is(result.length, 1); + t.is(result[0].language, KnownLanguage.csharp); + t.is(result[0].result, CacheStoreResult.Duplicate); + + checkExpectedLogMessages(t, messages, ["Not uploading cache for"]); + }); + }, +); + +test.serial("uploadDependencyCaches - throws other exceptions", async (t) => { process.env["RUNNER_OS"] = "Linux"; const codeql = createStubCodeQL({}); diff --git a/src/diff-informed-analysis-utils.test.ts b/src/diff-informed-analysis-utils.test.ts index 2d98a5f63..44b7e7791 100644 --- a/src/diff-informed-analysis-utils.test.ts +++ b/src/diff-informed-analysis-utils.test.ts @@ -97,14 +97,14 @@ const testShouldPerformDiffInformedAnalysis = test.macro({ title: (_, title) => `shouldPerformDiffInformedAnalysis: ${title}`, }); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns true in the default test case", {}, true, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns false when feature flag is disabled from the API", { @@ -113,7 +113,7 @@ test( false, ); -test( 
+test.serial( testShouldPerformDiffInformedAnalysis, "returns false when CODEQL_ACTION_DIFF_INFORMED_QUERIES is set to false", { @@ -123,7 +123,7 @@ test( false, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns true when CODEQL_ACTION_DIFF_INFORMED_QUERIES is set to true", { @@ -133,7 +133,7 @@ test( true, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns false for CodeQL version 2.20.0", { @@ -142,7 +142,7 @@ test( false, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns false for invalid GHES version", { @@ -154,7 +154,7 @@ test( false, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns false for GHES version 3.18.5", { @@ -166,7 +166,7 @@ test( false, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns true for GHES version 3.19.0", { @@ -178,7 +178,7 @@ test( true, ); -test( +test.serial( testShouldPerformDiffInformedAnalysis, "returns false when not a pull request", { @@ -202,12 +202,12 @@ function runGetDiffRanges(changes: number, patch: string[] | undefined): any { ); } -test("getDiffRanges: file unchanged", async (t) => { +test.serial("getDiffRanges: file unchanged", async (t) => { const diffRanges = runGetDiffRanges(0, undefined); t.deepEqual(diffRanges, []); }); -test("getDiffRanges: file diff too large", async (t) => { +test.serial("getDiffRanges: file diff too large", async (t) => { const diffRanges = runGetDiffRanges(1000000, undefined); t.deepEqual(diffRanges, [ { @@ -218,43 +218,49 @@ test("getDiffRanges: file diff too large", async (t) => { ]); }); -test("getDiffRanges: diff thunk with single addition range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,6 +50,8 @@", - " a", - " b", - " c", - "+1", - "+2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, [ - { - path: "/checkout/path/test.txt", - startLine: 53, - endLine: 54, - }, - ]); -}); +test.serial( + "getDiffRanges: diff thunk with single addition range", + 
async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,6 +50,8 @@", + " a", + " b", + " c", + "+1", + "+2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, [ + { + path: "/checkout/path/test.txt", + startLine: 53, + endLine: 54, + }, + ]); + }, +); -test("getDiffRanges: diff thunk with single deletion range", async (t) => { - const diffRanges = runGetDiffRanges(2, [ - "@@ -30,8 +50,6 @@", - " a", - " b", - " c", - "-1", - "-2", - " d", - " e", - " f", - ]); - t.deepEqual(diffRanges, []); -}); +test.serial( + "getDiffRanges: diff thunk with single deletion range", + async (t) => { + const diffRanges = runGetDiffRanges(2, [ + "@@ -30,8 +50,6 @@", + " a", + " b", + " c", + "-1", + "-2", + " d", + " e", + " f", + ]); + t.deepEqual(diffRanges, []); + }, +); -test("getDiffRanges: diff thunk with single update range", async (t) => { +test.serial("getDiffRanges: diff thunk with single update range", async (t) => { const diffRanges = runGetDiffRanges(2, [ "@@ -30,7 +50,7 @@", " a", @@ -275,7 +281,7 @@ test("getDiffRanges: diff thunk with single update range", async (t) => { ]); }); -test("getDiffRanges: diff thunk with addition ranges", async (t) => { +test.serial("getDiffRanges: diff thunk with addition ranges", async (t) => { const diffRanges = runGetDiffRanges(2, [ "@@ -30,7 +50,9 @@", " a", @@ -302,7 +308,7 @@ test("getDiffRanges: diff thunk with addition ranges", async (t) => { ]); }); -test("getDiffRanges: diff thunk with mixed ranges", async (t) => { +test.serial("getDiffRanges: diff thunk with mixed ranges", async (t) => { const diffRanges = runGetDiffRanges(2, [ "@@ -30,7 +50,7 @@", " a", @@ -334,7 +340,7 @@ test("getDiffRanges: diff thunk with mixed ranges", async (t) => { ]); }); -test("getDiffRanges: multiple diff thunks", async (t) => { +test.serial("getDiffRanges: multiple diff thunks", async (t) => { const diffRanges = runGetDiffRanges(2, [ "@@ -30,6 +50,8 @@", " a", @@ -369,7 +375,7 @@ test("getDiffRanges: multiple diff thunks", async 
(t) => { ]); }); -test("getDiffRanges: no diff context lines", async (t) => { +test.serial("getDiffRanges: no diff context lines", async (t) => { const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]); t.deepEqual(diffRanges, [ { @@ -380,7 +386,7 @@ test("getDiffRanges: no diff context lines", async (t) => { ]); }); -test("getDiffRanges: malformed thunk header", async (t) => { +test.serial("getDiffRanges: malformed thunk header", async (t) => { const diffRanges = runGetDiffRanges(2, ["@@ 30 +50,2 @@", "+1", "+2"]); t.deepEqual(diffRanges, undefined); }); diff --git a/src/feature-flags.test.ts b/src/feature-flags.test.ts index 8b7a0c7d5..85007df13 100644 --- a/src/feature-flags.test.ts +++ b/src/feature-flags.test.ts @@ -34,23 +34,26 @@ test.beforeEach(() => { initializeEnvironment("1.2.3"); }); -test(`All features use default values if running against GHES`, async (t) => { - await withTmpDir(async (tmpDir) => { - const loggedMessages = []; - const features = setUpFeatureFlagTests( - tmpDir, - getRecordingLogger(loggedMessages), - { type: GitHubVariant.GHES, version: "3.0.0" }, - ); +test.serial( + `All features use default values if running against GHES`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const loggedMessages = []; + const features = setUpFeatureFlagTests( + tmpDir, + getRecordingLogger(loggedMessages), + { type: GitHubVariant.GHES, version: "3.0.0" }, + ); - await assertAllFeaturesHaveDefaultValues(t, features); - checkExpectedLogMessages(t, loggedMessages, [ - "Not running against github.com. Using default values for all features.", - ]); - }); -}); + await assertAllFeaturesHaveDefaultValues(t, features); + checkExpectedLogMessages(t, loggedMessages, [ + "Not running against github.com. 
Using default values for all features.", + ]); + }); + }, +); -test(`Feature flags are requested in GHEC-DR`, async (t) => { +test.serial(`Feature flags are requested in GHEC-DR`, async (t) => { await withTmpDir(async (tmpDir) => { const loggedMessages = []; const features = setUpFeatureFlagTests( @@ -78,254 +81,288 @@ test(`Feature flags are requested in GHEC-DR`, async (t) => { }); }); -test("API response missing and features use default value", async (t) => { - await withTmpDir(async (tmpDir) => { - const loggedMessages: LoggedMessage[] = []; - const features = setUpFeatureFlagTests( - tmpDir, - getRecordingLogger(loggedMessages), - ); - - mockFeatureFlagApiEndpoint(403, {}); - - for (const feature of Object.values(Feature)) { - t.assert( - (await getFeatureIncludingCodeQlIfRequired(features, feature)) === - featureConfig[feature].defaultValue, - ); - } - assertAllFeaturesUndefinedInApi(t, loggedMessages); - }); -}); - -test("Features use default value if they're not returned in API response", async (t) => { - await withTmpDir(async (tmpDir) => { - const loggedMessages: LoggedMessage[] = []; - const features = setUpFeatureFlagTests( - tmpDir, - getRecordingLogger(loggedMessages), - ); - - mockFeatureFlagApiEndpoint(200, {}); - - for (const feature of Object.values(Feature)) { - t.assert( - (await getFeatureIncludingCodeQlIfRequired(features, feature)) === - featureConfig[feature].defaultValue, - ); - } - - assertAllFeaturesUndefinedInApi(t, loggedMessages); - }); -}); - -test("Include no more than 25 features in each API request", async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); - - stubFeatureFlagApiEndpoint((request) => { - const requestedFeatures = (request.features as string).split(","); - return { - status: requestedFeatures.length <= 25 ? 
200 : 400, - messageIfError: "Can request a maximum of 25 features.", - data: {}, - }; - }); - - // We only need to call getValue once, and it does not matter which feature - // we ask for. Under the hood, the features library will request all features - // from the API. - const feature = Object.values(Feature)[0]; - await t.notThrowsAsync(async () => - getFeatureIncludingCodeQlIfRequired(features, feature), - ); - }); -}); - -test("Feature flags exception is propagated if the API request errors", async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); - - mockFeatureFlagApiEndpoint(500, {}); - - const someFeature = Object.values(Feature)[0]; - - await t.throwsAsync( - async () => getFeatureIncludingCodeQlIfRequired(features, someFeature), - { - message: - "Encountered an error while trying to determine feature enablement: Error: some error message", - }, - ); - }); -}); - -for (const feature of Object.keys(featureConfig)) { - test(`Only feature '${feature}' is enabled if enabled in the API response. 
Other features disabled`, async (t) => { +test.serial( + "API response missing and features use default value", + async (t) => { await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); + const loggedMessages: LoggedMessage[] = []; + const features = setUpFeatureFlagTests( + tmpDir, + getRecordingLogger(loggedMessages), + ); - // set all features to false except the one we're testing - const expectedFeatureEnablement: { [feature: string]: boolean } = {}; - for (const f of Object.keys(featureConfig)) { - expectedFeatureEnablement[f] = f === feature; + mockFeatureFlagApiEndpoint(403, {}); + + for (const feature of Object.values(Feature)) { + t.assert( + (await getFeatureIncludingCodeQlIfRequired(features, feature)) === + featureConfig[feature].defaultValue, + ); } - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + assertAllFeaturesUndefinedInApi(t, loggedMessages); + }); + }, +); - // retrieve the values of the actual features - const actualFeatureEnablement: { [feature: string]: boolean } = {}; - for (const f of Object.keys(featureConfig)) { - actualFeatureEnablement[f] = await getFeatureIncludingCodeQlIfRequired( - features, - f as Feature, +test.serial( + "Features use default value if they're not returned in API response", + async (t) => { + await withTmpDir(async (tmpDir) => { + const loggedMessages: LoggedMessage[] = []; + const features = setUpFeatureFlagTests( + tmpDir, + getRecordingLogger(loggedMessages), + ); + + mockFeatureFlagApiEndpoint(200, {}); + + for (const feature of Object.values(Feature)) { + t.assert( + (await getFeatureIncludingCodeQlIfRequired(features, feature)) === + featureConfig[feature].defaultValue, ); } - // All features should be false except the one we're testing - t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement); + assertAllFeaturesUndefinedInApi(t, loggedMessages); }); - }); + }, +); - test(`Only feature '${feature}' is enabled if the associated environment variable is true. 
Others disabled.`, async (t) => { +test.serial( + "Include no more than 25 features in each API request", + async (t) => { await withTmpDir(async (tmpDir) => { const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(false); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + stubFeatureFlagApiEndpoint((request) => { + const requestedFeatures = (request.features as string).split(","); + return { + status: requestedFeatures.length <= 25 ? 200 : 400, + messageIfError: "Can request a maximum of 25 features.", + data: {}, + }; + }); - // feature should be disabled initially - t.assert( - !(await getFeatureIncludingCodeQlIfRequired( - features, - feature as Feature, - )), - ); - - // set env var to true and check that the feature is now enabled - process.env[featureConfig[feature].envVar] = "true"; - t.assert( - await getFeatureIncludingCodeQlIfRequired(features, feature as Feature), + // We only need to call getValue once, and it does not matter which feature + // we ask for. Under the hood, the features library will request all features + // from the API. 
+ const feature = Object.values(Feature)[0]; + await t.notThrowsAsync(async () => + getFeatureIncludingCodeQlIfRequired(features, feature), ); }); - }); + }, +); - test(`Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, async (t) => { +test.serial( + "Feature flags exception is propagated if the API request errors", + async (t) => { await withTmpDir(async (tmpDir) => { const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + mockFeatureFlagApiEndpoint(500, {}); - // feature should be enabled initially - t.assert( - await getFeatureIncludingCodeQlIfRequired(features, feature as Feature), - ); + const someFeature = Object.values(Feature)[0]; - // set env var to false and check that the feature is now disabled - process.env[featureConfig[feature].envVar] = "false"; - t.assert( - !(await getFeatureIncludingCodeQlIfRequired( - features, - feature as Feature, - )), + await t.throwsAsync( + async () => getFeatureIncludingCodeQlIfRequired(features, someFeature), + { + message: + "Encountered an error while trying to determine feature enablement: Error: some error message", + }, ); }); - }); + }, +); + +for (const feature of Object.keys(featureConfig)) { + test.serial( + `Only feature '${feature}' is enabled if enabled in the API response. 
Other features disabled`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + + // set all features to false except the one we're testing + const expectedFeatureEnablement: { [feature: string]: boolean } = {}; + for (const f of Object.keys(featureConfig)) { + expectedFeatureEnablement[f] = f === feature; + } + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + + // retrieve the values of the actual features + const actualFeatureEnablement: { [feature: string]: boolean } = {}; + for (const f of Object.keys(featureConfig)) { + actualFeatureEnablement[f] = + await getFeatureIncludingCodeQlIfRequired(features, f as Feature); + } + + // All features should be false except the one we're testing + t.deepEqual(actualFeatureEnablement, expectedFeatureEnablement); + }); + }, + ); + + test.serial( + `Only feature '${feature}' is enabled if the associated environment variable is true. Others disabled.`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + + const expectedFeatureEnablement = initializeFeatures(false); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + + // feature should be disabled initially + t.assert( + !(await getFeatureIncludingCodeQlIfRequired( + features, + feature as Feature, + )), + ); + + // set env var to true and check that the feature is now enabled + process.env[featureConfig[feature].envVar] = "true"; + t.assert( + await getFeatureIncludingCodeQlIfRequired( + features, + feature as Feature, + ), + ); + }); + }, + ); + + test.serial( + `Feature '${feature}' is disabled if the associated environment variable is false, even if enabled in API`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + + const expectedFeatureEnablement = initializeFeatures(true); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + + // feature should be enabled initially + 
t.assert( + await getFeatureIncludingCodeQlIfRequired( + features, + feature as Feature, + ), + ); + + // set env var to false and check that the feature is now disabled + process.env[featureConfig[feature].envVar] = "false"; + t.assert( + !(await getFeatureIncludingCodeQlIfRequired( + features, + feature as Feature, + )), + ); + }); + }, + ); if ( featureConfig[feature].minimumVersion !== undefined || featureConfig[feature].toolsFeature !== undefined ) { - test(`Getting feature '${feature} should throw if no codeql is provided`, async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); + test.serial( + `Getting feature '${feature} should throw if no codeql is provided`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + const expectedFeatureEnablement = initializeFeatures(true); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - // The type system should prevent this happening, but test that if we - // bypass it we get the expected error. - await t.throwsAsync( - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - async () => features.getValue(feature as any), - { - message: `Internal error: A ${ - featureConfig[feature].minimumVersion !== undefined - ? "minimum version" - : "required tools feature" - } is specified for feature ${feature}, but no instance of CodeQL was provided.`, - }, - ); - }); - }); + // The type system should prevent this happening, but test that if we + // bypass it we get the expected error. + await t.throwsAsync( + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + async () => features.getValue(feature as any), + { + message: `Internal error: A ${ + featureConfig[feature].minimumVersion !== undefined + ? 
"minimum version" + : "required tools feature" + } is specified for feature ${feature}, but no instance of CodeQL was provided.`, + }, + ); + }); + }, + ); } if (featureConfig[feature].minimumVersion !== undefined) { - test(`Feature '${feature}' is disabled if the minimum CLI version is below ${featureConfig[feature].minimumVersion}`, async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); + test.serial( + `Feature '${feature}' is disabled if the minimum CLI version is below ${featureConfig[feature].minimumVersion}`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + const expectedFeatureEnablement = initializeFeatures(true); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - // feature should be disabled when an old CLI version is set - let codeql = mockCodeQLVersion("2.0.0"); - t.assert(!(await features.getValue(feature as Feature, codeql))); + // feature should be disabled when an old CLI version is set + let codeql = mockCodeQLVersion("2.0.0"); + t.assert(!(await features.getValue(feature as Feature, codeql))); - // even setting the env var to true should not enable the feature if - // the minimum CLI version is not met - process.env[featureConfig[feature].envVar] = "true"; - t.assert(!(await features.getValue(feature as Feature, codeql))); + // even setting the env var to true should not enable the feature if + // the minimum CLI version is not met + process.env[featureConfig[feature].envVar] = "true"; + t.assert(!(await features.getValue(feature as Feature, codeql))); - // feature should be enabled when a new CLI version is set - // and env var is not set - process.env[featureConfig[feature].envVar] = ""; - codeql = mockCodeQLVersion( - featureConfig[feature].minimumVersion as string, - ); - t.assert(await 
features.getValue(feature as Feature, codeql)); + // feature should be enabled when a new CLI version is set + // and env var is not set + process.env[featureConfig[feature].envVar] = ""; + codeql = mockCodeQLVersion( + featureConfig[feature].minimumVersion as string, + ); + t.assert(await features.getValue(feature as Feature, codeql)); - // set env var to false and check that the feature is now disabled - process.env[featureConfig[feature].envVar] = "false"; - t.assert(!(await features.getValue(feature as Feature, codeql))); - }); - }); + // set env var to false and check that the feature is now disabled + process.env[featureConfig[feature].envVar] = "false"; + t.assert(!(await features.getValue(feature as Feature, codeql))); + }); + }, + ); } if (featureConfig[feature].toolsFeature !== undefined) { - test(`Feature '${feature}' is disabled if the required tools feature is not enabled`, async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); + test.serial( + `Feature '${feature}' is disabled if the required tools feature is not enabled`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + const expectedFeatureEnablement = initializeFeatures(true); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - // feature should be disabled when the required tools feature is not enabled - let codeql = mockCodeQLVersion("2.0.0"); - t.assert(!(await features.getValue(feature as Feature, codeql))); + // feature should be disabled when the required tools feature is not enabled + let codeql = mockCodeQLVersion("2.0.0"); + t.assert(!(await features.getValue(feature as Feature, codeql))); - // even setting the env var to true should not enable the feature if - // the required tools feature is not enabled - process.env[featureConfig[feature].envVar] 
= "true"; - t.assert(!(await features.getValue(feature as Feature, codeql))); + // even setting the env var to true should not enable the feature if + // the required tools feature is not enabled + process.env[featureConfig[feature].envVar] = "true"; + t.assert(!(await features.getValue(feature as Feature, codeql))); - // feature should be enabled when the required tools feature is enabled - // and env var is not set - process.env[featureConfig[feature].envVar] = ""; - codeql = mockCodeQLVersion("2.0.0", { - [featureConfig[feature].toolsFeature]: true, + // feature should be enabled when the required tools feature is enabled + // and env var is not set + process.env[featureConfig[feature].envVar] = ""; + codeql = mockCodeQLVersion("2.0.0", { + [featureConfig[feature].toolsFeature]: true, + }); + t.assert(await features.getValue(feature as Feature, codeql)); + + // set env var to false and check that the feature is now disabled + process.env[featureConfig[feature].envVar] = "false"; + t.assert(!(await features.getValue(feature as Feature, codeql))); }); - t.assert(await features.getValue(feature as Feature, codeql)); - - // set env var to false and check that the feature is now disabled - process.env[featureConfig[feature].envVar] = "false"; - t.assert(!(await features.getValue(feature as Feature, codeql))); - }); - }); + }, + ); } } -test("Feature flags are saved to disk", async (t) => { +test.serial("Feature flags are saved to disk", async (t) => { await withTmpDir(async (tmpDir) => { const features = setUpFeatureFlagTests(tmpDir); const expectedFeatureEnablement = initializeFeatures(true); @@ -376,38 +413,41 @@ test("Feature flags are saved to disk", async (t) => { }); }); -test("Environment variable can override feature flag cache", async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); 
+test.serial( + "Environment variable can override feature flag cache", + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + const expectedFeatureEnablement = initializeFeatures(true); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - const cachedFeatureFlags = path.join(tmpDir, FEATURE_FLAGS_FILE_NAME); - t.true( - await getFeatureIncludingCodeQlIfRequired( - features, - Feature.QaTelemetryEnabled, - ), - "Feature flag should be enabled initially", - ); + const cachedFeatureFlags = path.join(tmpDir, FEATURE_FLAGS_FILE_NAME); + t.true( + await getFeatureIncludingCodeQlIfRequired( + features, + Feature.QaTelemetryEnabled, + ), + "Feature flag should be enabled initially", + ); - t.true( - fs.existsSync(cachedFeatureFlags), - "Feature flag cached file should exist after getting feature flags", - ); - process.env.CODEQL_ACTION_QA_TELEMETRY = "false"; + t.true( + fs.existsSync(cachedFeatureFlags), + "Feature flag cached file should exist after getting feature flags", + ); + process.env.CODEQL_ACTION_QA_TELEMETRY = "false"; - t.false( - await getFeatureIncludingCodeQlIfRequired( - features, - Feature.QaTelemetryEnabled, - ), - "Feature flag should be disabled after setting env var", - ); - }); -}); + t.false( + await getFeatureIncludingCodeQlIfRequired( + features, + Feature.QaTelemetryEnabled, + ), + "Feature flag should be disabled after setting env var", + ); + }); + }, +); -test(`selects CLI from defaults.json on GHES`, async (t) => { +test.serial(`selects CLI from defaults.json on GHES`, async (t) => { await withTmpDir(async (tmpDir) => { const features = setUpFeatureFlagTests(tmpDir); @@ -422,80 +462,94 @@ test(`selects CLI from defaults.json on GHES`, async (t) => { }); for (const variant of [GitHubVariant.DOTCOM, GitHubVariant.GHEC_DR]) { - test(`selects CLI v2.20.1 on ${variant} when feature flags enable v2.20.0 and v2.20.1`, async (t) => { - await withTmpDir(async (tmpDir) => { - const 
features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true; - expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true; - expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] = - false; - expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] = - false; - expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] = - false; - expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] = - false; - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + test.serial( + `selects CLI v2.20.1 on ${variant} when feature flags enable v2.20.0 and v2.20.1`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + const expectedFeatureEnablement = initializeFeatures(true); + expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = + true; + expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = + true; + expectedFeatureEnablement["default_codeql_version_2_20_2_enabled"] = + false; + expectedFeatureEnablement["default_codeql_version_2_20_3_enabled"] = + false; + expectedFeatureEnablement["default_codeql_version_2_20_4_enabled"] = + false; + expectedFeatureEnablement["default_codeql_version_2_20_5_enabled"] = + false; + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - const defaultCliVersion = await features.getDefaultCliVersion(variant); - t.deepEqual(defaultCliVersion, { - cliVersion: "2.20.1", - tagName: "codeql-bundle-v2.20.1", - toolsFeatureFlagsValid: true, + const defaultCliVersion = await features.getDefaultCliVersion(variant); + t.deepEqual(defaultCliVersion, { + cliVersion: "2.20.1", + tagName: "codeql-bundle-v2.20.1", + toolsFeatureFlagsValid: true, + }); }); - }); - }); + }, + ); - test(`selects CLI from defaults.json on ${variant} when no default version feature flags are enabled`, async (t) => 
{ - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); - const expectedFeatureEnablement = initializeFeatures(true); - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + test.serial( + `selects CLI from defaults.json on ${variant} when no default version feature flags are enabled`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + const expectedFeatureEnablement = initializeFeatures(true); + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - const defaultCliVersion = await features.getDefaultCliVersion(variant); - t.deepEqual(defaultCliVersion, { - cliVersion: defaults.cliVersion, - tagName: defaults.bundleVersion, - toolsFeatureFlagsValid: false, + const defaultCliVersion = await features.getDefaultCliVersion(variant); + t.deepEqual(defaultCliVersion, { + cliVersion: defaults.cliVersion, + tagName: defaults.bundleVersion, + toolsFeatureFlagsValid: false, + }); }); - }); - }); + }, + ); - test(`ignores invalid version numbers in default version feature flags on ${variant}`, async (t) => { - await withTmpDir(async (tmpDir) => { - const loggedMessages = []; - const features = setUpFeatureFlagTests( - tmpDir, - getRecordingLogger(loggedMessages), - ); - const expectedFeatureEnablement = initializeFeatures(true); - expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = true; - expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = true; - expectedFeatureEnablement["default_codeql_version_2_20_invalid_enabled"] = - true; - mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); + test.serial( + `ignores invalid version numbers in default version feature flags on ${variant}`, + async (t) => { + await withTmpDir(async (tmpDir) => { + const loggedMessages = []; + const features = setUpFeatureFlagTests( + tmpDir, + getRecordingLogger(loggedMessages), + ); + const expectedFeatureEnablement = initializeFeatures(true); + 
expectedFeatureEnablement["default_codeql_version_2_20_0_enabled"] = + true; + expectedFeatureEnablement["default_codeql_version_2_20_1_enabled"] = + true; + expectedFeatureEnablement[ + "default_codeql_version_2_20_invalid_enabled" + ] = true; + mockFeatureFlagApiEndpoint(200, expectedFeatureEnablement); - const defaultCliVersion = await features.getDefaultCliVersion(variant); - t.deepEqual(defaultCliVersion, { - cliVersion: "2.20.1", - tagName: "codeql-bundle-v2.20.1", - toolsFeatureFlagsValid: true, + const defaultCliVersion = await features.getDefaultCliVersion(variant); + t.deepEqual(defaultCliVersion, { + cliVersion: "2.20.1", + tagName: "codeql-bundle-v2.20.1", + toolsFeatureFlagsValid: true, + }); + + t.assert( + loggedMessages.find( + (v: LoggedMessage) => + v.type === "warning" && + v.message === + "Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.", + ) !== undefined, + ); }); - - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "warning" && - v.message === - "Ignoring feature flag default_codeql_version_2_20_invalid_enabled as it does not specify a valid CodeQL version.", - ) !== undefined, - ); - }); - }); + }, + ); } -test("legacy feature flags should end with _enabled", async (t) => { +test.serial("legacy feature flags should end with _enabled", async (t) => { for (const [feature, config] of Object.entries(featureConfig)) { if ((config satisfies FeatureConfig as FeatureConfig).legacyApi) { t.assert( @@ -506,31 +560,40 @@ test("legacy feature flags should end with _enabled", async (t) => { } }); -test("non-legacy feature flags should not end with _enabled", async (t) => { - for (const [feature, config] of Object.entries(featureConfig)) { - if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) { - t.false( - feature.endsWith("_enabled"), - `non-legacy feature ${feature} should not end with '_enabled'`, - ); +test.serial( + "non-legacy feature flags should 
not end with _enabled", + async (t) => { + for (const [feature, config] of Object.entries(featureConfig)) { + if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) { + t.false( + feature.endsWith("_enabled"), + `non-legacy feature ${feature} should not end with '_enabled'`, + ); + } } - } -}); + }, +); -test("non-legacy feature flags should not start with codeql_action_", async (t) => { - for (const [feature, config] of Object.entries(featureConfig)) { - if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) { - t.false( - feature.startsWith("codeql_action_"), - `non-legacy feature ${feature} should not start with 'codeql_action_'`, - ); +test.serial( + "non-legacy feature flags should not start with codeql_action_", + async (t) => { + for (const [feature, config] of Object.entries(featureConfig)) { + if (!(config satisfies FeatureConfig as FeatureConfig).legacyApi) { + t.false( + feature.startsWith("codeql_action_"), + `non-legacy feature ${feature} should not start with 'codeql_action_'`, + ); + } } - } -}); + }, +); -test("initFeatures returns a `Features` instance by default", async (t) => { - await withTmpDir(async (tmpDir) => { - const features = setUpFeatureFlagTests(tmpDir); - t.is("Features", features.constructor.name); - }); -}); +test.serial( + "initFeatures returns a `Features` instance by default", + async (t) => { + await withTmpDir(async (tmpDir) => { + const features = setUpFeatureFlagTests(tmpDir); + t.is("Features", features.constructor.name); + }); + }, +); diff --git a/src/feature-flags.ts b/src/feature-flags.ts index 546d2e0ff..c773ca9a4 100644 --- a/src/feature-flags.ts +++ b/src/feature-flags.ts @@ -62,24 +62,31 @@ export enum Feature { OverlayAnalysisCodeScanningSwift = "overlay_analysis_code_scanning_swift", OverlayAnalysisCpp = "overlay_analysis_cpp", OverlayAnalysisCsharp = "overlay_analysis_csharp", + /** Controls whether the Actions cache is checked for overlay build outcomes. 
*/ OverlayAnalysisStatusCheck = "overlay_analysis_status_check", + /** Controls whether overlay build failures on are stored in the Actions cache. */ OverlayAnalysisStatusSave = "overlay_analysis_status_save", OverlayAnalysisGo = "overlay_analysis_go", OverlayAnalysisJava = "overlay_analysis_java", OverlayAnalysisJavascript = "overlay_analysis_javascript", OverlayAnalysisPython = "overlay_analysis_python", + /** + * Controls whether lower disk space requirements are used for overlay hardware checks. + * Has no effect if `OverlayAnalysisSkipResourceChecks` is enabled. + */ OverlayAnalysisResourceChecksV2 = "overlay_analysis_resource_checks_v2", OverlayAnalysisRuby = "overlay_analysis_ruby", OverlayAnalysisRust = "overlay_analysis_rust", + /** Controls whether hardware checks are skipped for overlay analysis. */ OverlayAnalysisSkipResourceChecks = "overlay_analysis_skip_resource_checks", OverlayAnalysisSwift = "overlay_analysis_swift", PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib", QaTelemetryEnabled = "qa_telemetry_enabled", /** Note that this currently only disables baseline file coverage information. */ SkipFileCoverageOnPrs = "skip_file_coverage_on_prs", + StartProxyRemoveUnusedRegistries = "start_proxy_remove_unused_registries", StartProxyUseFeaturesRelease = "start_proxy_use_features_release", UploadOverlayDbToApi = "upload_overlay_db_to_api", - UseRepositoryProperties = "use_repository_properties_v2", ValidateDbConfig = "validate_db_config", } @@ -328,6 +335,11 @@ export const featureConfig = { // cannot be found when interpreting results. 
minimumVersion: undefined, }, + [Feature.StartProxyRemoveUnusedRegistries]: { + defaultValue: false, + envVar: "CODEQL_ACTION_START_PROXY_REMOVE_UNUSED_REGISTRIES", + minimumVersion: undefined, + }, [Feature.StartProxyUseFeaturesRelease]: { defaultValue: false, envVar: "CODEQL_ACTION_START_PROXY_USE_FEATURES_RELEASE", @@ -339,11 +351,6 @@ export const featureConfig = { minimumVersion: undefined, toolsFeature: ToolsFeature.BundleSupportsOverlay, }, - [Feature.UseRepositoryProperties]: { - defaultValue: false, - envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES", - minimumVersion: undefined, - }, [Feature.ValidateDbConfig]: { defaultValue: false, envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG", diff --git a/src/feature-flags/properties.test.ts b/src/feature-flags/properties.test.ts index 8cf8ef7cd..a468b3349 100644 --- a/src/feature-flags/properties.test.ts +++ b/src/feature-flags/properties.test.ts @@ -5,59 +5,73 @@ import * as api from "../api-client"; import { getRunnerLogger } from "../logging"; import { parseRepositoryNwo } from "../repository"; import { setupTests } from "../testing-utils"; -import * as util from "../util"; import * as properties from "./properties"; setupTests(test); -test("loadPropertiesFromApi throws if response data is not an array", async (t) => { - sinon.stub(api, "getRepositoryProperties").resolves({ - headers: {}, - status: 200, - url: "", - data: {}, - }); - const logger = getRunnerLogger(true); - const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); - await t.throwsAsync( - properties.loadPropertiesFromApi( +test.serial( + "loadPropertiesFromApi throws if response data is not an array", + async (t) => { + sinon.stub(api, "getRepositoryProperties").resolves({ + headers: {}, + status: 200, + url: "", + data: {}, + }); + const logger = getRunnerLogger(true); + const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); + await t.throwsAsync( + properties.loadPropertiesFromApi(logger, mockRepositoryNwo), { - type: 
util.GitHubVariant.DOTCOM, + message: /Expected repository properties API to return an array/, }, - logger, - mockRepositoryNwo, - ), - { - message: /Expected repository properties API to return an array/, - }, - ); -}); + ); + }, +); -test("loadPropertiesFromApi throws if response data contains unexpected objects", async (t) => { - sinon.stub(api, "getRepositoryProperties").resolves({ - headers: {}, - status: 200, - url: "", - data: [{}], - }); - const logger = getRunnerLogger(true); - const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); - await t.throwsAsync( - properties.loadPropertiesFromApi( +test.serial( + "loadPropertiesFromApi throws if response data contains objects without `property_name`", + async (t) => { + sinon.stub(api, "getRepositoryProperties").resolves({ + headers: {}, + status: 200, + url: "", + data: [{}], + }); + const logger = getRunnerLogger(true); + const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); + await t.throwsAsync( + properties.loadPropertiesFromApi(logger, mockRepositoryNwo), { - type: util.GitHubVariant.DOTCOM, + message: + /Expected repository property object to have a 'property_name'/, }, - logger, - mockRepositoryNwo, - ), - { - message: /Expected repository property object to have a 'property_name'/, - }, - ); -}); + ); + }, +); -test("loadPropertiesFromApi returns empty object if on GHES", async (t) => { +test.serial( + "loadPropertiesFromApi does not throw for unexpected value types of unknown properties", + async (t) => { + sinon.stub(api, "getRepositoryProperties").resolves({ + headers: {}, + status: 200, + url: "", + data: [ + { property_name: "not-used-by-us", value: { foo: "bar" } }, + { property_name: "also-not-used-by-us", value: ["A", "B", "C"] }, + ], + }); + const logger = getRunnerLogger(true); + const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); + await t.notThrowsAsync( + properties.loadPropertiesFromApi(logger, mockRepositoryNwo), + ); + }, +); + +test.serial("loadPropertiesFromApi loads 
known properties", async (t) => { sinon.stub(api, "getRepositoryProperties").resolves({ headers: {}, status: 200, @@ -70,39 +84,13 @@ test("loadPropertiesFromApi returns empty object if on GHES", async (t) => { const logger = getRunnerLogger(true); const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); const response = await properties.loadPropertiesFromApi( - { - type: util.GitHubVariant.GHES, - version: "", - }, - logger, - mockRepositoryNwo, - ); - t.deepEqual(response, {}); -}); - -test("loadPropertiesFromApi loads known properties", async (t) => { - sinon.stub(api, "getRepositoryProperties").resolves({ - headers: {}, - status: 200, - url: "", - data: [ - { property_name: "github-codeql-extra-queries", value: "+queries" }, - { property_name: "unknown-property", value: "something" }, - ] satisfies properties.GitHubPropertiesResponse, - }); - const logger = getRunnerLogger(true); - const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); - const response = await properties.loadPropertiesFromApi( - { - type: util.GitHubVariant.DOTCOM, - }, logger, mockRepositoryNwo, ); t.deepEqual(response, { "github-codeql-extra-queries": "+queries" }); }); -test("loadPropertiesFromApi parses true boolean property", async (t) => { +test.serial("loadPropertiesFromApi parses true boolean property", async (t) => { sinon.stub(api, "getRepositoryProperties").resolves({ headers: {}, status: 200, @@ -119,9 +107,6 @@ test("loadPropertiesFromApi parses true boolean property", async (t) => { const warningSpy = sinon.spy(logger, "warning"); const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); const response = await properties.loadPropertiesFromApi( - { - type: util.GitHubVariant.DOTCOM, - }, logger, mockRepositoryNwo, ); @@ -132,86 +117,83 @@ test("loadPropertiesFromApi parses true boolean property", async (t) => { t.true(warningSpy.notCalled); }); -test("loadPropertiesFromApi parses false boolean property", async (t) => { - sinon.stub(api, 
"getRepositoryProperties").resolves({ - headers: {}, - status: 200, - url: "", - data: [ - { - property_name: "github-codeql-disable-overlay", - value: "false", - }, - ] satisfies properties.GitHubPropertiesResponse, - }); - const logger = getRunnerLogger(true); - const warningSpy = sinon.spy(logger, "warning"); - const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); - const response = await properties.loadPropertiesFromApi( - { - type: util.GitHubVariant.DOTCOM, - }, - logger, - mockRepositoryNwo, - ); - t.deepEqual(response, { - "github-codeql-disable-overlay": false, - }); - t.true(warningSpy.notCalled); -}); - -test("loadPropertiesFromApi throws if property value is not a string", async (t) => { - sinon.stub(api, "getRepositoryProperties").resolves({ - headers: {}, - status: 200, - url: "", - data: [{ property_name: "github-codeql-extra-queries", value: 123 }], - }); - const logger = getRunnerLogger(true); - const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); - await t.throwsAsync( - properties.loadPropertiesFromApi( - { - type: util.GitHubVariant.DOTCOM, - }, +test.serial( + "loadPropertiesFromApi parses false boolean property", + async (t) => { + sinon.stub(api, "getRepositoryProperties").resolves({ + headers: {}, + status: 200, + url: "", + data: [ + { + property_name: "github-codeql-disable-overlay", + value: "false", + }, + ] satisfies properties.GitHubPropertiesResponse, + }); + const logger = getRunnerLogger(true); + const warningSpy = sinon.spy(logger, "warning"); + const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); + const response = await properties.loadPropertiesFromApi( logger, mockRepositoryNwo, - ), - { - message: - /Expected repository property 'github-codeql-extra-queries' to have a string value/, - }, - ); -}); + ); + t.deepEqual(response, { + "github-codeql-disable-overlay": false, + }); + t.true(warningSpy.notCalled); + }, +); -test("loadPropertiesFromApi warns if boolean property has unexpected value", async (t) => { - 
sinon.stub(api, "getRepositoryProperties").resolves({ - headers: {}, - status: 200, - url: "", - data: [ +test.serial( + "loadPropertiesFromApi throws if known property value is not a string", + async (t) => { + sinon.stub(api, "getRepositoryProperties").resolves({ + headers: {}, + status: 200, + url: "", + data: [{ property_name: "github-codeql-extra-queries", value: 123 }], + }); + const logger = getRunnerLogger(true); + const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); + await t.throwsAsync( + properties.loadPropertiesFromApi(logger, mockRepositoryNwo), { - property_name: "github-codeql-disable-overlay", - value: "yes", + message: + /Unexpected value for repository property 'github-codeql-extra-queries' \(number\), got: 123/, }, - ] satisfies properties.GitHubPropertiesResponse, - }); - const logger = getRunnerLogger(true); - const warningSpy = sinon.spy(logger, "warning"); - const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); - const response = await properties.loadPropertiesFromApi( - { - type: util.GitHubVariant.DOTCOM, - }, - logger, - mockRepositoryNwo, - ); - t.deepEqual(response, { - "github-codeql-disable-overlay": false, - }); - t.true(warningSpy.calledOnce); - t.is( - warningSpy.firstCall.args[0], - "Repository property 'github-codeql-disable-overlay' has unexpected value 'yes'. Expected 'true' or 'false'. 
Defaulting to false.", - ); -}); + ); + }, +); + +test.serial( + "loadPropertiesFromApi warns if boolean property has unexpected value", + async (t) => { + sinon.stub(api, "getRepositoryProperties").resolves({ + headers: {}, + status: 200, + url: "", + data: [ + { + property_name: "github-codeql-disable-overlay", + value: "yes", + }, + ] satisfies properties.GitHubPropertiesResponse, + }); + const logger = getRunnerLogger(true); + const warningSpy = sinon.spy(logger, "warning"); + const mockRepositoryNwo = parseRepositoryNwo("owner/repo"); + const response = await properties.loadPropertiesFromApi( + logger, + mockRepositoryNwo, + ); + t.deepEqual(response, { + "github-codeql-disable-overlay": false, + }); + t.true(warningSpy.calledOnce); + t.is( + warningSpy.firstCall.args[0], + "Repository property 'github-codeql-disable-overlay' has unexpected value 'yes'. Expected 'true' or 'false'. Defaulting to false.", + ); + }, +); diff --git a/src/feature-flags/properties.ts b/src/feature-flags/properties.ts index 3b55fcb6a..7c339e3f3 100644 --- a/src/feature-flags/properties.ts +++ b/src/feature-flags/properties.ts @@ -1,7 +1,6 @@ import { getRepositoryProperties } from "../api-client"; import { Logger } from "../logging"; import { RepositoryNwo } from "../repository"; -import { GitHubVariant, GitHubVersion } from "../util"; /** * Enumerates repository property names that have some meaning to us. @@ -12,7 +11,7 @@ export enum RepositoryPropertyName { } /** Parsed types of the known repository properties. */ -type AllRepositoryProperties = { +export type AllRepositoryProperties = { [RepositoryPropertyName.DISABLE_OVERLAY]: boolean; [RepositoryPropertyName.EXTRA_QUERIES]: string; }; @@ -20,16 +19,56 @@ type AllRepositoryProperties = { /** Parsed repository properties. */ export type RepositoryProperties = Partial; +/** Maps known repository properties to the type we expect to get from the API. 
*/ +export type RepositoryPropertyApiType = { + [RepositoryPropertyName.DISABLE_OVERLAY]: string; + [RepositoryPropertyName.EXTRA_QUERIES]: string; +}; + +/** The type of functions which take the `value` from the API and try to convert it to the type we want. */ +export type PropertyParser = ( + name: K, + value: RepositoryPropertyApiType[K], + logger: Logger, +) => AllRepositoryProperties[K]; + +/** Possible types of `value`s we get from the API. */ +export type RepositoryPropertyValue = string | string[]; + +/** The type of repository property configurations. */ +export type PropertyInfo = { + /** A validator which checks that the value received from the API is what we expect. */ + validate: ( + value: RepositoryPropertyValue, + ) => value is RepositoryPropertyApiType[K]; + /** A `PropertyParser` for the property. */ + parse: PropertyParser; +}; + +/** Determines whether a value from the API is a string or not. */ +function isString(value: RepositoryPropertyValue): value is string { + return typeof value === "string"; +} + +/** A repository property that we expect to contain a string value. */ +const stringProperty = { + validate: isString, + parse: parseStringRepositoryProperty, +}; + +/** A repository property that we expect to contain a boolean value. */ +const booleanProperty = { + // The value from the API should come as a string, which we then parse into a boolean. + validate: isString, + parse: parseBooleanRepositoryProperty, +}; + /** Parsers that transform repository properties from the API response into typed values. 
*/ const repositoryPropertyParsers: { - [K in RepositoryPropertyName]: ( - name: K, - value: string, - logger: Logger, - ) => AllRepositoryProperties[K]; + [K in RepositoryPropertyName]: PropertyInfo; } = { - [RepositoryPropertyName.DISABLE_OVERLAY]: parseBooleanRepositoryProperty, - [RepositoryPropertyName.EXTRA_QUERIES]: parseStringRepositoryProperty, + [RepositoryPropertyName.DISABLE_OVERLAY]: booleanProperty, + [RepositoryPropertyName.EXTRA_QUERIES]: stringProperty, }; /** @@ -37,7 +76,7 @@ const repositoryPropertyParsers: { */ export interface GitHubRepositoryProperty { property_name: string; - value: string; + value: RepositoryPropertyValue; } /** @@ -53,16 +92,9 @@ export type GitHubPropertiesResponse = GitHubRepositoryProperty[]; * @returns Returns a partial mapping from `RepositoryPropertyName` to values. */ export async function loadPropertiesFromApi( - gitHubVersion: GitHubVersion, logger: Logger, repositoryNwo: RepositoryNwo, ): Promise { - // TODO: To be safe for now; later we should replace this with a version check once we know - // which version of GHES we expect this to be supported by. - if (gitHubVersion.type === GitHubVariant.GHES) { - return {}; - } - try { const response = await getRepositoryProperties(repositoryNwo); const remoteProperties = response.data as GitHubPropertiesResponse; @@ -85,12 +117,6 @@ export async function loadPropertiesFromApi( ); } - if (typeof property.value !== "string") { - throw new Error( - `Expected repository property '${property.property_name}' to have a string value, but got: ${JSON.stringify(property)}`, - ); - } - if (isKnownPropertyName(property.property_name)) { setProperty(properties, property.property_name, property.value, logger); } @@ -117,14 +143,30 @@ export async function loadPropertiesFromApi( } } -/** Update the partial set of repository properties with the parsed value of the specified property. 
*/ +/** + * Validate that `value` has the correct type for `K` and, if so, update the partial set of repository + * properties with the parsed value of the specified property. + */ function setProperty( properties: RepositoryProperties, name: K, - value: string, + value: RepositoryPropertyValue, logger: Logger, ): void { - properties[name] = repositoryPropertyParsers[name](name, value, logger); + const propertyOptions = repositoryPropertyParsers[name]; + + // We perform the validation here for two reasons: + // 1. This function is only called if `name` is a property we care about, to avoid throwing + // on unrelated properties that may use representations we do not support. + // 2. The `propertyOptions.validate` function checks that the type of `value` we received from + // the API is what expect and narrows the type accordingly, allowing us to call `parse`. + if (propertyOptions.validate(value)) { + properties[name] = propertyOptions.parse(name, value, logger); + } else { + throw new Error( + `Unexpected value for repository property '${name}' (${typeof value}), got: ${JSON.stringify(value)}`, + ); + } } /** Parse a boolean repository property. 
*/ diff --git a/src/fingerprints.test.ts b/src/fingerprints.test.ts index 6c29537d0..6cb9b6861 100644 --- a/src/fingerprints.test.ts +++ b/src/fingerprints.test.ts @@ -6,6 +6,7 @@ import test from "ava"; import * as fingerprints from "./fingerprints"; import { getRunnerLogger } from "./logging"; +import * as sarif from "./sarif"; import { setupTests } from "./testing-utils"; import * as util from "./util"; @@ -201,7 +202,7 @@ test("addFingerprints", async (t) => { fs .readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`) .toString(), - ) as util.SarifFile; + ) as sarif.Log; const expected = JSON.parse( fs .readFileSync( @@ -229,7 +230,7 @@ test("missingRegions", async (t) => { fs .readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`) .toString(), - ) as util.SarifFile; + ) as sarif.Log; const expected = JSON.parse( fs .readFileSync( diff --git a/src/fingerprints.ts b/src/fingerprints.ts index 443238ed4..013ba65f5 100644 --- a/src/fingerprints.ts +++ b/src/fingerprints.ts @@ -5,7 +5,7 @@ import Long from "long"; import { DocUrl } from "./doc-url"; import { Logger } from "./logging"; -import { SarifFile, SarifResult } from "./util"; +import type * as sarif from "./sarif"; const tab = "\t".charCodeAt(0); const space = " ".charCodeAt(0); @@ -138,7 +138,7 @@ export async function hash(callback: hashCallback, filepath: string) { // Generate a hash callback function that updates the given result in-place // when it receives a hash for the correct line number. Ignores hashes for other lines. function locationUpdateCallback( - result: SarifResult, + result: sarif.Result, location: any, logger: Logger, ): hashCallback { @@ -256,17 +256,17 @@ export function resolveUriToFile( // Compute fingerprints for results in the given sarif file // and return an updated sarif file contents. 
export async function addFingerprints( - sarif: SarifFile, + sarifLog: Partial, sourceRoot: string, logger: Logger, -): Promise { +): Promise> { logger.info( `Adding fingerprints to SARIF file. See ${DocUrl.TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS} for more information.`, ); // Gather together results for the same file and construct // callbacks to accept hashes for that file and update the location const callbacksByFile: { [filename: string]: hashCallback[] } = {}; - for (const run of sarif.runs || []) { + for (const run of sarifLog.runs || []) { // We may need the list of artifacts to resolve against const artifacts = run.artifacts || []; @@ -316,5 +316,5 @@ export async function addFingerprints( await hash(teeCallback, filepath); } - return sarif; + return sarifLog; } diff --git a/src/git-utils.test.ts b/src/git-utils.test.ts index e4dbf84bc..4c51bc1d9 100644 --- a/src/git-utils.test.ts +++ b/src/git-utils.test.ts @@ -13,154 +13,187 @@ import { withTmpDir } from "./util"; setupTests(test); -test("getRef() throws on the empty string", async (t) => { +test.serial("getRef() throws on the empty string", async (t) => { process.env["GITHUB_REF"] = ""; await t.throwsAsync(gitUtils.getRef); }); -test("getRef() returns merge PR ref if GITHUB_SHA still checked out", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - const expectedRef = "refs/pull/1/merge"; - const currentSha = "a".repeat(40); - process.env["GITHUB_REF"] = expectedRef; - process.env["GITHUB_SHA"] = currentSha; +test.serial( + "getRef() returns merge PR ref if GITHUB_SHA still checked out", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + const expectedRef = "refs/pull/1/merge"; + const currentSha = "a".repeat(40); + process.env["GITHUB_REF"] = expectedRef; + process.env["GITHUB_SHA"] = currentSha; - const callback = sinon.stub(gitUtils, "getCommitOid"); - callback.withArgs("HEAD").resolves(currentSha); + 
const callback = sinon.stub(gitUtils, "getCommitOid"); + callback.withArgs("HEAD").resolves(currentSha); - const actualRef = await gitUtils.getRef(); - t.deepEqual(actualRef, expectedRef); - callback.restore(); - }); -}); + const actualRef = await gitUtils.getRef(); + t.deepEqual(actualRef, expectedRef); + callback.restore(); + }); + }, +); -test("getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - const expectedRef = "refs/pull/1/merge"; - process.env["GITHUB_REF"] = expectedRef; - process.env["GITHUB_SHA"] = "b".repeat(40); - const sha = "a".repeat(40); +test.serial( + "getRef() returns merge PR ref if GITHUB_REF still checked out but sha has changed (actions checkout@v1)", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + const expectedRef = "refs/pull/1/merge"; + process.env["GITHUB_REF"] = expectedRef; + process.env["GITHUB_SHA"] = "b".repeat(40); + const sha = "a".repeat(40); - const callback = sinon.stub(gitUtils, "getCommitOid"); - callback.withArgs("refs/remotes/pull/1/merge").resolves(sha); - callback.withArgs("HEAD").resolves(sha); + const callback = sinon.stub(gitUtils, "getCommitOid"); + callback.withArgs("refs/remotes/pull/1/merge").resolves(sha); + callback.withArgs("HEAD").resolves(sha); - const actualRef = await gitUtils.getRef(); - t.deepEqual(actualRef, expectedRef); - callback.restore(); - }); -}); + const actualRef = await gitUtils.getRef(); + t.deepEqual(actualRef, expectedRef); + callback.restore(); + }); + }, +); -test("getRef() returns head PR ref if GITHUB_REF no longer checked out", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - process.env["GITHUB_REF"] = "refs/pull/1/merge"; - process.env["GITHUB_SHA"] = "a".repeat(40); +test.serial( + "getRef() returns head PR ref if GITHUB_REF no 
longer checked out", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + process.env["GITHUB_REF"] = "refs/pull/1/merge"; + process.env["GITHUB_SHA"] = "a".repeat(40); - const callback = sinon.stub(gitUtils, "getCommitOid"); - callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40)); - callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40)); + const callback = sinon.stub(gitUtils, "getCommitOid"); + callback.withArgs(tmpDir, "refs/pull/1/merge").resolves("a".repeat(40)); + callback.withArgs(tmpDir, "HEAD").resolves("b".repeat(40)); - const actualRef = await gitUtils.getRef(); - t.deepEqual(actualRef, "refs/pull/1/head"); - callback.restore(); - }); -}); + const actualRef = await gitUtils.getRef(); + t.deepEqual(actualRef, "refs/pull/1/head"); + callback.restore(); + }); + }, +); -test("getRef() returns ref provided as an input and ignores current HEAD", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput"); - getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge"); - getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40)); +test.serial( + "getRef() returns ref provided as an input and ignores current HEAD", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + const getAdditionalInputStub = sinon.stub( + actionsUtil, + "getOptionalInput", + ); + getAdditionalInputStub.withArgs("ref").resolves("refs/pull/2/merge"); + getAdditionalInputStub.withArgs("sha").resolves("b".repeat(40)); - // These values are be ignored - process.env["GITHUB_REF"] = "refs/pull/1/merge"; - process.env["GITHUB_SHA"] = "a".repeat(40); + // These values are be ignored + process.env["GITHUB_REF"] = "refs/pull/1/merge"; + process.env["GITHUB_SHA"] = "a".repeat(40); - const callback = sinon.stub(gitUtils, "getCommitOid"); - 
callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40)); - callback.withArgs("HEAD").resolves("b".repeat(40)); + const callback = sinon.stub(gitUtils, "getCommitOid"); + callback.withArgs("refs/pull/1/merge").resolves("b".repeat(40)); + callback.withArgs("HEAD").resolves("b".repeat(40)); - const actualRef = await gitUtils.getRef(); - t.deepEqual(actualRef, "refs/pull/2/merge"); - callback.restore(); - getAdditionalInputStub.restore(); - }); -}); + const actualRef = await gitUtils.getRef(); + t.deepEqual(actualRef, "refs/pull/2/merge"); + callback.restore(); + getAdditionalInputStub.restore(); + }); + }, +); -test("getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - const expectedRef = "refs/pull/1/HEAD"; - const currentSha = "a".repeat(40); - process.env["CODE_SCANNING_REF"] = expectedRef; - process.env["GITHUB_REF"] = ""; - process.env["GITHUB_SHA"] = currentSha; +test.serial( + "getRef() returns CODE_SCANNING_REF as a fallback for GITHUB_REF", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + const expectedRef = "refs/pull/1/HEAD"; + const currentSha = "a".repeat(40); + process.env["CODE_SCANNING_REF"] = expectedRef; + process.env["GITHUB_REF"] = ""; + process.env["GITHUB_SHA"] = currentSha; - const actualRef = await gitUtils.getRef(); - t.deepEqual(actualRef, expectedRef); - }); -}); + const actualRef = await gitUtils.getRef(); + t.deepEqual(actualRef, expectedRef); + }); + }, +); -test("getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - const expectedRef = "refs/pull/1/merge"; - const currentSha = "a".repeat(40); - process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD"; - process.env["GITHUB_REF"] = expectedRef; - process.env["GITHUB_SHA"] = currentSha; +test.serial( + 
"getRef() returns GITHUB_REF over CODE_SCANNING_REF if both are provided", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + const expectedRef = "refs/pull/1/merge"; + const currentSha = "a".repeat(40); + process.env["CODE_SCANNING_REF"] = "refs/pull/1/HEAD"; + process.env["GITHUB_REF"] = expectedRef; + process.env["GITHUB_SHA"] = currentSha; - const actualRef = await gitUtils.getRef(); - t.deepEqual(actualRef, expectedRef); - }); -}); + const actualRef = await gitUtils.getRef(); + t.deepEqual(actualRef, expectedRef); + }); + }, +); -test("getRef() throws an error if only `ref` is provided as an input", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - const getAdditionalInputStub = sinon.stub(actionsUtil, "getOptionalInput"); - getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge"); +test.serial( + "getRef() throws an error if only `ref` is provided as an input", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + const getAdditionalInputStub = sinon.stub( + actionsUtil, + "getOptionalInput", + ); + getAdditionalInputStub.withArgs("ref").resolves("refs/pull/1/merge"); - await t.throwsAsync( - async () => { - await gitUtils.getRef(); - }, - { - instanceOf: Error, - message: - "Both 'ref' and 'sha' are required if one of them is provided.", - }, - ); - getAdditionalInputStub.restore(); - }); -}); + await t.throwsAsync( + async () => { + await gitUtils.getRef(); + }, + { + instanceOf: Error, + message: + "Both 'ref' and 'sha' are required if one of them is provided.", + }, + ); + getAdditionalInputStub.restore(); + }); + }, +); -test("getRef() throws an error if only `sha` is provided as an input", async (t) => { - await withTmpDir(async (tmpDir: string) => { - setupActionsVars(tmpDir, tmpDir); - process.env["GITHUB_WORKSPACE"] = "/tmp"; - const getAdditionalInputStub = sinon.stub(actionsUtil, 
"getOptionalInput"); - getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40)); +test.serial( + "getRef() throws an error if only `sha` is provided as an input", + async (t) => { + await withTmpDir(async (tmpDir: string) => { + setupActionsVars(tmpDir, tmpDir); + process.env["GITHUB_WORKSPACE"] = "/tmp"; + const getAdditionalInputStub = sinon.stub( + actionsUtil, + "getOptionalInput", + ); + getAdditionalInputStub.withArgs("sha").resolves("a".repeat(40)); - await t.throwsAsync( - async () => { - await gitUtils.getRef(); - }, - { - instanceOf: Error, - message: - "Both 'ref' and 'sha' are required if one of them is provided.", - }, - ); - getAdditionalInputStub.restore(); - }); -}); + await t.throwsAsync( + async () => { + await gitUtils.getRef(); + }, + { + instanceOf: Error, + message: + "Both 'ref' and 'sha' are required if one of them is provided.", + }, + ); + getAdditionalInputStub.restore(); + }); + }, +); -test("isAnalyzingDefaultBranch()", async (t) => { +test.serial("isAnalyzingDefaultBranch()", async (t) => { process.env["GITHUB_EVENT_NAME"] = "push"; process.env["CODE_SCANNING_IS_ANALYZING_DEFAULT_BRANCH"] = "true"; t.deepEqual(await gitUtils.isAnalyzingDefaultBranch(), true); @@ -213,7 +246,7 @@ test("isAnalyzingDefaultBranch()", async (t) => { }); }); -test("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => { +test.serial("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => { const infoStub = sinon.stub(core, "info"); process.env["GITHUB_EVENT_NAME"] = "hucairz"; @@ -225,27 +258,30 @@ test("determineBaseBranchHeadCommitOid non-pullrequest", async (t) => { infoStub.restore(); }); -test("determineBaseBranchHeadCommitOid not git repository", async (t) => { - const infoStub = sinon.stub(core, "info"); +test.serial( + "determineBaseBranchHeadCommitOid not git repository", + async (t) => { + const infoStub = sinon.stub(core, "info"); - process.env["GITHUB_EVENT_NAME"] = "pull_request"; - process.env["GITHUB_SHA"] = 
"100912429fab4cb230e66ffb11e738ac5194e73a"; + process.env["GITHUB_EVENT_NAME"] = "pull_request"; + process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a"; - await withTmpDir(async (tmpDir) => { - await gitUtils.determineBaseBranchHeadCommitOid(tmpDir); - }); + await withTmpDir(async (tmpDir) => { + await gitUtils.determineBaseBranchHeadCommitOid(tmpDir); + }); - t.deepEqual(1, infoStub.callCount); - t.deepEqual( - infoStub.firstCall.args[0], - "git call failed. Will calculate the base branch SHA on the server. Error: " + - "The checkout path provided to the action does not appear to be a git repository.", - ); + t.deepEqual(1, infoStub.callCount); + t.deepEqual( + infoStub.firstCall.args[0], + "git call failed. Will calculate the base branch SHA on the server. Error: " + + "The checkout path provided to the action does not appear to be a git repository.", + ); - infoStub.restore(); -}); + infoStub.restore(); + }, +); -test("determineBaseBranchHeadCommitOid other error", async (t) => { +test.serial("determineBaseBranchHeadCommitOid other error", async (t) => { const infoStub = sinon.stub(core, "info"); process.env["GITHUB_EVENT_NAME"] = "pull_request"; @@ -269,7 +305,7 @@ test("determineBaseBranchHeadCommitOid other error", async (t) => { infoStub.restore(); }); -test("decodeGitFilePath unquoted strings", async (t) => { +test.serial("decodeGitFilePath unquoted strings", async (t) => { t.deepEqual(gitUtils.decodeGitFilePath("foo"), "foo"); t.deepEqual(gitUtils.decodeGitFilePath("foo bar"), "foo bar"); t.deepEqual(gitUtils.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar"); @@ -288,7 +324,7 @@ test("decodeGitFilePath unquoted strings", async (t) => { ); }); -test("decodeGitFilePath quoted strings", async (t) => { +test.serial("decodeGitFilePath quoted strings", async (t) => { t.deepEqual(gitUtils.decodeGitFilePath('"foo"'), "foo"); t.deepEqual(gitUtils.decodeGitFilePath('"foo bar"'), "foo bar"); t.deepEqual(gitUtils.decodeGitFilePath('"foo\\\\bar"'), 
"foo\\bar"); @@ -307,7 +343,7 @@ test("decodeGitFilePath quoted strings", async (t) => { ); }); -test("getFileOidsUnderPath returns correct file mapping", async (t) => { +test.serial("getFileOidsUnderPath returns correct file mapping", async (t) => { const runGitCommandStub = sinon .stub(gitUtils as any, "runGitCommand") .resolves( @@ -331,7 +367,7 @@ test("getFileOidsUnderPath returns correct file mapping", async (t) => { ]); }); -test("getFileOidsUnderPath handles quoted paths", async (t) => { +test.serial("getFileOidsUnderPath handles quoted paths", async (t) => { sinon .stub(gitUtils as any, "runGitCommand") .resolves( @@ -349,44 +385,50 @@ test("getFileOidsUnderPath handles quoted paths", async (t) => { }); }); -test("getFileOidsUnderPath handles empty output", async (t) => { +test.serial("getFileOidsUnderPath handles empty output", async (t) => { sinon.stub(gitUtils as any, "runGitCommand").resolves(""); const result = await gitUtils.getFileOidsUnderPath("/fake/path"); t.deepEqual(result, {}); }); -test("getFileOidsUnderPath throws on unexpected output format", async (t) => { - sinon - .stub(gitUtils as any, "runGitCommand") - .resolves( - "30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" + - "invalid-line-format\n" + - "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts", +test.serial( + "getFileOidsUnderPath throws on unexpected output format", + async (t) => { + sinon + .stub(gitUtils as any, "runGitCommand") + .resolves( + "30d998ded095371488be3a729eb61d86ed721a18_lib/git-utils.js\n" + + "invalid-line-format\n" + + "a47c11f5bfdca7661942d2c8f1b7209fb0dfdf96_src/git-utils.ts", + ); + + await t.throwsAsync( + async () => { + await gitUtils.getFileOidsUnderPath("/fake/path"); + }, + { + instanceOf: Error, + message: 'Unexpected "git ls-files" output: invalid-line-format', + }, ); + }, +); - await t.throwsAsync( - async () => { - await gitUtils.getFileOidsUnderPath("/fake/path"); - }, - { - instanceOf: Error, - message: 'Unexpected "git 
ls-files" output: invalid-line-format', - }, - ); -}); +test.serial( + "getGitVersionOrThrow returns version for valid git output", + async (t) => { + sinon + .stub(gitUtils as any, "runGitCommand") + .resolves(`git version 2.40.0${os.EOL}`); -test("getGitVersionOrThrow returns version for valid git output", async (t) => { - sinon - .stub(gitUtils as any, "runGitCommand") - .resolves(`git version 2.40.0${os.EOL}`); + const version = await gitUtils.getGitVersionOrThrow(); + t.is(version.truncatedVersion, "2.40.0"); + t.is(version.fullVersion, "2.40.0"); + }, +); - const version = await gitUtils.getGitVersionOrThrow(); - t.is(version.truncatedVersion, "2.40.0"); - t.is(version.fullVersion, "2.40.0"); -}); - -test("getGitVersionOrThrow throws for invalid git output", async (t) => { +test.serial("getGitVersionOrThrow throws for invalid git output", async (t) => { sinon.stub(gitUtils as any, "runGitCommand").resolves("invalid output"); await t.throwsAsync( @@ -400,18 +442,21 @@ test("getGitVersionOrThrow throws for invalid git output", async (t) => { ); }); -test("getGitVersionOrThrow handles Windows-style git output", async (t) => { - sinon - .stub(gitUtils as any, "runGitCommand") - .resolves("git version 2.40.0.windows.1"); +test.serial( + "getGitVersionOrThrow handles Windows-style git output", + async (t) => { + sinon + .stub(gitUtils as any, "runGitCommand") + .resolves("git version 2.40.0.windows.1"); - const version = await gitUtils.getGitVersionOrThrow(); - // The truncated version should contain just the major.minor.patch portion - t.is(version.truncatedVersion, "2.40.0"); - t.is(version.fullVersion, "2.40.0.windows.1"); -}); + const version = await gitUtils.getGitVersionOrThrow(); + // The truncated version should contain just the major.minor.patch portion + t.is(version.truncatedVersion, "2.40.0"); + t.is(version.fullVersion, "2.40.0.windows.1"); + }, +); -test("getGitVersionOrThrow throws when git command fails", async (t) => { 
+test.serial("getGitVersionOrThrow throws when git command fails", async (t) => { sinon .stub(gitUtils as any, "runGitCommand") .rejects(new Error("git not found")); @@ -427,16 +472,19 @@ test("getGitVersionOrThrow throws when git command fails", async (t) => { ); }); -test("GitVersionInfo.isAtLeast correctly compares versions", async (t) => { - const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0"); +test.serial( + "GitVersionInfo.isAtLeast correctly compares versions", + async (t) => { + const version = new gitUtils.GitVersionInfo("2.40.0", "2.40.0"); - t.true(version.isAtLeast("2.38.0")); - t.true(version.isAtLeast("2.40.0")); - t.false(version.isAtLeast("2.41.0")); - t.false(version.isAtLeast("3.0.0")); -}); + t.true(version.isAtLeast("2.38.0")); + t.true(version.isAtLeast("2.40.0")); + t.false(version.isAtLeast("2.41.0")); + t.false(version.isAtLeast("3.0.0")); + }, +); -test("listFiles returns array of file paths", async (t) => { +test.serial("listFiles returns array of file paths", async (t) => { sinon .stub(gitUtils, "runGitCommand") .resolves(["dir/file.txt", "README.txt", ""].join(os.EOL)); @@ -448,7 +496,7 @@ test("listFiles returns array of file paths", async (t) => { }); }); -test("getGeneratedFiles returns generated files only", async (t) => { +test.serial("getGeneratedFiles returns generated files only", async (t) => { const runGitCommandStub = sinon.stub(gitUtils, "runGitCommand"); runGitCommandStub diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index e5f1a3831..a6f17948e 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -18,8 +18,10 @@ import { parseRepositoryNwo } from "./repository"; import { createFeatures, createTestConfig, + DEFAULT_ACTIONS_VARS, makeVersionInfo, RecordingLogger, + setupActionsVars, setupTests, } from "./testing-utils"; import * as uploadLib from "./upload-lib"; @@ -30,10 +32,9 @@ const NUM_BYTES_PER_GIB = 1024 * 1024 * 1024; 
setupTests(test); -test("init-post action with debug mode off", async (t) => { +test.serial("init-post action with debug mode off", async (t) => { return await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["RUNNER_TEMP"] = tmpDir; + setupActionsVars(tmpDir, tmpDir); const gitHubVersion: util.GitHubVersion = { type: util.GitHubVariant.DOTCOM, @@ -64,10 +65,9 @@ test("init-post action with debug mode off", async (t) => { }); }); -test("init-post action with debug mode on", async (t) => { +test.serial("init-post action with debug mode on", async (t) => { return await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["RUNNER_TEMP"] = tmpDir; + setupActionsVars(tmpDir, tmpDir); const uploadAllAvailableDebugArtifactsSpy = sinon.spy(); const printDebugLogsSpy = sinon.spy(); @@ -87,83 +87,94 @@ test("init-post action with debug mode on", async (t) => { }); }); -test("uploads failed SARIF run with `diagnostics export` if feature flag is off", async (t) => { - const actionsWorkflow = createTestWorkflow([ - { - name: "Checkout repository", - uses: "actions/checkout@v5", - }, - { - name: "Initialize CodeQL", - uses: "github/codeql-action/init@v4", - with: { - languages: "javascript", +test.serial( + "uploads failed SARIF run with `diagnostics export` if feature flag is off", + async (t) => { + const actionsWorkflow = createTestWorkflow([ + { + name: "Checkout repository", + uses: "actions/checkout@v5", }, - }, - { - name: "Perform CodeQL Analysis", - uses: "github/codeql-action/analyze@v4", - with: { - category: "my-category", + { + name: "Initialize CodeQL", + uses: "github/codeql-action/init@v4", + with: { + languages: "javascript", + }, }, - }, - ]); - await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" }); -}); + { + name: "Perform CodeQL Analysis", + uses: "github/codeql-action/analyze@v4", + 
with: { + category: "my-category", + }, + }, + ]); + await testFailedSarifUpload(t, actionsWorkflow, { + category: "my-category", + }); + }, +); -test("uploads failed SARIF run with `diagnostics export` if the database doesn't exist", async (t) => { - const actionsWorkflow = createTestWorkflow([ - { - name: "Checkout repository", - uses: "actions/checkout@v5", - }, - { - name: "Initialize CodeQL", - uses: "github/codeql-action/init@v4", - with: { - languages: "javascript", +test.serial( + "uploads failed SARIF run with `diagnostics export` if the database doesn't exist", + async (t) => { + const actionsWorkflow = createTestWorkflow([ + { + name: "Checkout repository", + uses: "actions/checkout@v5", }, - }, - { - name: "Perform CodeQL Analysis", - uses: "github/codeql-action/analyze@v4", - with: { - category: "my-category", + { + name: "Initialize CodeQL", + uses: "github/codeql-action/init@v4", + with: { + languages: "javascript", + }, }, - }, - ]); - await testFailedSarifUpload(t, actionsWorkflow, { - category: "my-category", - databaseExists: false, - }); -}); + { + name: "Perform CodeQL Analysis", + uses: "github/codeql-action/analyze@v4", + with: { + category: "my-category", + }, + }, + ]); + await testFailedSarifUpload(t, actionsWorkflow, { + category: "my-category", + databaseExists: false, + }); + }, +); -test("uploads failed SARIF run with database export-diagnostics if the database exists and feature flag is on", async (t) => { - const actionsWorkflow = createTestWorkflow([ - { - name: "Checkout repository", - uses: "actions/checkout@v5", - }, - { - name: "Initialize CodeQL", - uses: "github/codeql-action/init@v4", - with: { - languages: "javascript", +test.serial( + "uploads failed SARIF run with database export-diagnostics if the database exists and feature flag is on", + async (t) => { + const actionsWorkflow = createTestWorkflow([ + { + name: "Checkout repository", + uses: "actions/checkout@v5", }, - }, - { - name: "Perform CodeQL Analysis", - uses: 
"github/codeql-action/analyze@v4", - with: { - category: "my-category", + { + name: "Initialize CodeQL", + uses: "github/codeql-action/init@v4", + with: { + languages: "javascript", + }, }, - }, - ]); - await testFailedSarifUpload(t, actionsWorkflow, { - category: "my-category", - exportDiagnosticsEnabled: true, - }); -}); + { + name: "Perform CodeQL Analysis", + uses: "github/codeql-action/analyze@v4", + with: { + category: "my-category", + }, + }, + ]); + await testFailedSarifUpload(t, actionsWorkflow, { + category: "my-category", + exportDiagnosticsEnabled: true, + }); + }, +); const UPLOAD_INPUT_TEST_CASES = [ { @@ -193,9 +204,49 @@ const UPLOAD_INPUT_TEST_CASES = [ ]; for (const { uploadInput, shouldUpload } of UPLOAD_INPUT_TEST_CASES) { - test(`does ${ - shouldUpload ? "" : "not " - }upload failed SARIF run for workflow with upload: ${uploadInput}`, async (t) => { + test.serial( + `does ${ + shouldUpload ? "" : "not " + }upload failed SARIF run for workflow with upload: ${uploadInput}`, + async (t) => { + const actionsWorkflow = createTestWorkflow([ + { + name: "Checkout repository", + uses: "actions/checkout@v5", + }, + { + name: "Initialize CodeQL", + uses: "github/codeql-action/init@v4", + with: { + languages: "javascript", + }, + }, + { + name: "Perform CodeQL Analysis", + uses: "github/codeql-action/analyze@v4", + with: { + category: "my-category", + upload: uploadInput, + }, + }, + ]); + const result = await testFailedSarifUpload(t, actionsWorkflow, { + category: "my-category", + expectUpload: shouldUpload, + }); + if (!shouldUpload) { + t.is( + result.upload_failed_run_skipped_because, + "SARIF upload is disabled", + ); + } + }, + ); +} + +test.serial( + "uploading failed SARIF run succeeds when workflow uses an input with a matrix var", + async (t) => { const actionsWorkflow = createTestWorkflow([ { name: "Checkout repository", @@ -212,215 +263,199 @@ for (const { uploadInput, shouldUpload } of UPLOAD_INPUT_TEST_CASES) { name: "Perform CodeQL 
Analysis", uses: "github/codeql-action/analyze@v4", with: { - category: "my-category", - upload: uploadInput, + category: "/language:${{ matrix.language }}", + }, + }, + ]); + await testFailedSarifUpload(t, actionsWorkflow, { + category: "/language:csharp", + matrix: { language: "csharp" }, + }); + }, +); + +test.serial( + "uploading failed SARIF run fails when workflow uses a complex upload input", + async (t) => { + const actionsWorkflow = createTestWorkflow([ + { + name: "Checkout repository", + uses: "actions/checkout@v5", + }, + { + name: "Initialize CodeQL", + uses: "github/codeql-action/init@v4", + with: { + languages: "javascript", + }, + }, + { + name: "Perform CodeQL Analysis", + uses: "github/codeql-action/analyze@v4", + with: { + upload: "${{ matrix.language != 'csharp' }}", }, }, ]); const result = await testFailedSarifUpload(t, actionsWorkflow, { - category: "my-category", - expectUpload: shouldUpload, + expectUpload: false, }); - if (!shouldUpload) { - t.is( - result.upload_failed_run_skipped_because, - "SARIF upload is disabled", - ); - } - }); -} - -test("uploading failed SARIF run succeeds when workflow uses an input with a matrix var", async (t) => { - const actionsWorkflow = createTestWorkflow([ - { - name: "Checkout repository", - uses: "actions/checkout@v5", - }, - { - name: "Initialize CodeQL", - uses: "github/codeql-action/init@v4", - with: { - languages: "javascript", - }, - }, - { - name: "Perform CodeQL Analysis", - uses: "github/codeql-action/analyze@v4", - with: { - category: "/language:${{ matrix.language }}", - }, - }, - ]); - await testFailedSarifUpload(t, actionsWorkflow, { - category: "/language:csharp", - matrix: { language: "csharp" }, - }); -}); - -test("uploading failed SARIF run fails when workflow uses a complex upload input", async (t) => { - const actionsWorkflow = createTestWorkflow([ - { - name: "Checkout repository", - uses: "actions/checkout@v5", - }, - { - name: "Initialize CodeQL", - uses: 
"github/codeql-action/init@v4", - with: { - languages: "javascript", - }, - }, - { - name: "Perform CodeQL Analysis", - uses: "github/codeql-action/analyze@v4", - with: { - upload: "${{ matrix.language != 'csharp' }}", - }, - }, - ]); - const result = await testFailedSarifUpload(t, actionsWorkflow, { - expectUpload: false, - }); - t.is( - result.upload_failed_run_error, - "Could not get upload input to github/codeql-action/analyze since it contained an " + - "unrecognized dynamic value.", - ); -}); - -test("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => { - const actionsWorkflow = createTestWorkflow([ - { - name: "Checkout repository", - uses: "actions/checkout@v5", - }, - ]); - const result = await testFailedSarifUpload(t, actionsWorkflow, { - expectUpload: false, - }); - t.is( - result.upload_failed_run_error, - "Could not get upload input to github/codeql-action/analyze since the analyze job does not " + - "call github/codeql-action/analyze.", - ); - t.truthy(result.upload_failed_run_stack_trace); -}); - -test("not uploading failed SARIF when `code-scanning` is not an enabled analysis kind", async (t) => { - const result = await testFailedSarifUpload(t, createTestWorkflow([]), { - analysisKinds: [AnalysisKind.CodeQuality], - expectUpload: false, - }); - t.is( - result.upload_failed_run_skipped_because, - "No analysis kind that supports failed SARIF uploads is enabled.", - ); -}); - -test("saves overlay status when overlay-base analysis did not complete successfully", async (t) => { - return await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["RUNNER_TEMP"] = tmpDir; - // Ensure analyze did not complete successfully. 
- delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY]; - - const diskUsage: util.DiskUsage = { - numAvailableBytes: 100 * NUM_BYTES_PER_GIB, - numTotalBytes: 200 * NUM_BYTES_PER_GIB, - }; - sinon.stub(util, "checkDiskUsage").resolves(diskUsage); - - const saveOverlayStatusStub = sinon - .stub(overlayStatus, "saveOverlayStatus") - .resolves(true); - - const stubCodeQL = codeql.createStubCodeQL({}); - - await initActionPostHelper.uploadFailureInfo( - sinon.spy(), - sinon.spy(), - stubCodeQL, - createTestConfig({ - debugMode: false, - languages: ["javascript"], - overlayDatabaseMode: OverlayDatabaseMode.OverlayBase, - }), - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.OverlayAnalysisStatusSave]), - getRunnerLogger(true), + t.is( + result.upload_failed_run_error, + "Could not get upload input to github/codeql-action/analyze since it contained an " + + "unrecognized dynamic value.", ); + }, +); - t.true( - saveOverlayStatusStub.calledOnce, - "saveOverlayStatus should be called exactly once", - ); - t.deepEqual( - saveOverlayStatusStub.firstCall.args[0], - stubCodeQL, - "first arg should be the CodeQL instance", - ); - t.deepEqual( - saveOverlayStatusStub.firstCall.args[1], - ["javascript"], - "second arg should be the languages", - ); - t.deepEqual( - saveOverlayStatusStub.firstCall.args[2], - diskUsage, - "third arg should be the disk usage", - ); - t.deepEqual( - saveOverlayStatusStub.firstCall.args[3], +test.serial( + "uploading failed SARIF run fails when workflow does not reference github/codeql-action", + async (t) => { + const actionsWorkflow = createTestWorkflow([ { - attemptedToBuildOverlayBaseDatabase: true, - builtOverlayBaseDatabase: false, + name: "Checkout repository", + uses: "actions/checkout@v5", }, - "fourth arg should be the overlay status recording an unsuccessful build attempt", - ); - }); -}); - -test("does not save overlay status when OverlayAnalysisStatusSave feature flag is disabled", async (t) => { - return 
await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["RUNNER_TEMP"] = tmpDir; - // Ensure analyze did not complete successfully. - delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY]; - - sinon.stub(util, "checkDiskUsage").resolves({ - numAvailableBytes: 100 * NUM_BYTES_PER_GIB, - numTotalBytes: 200 * NUM_BYTES_PER_GIB, + ]); + const result = await testFailedSarifUpload(t, actionsWorkflow, { + expectUpload: false, }); - - const saveOverlayStatusStub = sinon - .stub(overlayStatus, "saveOverlayStatus") - .resolves(true); - - await initActionPostHelper.uploadFailureInfo( - sinon.spy(), - sinon.spy(), - codeql.createStubCodeQL({}), - createTestConfig({ - debugMode: false, - languages: ["javascript"], - overlayDatabaseMode: OverlayDatabaseMode.OverlayBase, - }), - parseRepositoryNwo("github/codeql-action"), - createFeatures([]), - getRunnerLogger(true), + t.is( + result.upload_failed_run_error, + "Could not get upload input to github/codeql-action/analyze since the analyze job does not " + + "call github/codeql-action/analyze.", ); + t.truthy(result.upload_failed_run_stack_trace); + }, +); - t.true( - saveOverlayStatusStub.notCalled, - "saveOverlayStatus should not be called when OverlayAnalysisStatusSave feature flag is disabled", +test.serial( + "not uploading failed SARIF when `code-scanning` is not an enabled analysis kind", + async (t) => { + const result = await testFailedSarifUpload(t, createTestWorkflow([]), { + analysisKinds: [AnalysisKind.CodeQuality], + expectUpload: false, + }); + t.is( + result.upload_failed_run_skipped_because, + "No analysis kind that supports failed SARIF uploads is enabled.", ); - }); -}); + }, +); -test("does not save overlay status when build successful", async (t) => { +test.serial( + "saves overlay status when overlay-base analysis did not complete successfully", + async (t) => { + return await util.withTmpDir(async (tmpDir) => { + 
setupActionsVars(tmpDir, tmpDir); + // Ensure analyze did not complete successfully. + delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY]; + + const diskUsage: util.DiskUsage = { + numAvailableBytes: 100 * NUM_BYTES_PER_GIB, + numTotalBytes: 200 * NUM_BYTES_PER_GIB, + }; + sinon.stub(util, "checkDiskUsage").resolves(diskUsage); + + const saveOverlayStatusStub = sinon + .stub(overlayStatus, "saveOverlayStatus") + .resolves(true); + + const stubCodeQL = codeql.createStubCodeQL({}); + + await initActionPostHelper.uploadFailureInfo( + sinon.spy(), + sinon.spy(), + stubCodeQL, + createTestConfig({ + debugMode: false, + languages: ["javascript"], + overlayDatabaseMode: OverlayDatabaseMode.OverlayBase, + }), + parseRepositoryNwo("github/codeql-action"), + createFeatures([Feature.OverlayAnalysisStatusSave]), + getRunnerLogger(true), + ); + + t.true( + saveOverlayStatusStub.calledOnce, + "saveOverlayStatus should be called exactly once", + ); + t.deepEqual( + saveOverlayStatusStub.firstCall.args[0], + stubCodeQL, + "first arg should be the CodeQL instance", + ); + t.deepEqual( + saveOverlayStatusStub.firstCall.args[1], + ["javascript"], + "second arg should be the languages", + ); + t.deepEqual( + saveOverlayStatusStub.firstCall.args[2], + diskUsage, + "third arg should be the disk usage", + ); + t.deepEqual( + saveOverlayStatusStub.firstCall.args[3], + { + attemptedToBuildOverlayBaseDatabase: true, + builtOverlayBaseDatabase: false, + job: { + checkRunId: undefined, + workflowRunId: Number(DEFAULT_ACTIONS_VARS.GITHUB_RUN_ID), + workflowRunAttempt: Number(DEFAULT_ACTIONS_VARS.GITHUB_RUN_ATTEMPT), + name: DEFAULT_ACTIONS_VARS.GITHUB_JOB, + }, + }, + "fourth arg should be the overlay status recording an unsuccessful build attempt with job details", + ); + }); + }, +); + +test.serial( + "does not save overlay status when OverlayAnalysisStatusSave feature flag is disabled", + async (t) => { + return await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, 
tmpDir); + // Ensure analyze did not complete successfully. + delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY]; + + sinon.stub(util, "checkDiskUsage").resolves({ + numAvailableBytes: 100 * NUM_BYTES_PER_GIB, + numTotalBytes: 200 * NUM_BYTES_PER_GIB, + }); + + const saveOverlayStatusStub = sinon + .stub(overlayStatus, "saveOverlayStatus") + .resolves(true); + + await initActionPostHelper.uploadFailureInfo( + sinon.spy(), + sinon.spy(), + codeql.createStubCodeQL({}), + createTestConfig({ + debugMode: false, + languages: ["javascript"], + overlayDatabaseMode: OverlayDatabaseMode.OverlayBase, + }), + parseRepositoryNwo("github/codeql-action"), + createFeatures([]), + getRunnerLogger(true), + ); + + t.true( + saveOverlayStatusStub.notCalled, + "saveOverlayStatus should not be called when OverlayAnalysisStatusSave feature flag is disabled", + ); + }); + }, +); + +test.serial("does not save overlay status when build successful", async (t) => { return await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["RUNNER_TEMP"] = tmpDir; + setupActionsVars(tmpDir, tmpDir); // Mark analyze as having completed successfully. 
process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] = "true"; @@ -454,41 +489,43 @@ test("does not save overlay status when build successful", async (t) => { }); }); -test("does not save overlay status when overlay not enabled", async (t) => { - return await util.withTmpDir(async (tmpDir) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["RUNNER_TEMP"] = tmpDir; - delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY]; +test.serial( + "does not save overlay status when overlay not enabled", + async (t) => { + return await util.withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY]; - sinon.stub(util, "checkDiskUsage").resolves({ - numAvailableBytes: 100 * NUM_BYTES_PER_GIB, - numTotalBytes: 200 * NUM_BYTES_PER_GIB, + sinon.stub(util, "checkDiskUsage").resolves({ + numAvailableBytes: 100 * NUM_BYTES_PER_GIB, + numTotalBytes: 200 * NUM_BYTES_PER_GIB, + }); + + const saveOverlayStatusStub = sinon + .stub(overlayStatus, "saveOverlayStatus") + .resolves(true); + + await initActionPostHelper.uploadFailureInfo( + sinon.spy(), + sinon.spy(), + codeql.createStubCodeQL({}), + createTestConfig({ + debugMode: false, + languages: ["javascript"], + overlayDatabaseMode: OverlayDatabaseMode.None, + }), + parseRepositoryNwo("github/codeql-action"), + createFeatures([]), + getRunnerLogger(true), + ); + + t.true( + saveOverlayStatusStub.notCalled, + "saveOverlayStatus should not be called when overlay is not enabled", + ); }); - - const saveOverlayStatusStub = sinon - .stub(overlayStatus, "saveOverlayStatus") - .resolves(true); - - await initActionPostHelper.uploadFailureInfo( - sinon.spy(), - sinon.spy(), - codeql.createStubCodeQL({}), - createTestConfig({ - debugMode: false, - languages: ["javascript"], - overlayDatabaseMode: OverlayDatabaseMode.None, - }), - parseRepositoryNwo("github/codeql-action"), - createFeatures([]), - getRunnerLogger(true), - ); - 
- t.true( - saveOverlayStatusStub.notCalled, - "saveOverlayStatus should not be called when overlay is not enabled", - ); - }); -}); + }, +); function createTestWorkflow( steps: workflow.WorkflowJobStep[], @@ -542,9 +579,8 @@ async function testFailedSarifUpload( config.dbLocation = "path/to/database"; } process.env["GITHUB_JOB"] = "analyze"; - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - process.env["GITHUB_WORKSPACE"] = - "/home/runner/work/codeql-action/codeql-action"; + process.env["GITHUB_REPOSITORY"] = DEFAULT_ACTIONS_VARS.GITHUB_REPOSITORY; + process.env["GITHUB_WORKSPACE"] = "/tmp"; sinon .stub(actionsUtil, "getRequiredInput") .withArgs("matrix") @@ -666,93 +702,99 @@ async function mockRiskAssessmentEnv(matrix: string) { return { uploadArtifact, databaseExportDiagnostics, diagnosticsExport }; } -test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (diagnosticsExport)", async (t) => { - const logger = new RecordingLogger(); - const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = - await mockRiskAssessmentEnv(singleLanguageMatrix); +test.serial( + "tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (diagnosticsExport)", + async (t) => { + const logger = new RecordingLogger(); + const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); - const config = createTestConfig({ - analysisKinds: [AnalysisKind.RiskAssessment], - codeQLCmd: "codeql-for-testing", - languages: ["javascript"], - }); - const features = createFeatures([]); + const config = createTestConfig({ + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript"], + }); + const features = createFeatures([]); - const result = await initActionPostHelper.tryUploadSarifIfRunFailed( - config, - parseRepositoryNwo("github/codeql-action-fake-repository"), - features, - logger, - ); - - const expectedName 
= debugArtifacts.sanitizeArtifactName( - `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, - ); - const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; - t.is(result.upload_failed_run_skipped_because, undefined); - t.is(result.upload_failed_run_error, undefined); - t.is(result.sarifID, expectedName); - t.assert( - uploadArtifact.calledOnceWith( - expectedName, - [sinon.match(expectedFilePattern)], - sinon.match.string, - ), - ); - t.assert(databaseExportDiagnostics.notCalled); - t.assert( - diagnosticsExport.calledOnceWith( - sinon.match(expectedFilePattern), - "/language:javascript", + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( config, - ), - ); -}); + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); -test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (databaseExportDiagnostics)", async (t) => { - const logger = new RecordingLogger(); - const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = - await mockRiskAssessmentEnv(singleLanguageMatrix); + const expectedName = debugArtifacts.sanitizeArtifactName( + `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, + ); + const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; + t.is(result.upload_failed_run_skipped_because, undefined); + t.is(result.upload_failed_run_error, undefined); + t.is(result.sarifID, expectedName); + t.assert( + uploadArtifact.calledOnceWith( + expectedName, + [sinon.match(expectedFilePattern)], + sinon.match.string, + ), + ); + t.assert(databaseExportDiagnostics.notCalled); + t.assert( + diagnosticsExport.calledOnceWith( + sinon.match(expectedFilePattern), + "/language:javascript", + config, + ), + ); + }, +); - const dbLocation = "/some/path"; - const config = createTestConfig({ - analysisKinds: [AnalysisKind.RiskAssessment], - codeQLCmd: "codeql-for-testing", - languages: ["javascript"], - dbLocation: 
"/some/path", - }); - const features = createFeatures([Feature.ExportDiagnosticsEnabled]); +test.serial( + "tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (databaseExportDiagnostics)", + async (t) => { + const logger = new RecordingLogger(); + const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); - const result = await initActionPostHelper.tryUploadSarifIfRunFailed( - config, - parseRepositoryNwo("github/codeql-action-fake-repository"), - features, - logger, - ); + const dbLocation = "/some/path"; + const config = createTestConfig({ + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript"], + dbLocation: "/some/path", + }); + const features = createFeatures([Feature.ExportDiagnosticsEnabled]); - const expectedName = debugArtifacts.sanitizeArtifactName( - `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, - ); - const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; - t.is(result.upload_failed_run_skipped_because, undefined); - t.is(result.upload_failed_run_error, undefined); - t.is(result.sarifID, expectedName); - t.assert( - uploadArtifact.calledOnceWith( - expectedName, - [sinon.match(expectedFilePattern)], - sinon.match.string, - ), - ); - t.assert(diagnosticsExport.notCalled); - t.assert( - databaseExportDiagnostics.calledOnceWith( - dbLocation, - sinon.match(expectedFilePattern), - "/language:javascript", - ), - ); -}); + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + config, + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + const expectedName = debugArtifacts.sanitizeArtifactName( + `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, + ); + const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; + t.is(result.upload_failed_run_skipped_because, undefined); + 
t.is(result.upload_failed_run_error, undefined); + t.is(result.sarifID, expectedName); + t.assert( + uploadArtifact.calledOnceWith( + expectedName, + [sinon.match(expectedFilePattern)], + sinon.match.string, + ), + ); + t.assert(diagnosticsExport.notCalled); + t.assert( + databaseExportDiagnostics.calledOnceWith( + dbLocation, + sinon.match(expectedFilePattern), + "/language:javascript", + ), + ); + }, +); const skippedUploadTest = test.macro({ exec: async ( @@ -781,7 +823,7 @@ const skippedUploadTest = test.macro({ `tryUploadSarifIfRunFailed - skips upload ${providedTitle}`, }); -test( +test.serial( "without CodeQL command", skippedUploadTest, // No codeQLCmd @@ -792,7 +834,7 @@ test( "CodeQL command not found", ); -test( +test.serial( "if no language is configured", skippedUploadTest, // No explicit language configuration @@ -803,7 +845,7 @@ test( "Unexpectedly, the configuration is not for a single language.", ); -test( +test.serial( "if multiple languages is configured", skippedUploadTest, // Multiple explicit languages configured diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 40d0f42e3..0c97a947a 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -22,7 +22,11 @@ import { EnvVar } from "./environment"; import { Feature, FeatureEnablement } from "./feature-flags"; import { Logger } from "./logging"; import { OverlayDatabaseMode } from "./overlay"; -import { OverlayStatus, saveOverlayStatus } from "./overlay/status"; +import { + createOverlayStatus, + OverlayStatus, + saveOverlayStatus, +} from "./overlay/status"; import { RepositoryNwo, getRepositoryNwo } from "./repository"; import { JobStatus } from "./status-report"; import * as uploadLib from "./upload-lib"; @@ -424,10 +428,17 @@ async function recordOverlayStatus( return; } - const overlayStatus: OverlayStatus = { - attemptedToBuildOverlayBaseDatabase: true, - builtOverlayBaseDatabase: false, - }; + const checkRunIdInput = 
actionsUtil.getOptionalInput("check-run-id"); + const checkRunId = + checkRunIdInput !== undefined ? parseInt(checkRunIdInput, 10) : undefined; + + const overlayStatus: OverlayStatus = createOverlayStatus( + { + attemptedToBuildOverlayBaseDatabase: true, + builtOverlayBaseDatabase: false, + }, + checkRunId !== undefined && checkRunId >= 0 ? checkRunId : undefined, + ); const diskUsage = await checkDiskUsage(logger); if (diskUsage === undefined) { @@ -447,7 +458,7 @@ async function recordOverlayStatus( const blurb = "This job attempted to run with improved incremental analysis but it did not complete successfully. " + - "This may have been due to disk space constraints: using improved incremental analysis can " + + "One possible reason for this is disk space constraints, since improved incremental analysis can " + "require a significant amount of disk space for some repositories."; if (saved) { @@ -455,7 +466,7 @@ async function recordOverlayStatus( `${blurb} ` + "This failure has been recorded in the Actions cache, so the next CodeQL analysis will run " + "without improved incremental analysis. If you want to enable improved incremental analysis, " + - "increase the disk space available to the runner. " + + "try increasing the disk space available to the runner. " + "If that doesn't help, contact GitHub Support for further assistance.", ); } else { diff --git a/src/init-action.ts b/src/init-action.ts index 7bd749e82..f7d9d52eb 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -93,7 +93,6 @@ import { checkActionVersion, getErrorMessage, BuildMode, - GitHubVersion, Result, getOptionalEnvVar, Success, @@ -250,8 +249,6 @@ async function run(startedAt: Date) { // Fetch the values of known repository properties that affect us. 
const repositoryPropertiesResult = await loadRepositoryProperties( repositoryNwo, - gitHubVersion, - features, logger, ); @@ -820,8 +817,6 @@ async function run(startedAt: Date) { */ async function loadRepositoryProperties( repositoryNwo: RepositoryNwo, - gitHubVersion: GitHubVersion, - features: FeatureEnablement, logger: Logger, ): Promise> { // See if we can skip loading repository properties early. In particular, @@ -839,17 +834,8 @@ async function loadRepositoryProperties( return new Success({}); } - if (!(await features.getValue(Feature.UseRepositoryProperties))) { - logger.debug( - "Skipping loading repository properties because the UseRepositoryProperties feature flag is disabled.", - ); - return new Success({}); - } - try { - return new Success( - await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo), - ); + return new Success(await loadPropertiesFromApi(logger, repositoryNwo)); } catch (error) { logger.warning( `Failed to load repository properties: ${getErrorMessage(error)}`, diff --git a/src/init.test.ts b/src/init.test.ts index 8106a78f9..a7d4f4de1 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -77,46 +77,49 @@ for (const { runnerEnv, ErrorConstructor, message } of [ "otherwise we recommend rerunning the job.", }, ]) { - test(`cleanupDatabaseClusterDirectory throws a ${ErrorConstructor.name} when cleanup fails on ${runnerEnv} runner`, async (t) => { - await withTmpDir(async (tmpDir: string) => { - process.env["RUNNER_ENVIRONMENT"] = runnerEnv; + test.serial( + `cleanupDatabaseClusterDirectory throws a ${ErrorConstructor.name} when cleanup fails on ${runnerEnv} runner`, + async (t) => { + await withTmpDir(async (tmpDir: string) => { + process.env["RUNNER_ENVIRONMENT"] = runnerEnv; - const dbLocation = path.resolve(tmpDir, "dbs"); - fs.mkdirSync(dbLocation, { recursive: true }); + const dbLocation = path.resolve(tmpDir, "dbs"); + fs.mkdirSync(dbLocation, { recursive: true }); - const fileToCleanUp = path.resolve( - dbLocation, - 
"something-to-cleanup.txt", - ); - fs.writeFileSync(fileToCleanUp, ""); + const fileToCleanUp = path.resolve( + dbLocation, + "something-to-cleanup.txt", + ); + fs.writeFileSync(fileToCleanUp, ""); - const rmSyncError = `Failed to clean up file ${fileToCleanUp}`; + const rmSyncError = `Failed to clean up file ${fileToCleanUp}`; - const messages: LoggedMessage[] = []; - t.throws( - () => - cleanupDatabaseClusterDirectory( - createTestConfig({ dbLocation }), - getRecordingLogger(messages), - {}, - () => { - throw new Error(rmSyncError); - }, - ), - { - instanceOf: ErrorConstructor, - message: `${message(dbLocation)} Details: ${rmSyncError}`, - }, - ); + const messages: LoggedMessage[] = []; + t.throws( + () => + cleanupDatabaseClusterDirectory( + createTestConfig({ dbLocation }), + getRecordingLogger(messages), + {}, + () => { + throw new Error(rmSyncError); + }, + ), + { + instanceOf: ErrorConstructor, + message: `${message(dbLocation)} Details: ${rmSyncError}`, + }, + ); - t.is(messages.length, 1); - t.is(messages[0].type, "warning"); - t.is( - messages[0].message, - `The database cluster directory ${dbLocation} must be empty. Attempting to clean it up.`, - ); - }); - }); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + `The database cluster directory ${dbLocation} must be empty. 
Attempting to clean it up.`, + ); + }); + }, + ); } test("cleanupDatabaseClusterDirectory can disable warning with options", async (t) => { @@ -459,50 +462,62 @@ test("file coverage information enabled when debugMode is true", async (t) => { ); }); -test("file coverage information enabled when not analyzing a pull request", async (t) => { - sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false); +test.serial( + "file coverage information enabled when not analyzing a pull request", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), - ); -}); + t.true( + await getFileCoverageInformationEnabled( + false, // debugMode + parseRepositoryNwo("github/codeql-action"), + createFeatures([Feature.SkipFileCoverageOnPrs]), + ), + ); + }, +); -test("file coverage information enabled when owner is not 'github'", async (t) => { - sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); +test.serial( + "file coverage information enabled when owner is not 'github'", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("other-org/some-repo"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), - ); -}); + t.true( + await getFileCoverageInformationEnabled( + false, // debugMode + parseRepositoryNwo("other-org/some-repo"), + createFeatures([Feature.SkipFileCoverageOnPrs]), + ), + ); + }, +); -test("file coverage information enabled when feature flag is not enabled", async (t) => { - sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); +test.serial( + "file coverage information enabled when feature flag is not enabled", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.true( - await 
getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([]), - ), - ); -}); + t.true( + await getFileCoverageInformationEnabled( + false, // debugMode + parseRepositoryNwo("github/codeql-action"), + createFeatures([]), + ), + ); + }, +); -test("file coverage information disabled when all conditions for skipping are met", async (t) => { - sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); +test.serial( + "file coverage information disabled when all conditions for skipping are met", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.false( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), - ); -}); + t.false( + await getFileCoverageInformationEnabled( + false, // debugMode + parseRepositoryNwo("github/codeql-action"), + createFeatures([Feature.SkipFileCoverageOnPrs]), + ), + ); + }, +); diff --git a/src/overlay/diagnostics.ts b/src/overlay/diagnostics.ts index ab1266868..6bc11a73f 100644 --- a/src/overlay/diagnostics.ts +++ b/src/overlay/diagnostics.ts @@ -10,20 +10,35 @@ import { RepositoryPropertyName } from "../feature-flags/properties"; /** Reason why overlay analysis was disabled. */ export enum OverlayDisabledReason { + /** Overlay analysis was disabled by the CODEQL_OVERLAY_DATABASE_MODE environment variable being set to "none". */ + DisabledByEnvironmentVariable = "disabled-by-environment-variable", /** Overlay analysis was disabled by a repository property. */ DisabledByRepositoryProperty = "disabled-by-repository-property", - /** Overlay analysis feature was not enabled. */ - FeatureNotEnabled = "feature-not-enabled", /** The build mode is incompatible with overlay analysis. */ IncompatibleBuildMode = "incompatible-build-mode", /** The CodeQL CLI version is too old to support overlay analysis. 
*/ IncompatibleCodeQl = "incompatible-codeql", /** The Git version could not be determined or is too old. */ IncompatibleGit = "incompatible-git", - /** The runner does not have enough disk space or memory. */ - InsufficientResources = "insufficient-resources", + /** The runner does not have enough disk space to perform overlay analysis. */ + InsufficientDiskSpace = "insufficient-disk-space", + /** The runner does not have enough memory to perform overlay analysis. */ + InsufficientMemory = "insufficient-memory", + /** Overlay analysis is not enabled for one or more of the configured languages. */ + LanguageNotEnabled = "language-not-enabled", /** The source root is not inside a git repository. */ NoGitRoot = "no-git-root", + /** + * For one or more of the configured languages, overlay analysis is only + * enabled when using the default query suite, but the config customises the + * queries by disabling default queries, specifying custom queries or packs, + * or adding query filters. + */ + NonDefaultQueries = "non-default-queries", + /** We are not analyzing a pull request or the default branch. */ + NotPullRequestOrDefaultBranch = "not-pull-request-or-default-branch", + /** The top-level overlay analysis feature flag is not enabled. */ + OverallFeatureNotEnabled = "overall-feature-not-enabled", /** Overlay analysis was skipped because it previously failed with similar hardware resources. */ SkippedDueToCachedStatus = "skipped-due-to-cached-status", /** Disk usage could not be determined during the overlay status check. */ @@ -68,8 +83,8 @@ export async function addOverlayDisablementDiagnostics( markdownMessage: `Improved incremental analysis was skipped because it previously failed for this repository ` + `with CodeQL version ${(await codeql.getVersion()).version} on a runner with similar hardware resources. ` + - "Improved incremental analysis may require a significant amount of disk space for some repositories. 
" + - "If you want to enable improved incremental analysis, increase the disk space available " + + "One possible reason for this is that improved incremental analysis can require a significant amount of disk space for some repositories. " + + "If you want to try re-enabling improved incremental analysis, increase the disk space available " + "to the runner. If that doesn't help, contact GitHub Support for further assistance.\n\n" + "Improved incremental analysis will be automatically retried when the next version of CodeQL is released. " + `You can also manually trigger a retry by [removing](${DocUrl.DELETE_ACTIONS_CACHE_ENTRIES}) \`codeql-overlay-status-*\` entries from the Actions cache.`, diff --git a/src/overlay/index.test.ts b/src/overlay/index.test.ts index 7e63520f5..8e92a69e2 100644 --- a/src/overlay/index.test.ts +++ b/src/overlay/index.test.ts @@ -30,65 +30,68 @@ import { setupTests(test); -test("writeOverlayChangesFile generates correct changes file", async (t) => { - await withTmpDir(async (tmpDir) => { - const dbLocation = path.join(tmpDir, "db"); - await fs.promises.mkdir(dbLocation, { recursive: true }); - const sourceRoot = path.join(tmpDir, "src"); - await fs.promises.mkdir(sourceRoot, { recursive: true }); - const tempDir = path.join(tmpDir, "temp"); - await fs.promises.mkdir(tempDir, { recursive: true }); +test.serial( + "writeOverlayChangesFile generates correct changes file", + async (t) => { + await withTmpDir(async (tmpDir) => { + const dbLocation = path.join(tmpDir, "db"); + await fs.promises.mkdir(dbLocation, { recursive: true }); + const sourceRoot = path.join(tmpDir, "src"); + await fs.promises.mkdir(sourceRoot, { recursive: true }); + const tempDir = path.join(tmpDir, "temp"); + await fs.promises.mkdir(tempDir, { recursive: true }); - const logger = getRunnerLogger(true); - const config = createTestConfig({ dbLocation }); + const logger = getRunnerLogger(true); + const config = createTestConfig({ dbLocation }); - // Mock the 
getFileOidsUnderPath function to return base OIDs - const baseOids = { - "unchanged.js": "aaa111", - "modified.js": "bbb222", - "deleted.js": "ccc333", - }; - const getFileOidsStubForBase = sinon - .stub(gitUtils, "getFileOidsUnderPath") - .resolves(baseOids); + // Mock the getFileOidsUnderPath function to return base OIDs + const baseOids = { + "unchanged.js": "aaa111", + "modified.js": "bbb222", + "deleted.js": "ccc333", + }; + const getFileOidsStubForBase = sinon + .stub(gitUtils, "getFileOidsUnderPath") + .resolves(baseOids); - // Write the base database OIDs file - await writeBaseDatabaseOidsFile(config, sourceRoot); - getFileOidsStubForBase.restore(); + // Write the base database OIDs file + await writeBaseDatabaseOidsFile(config, sourceRoot); + getFileOidsStubForBase.restore(); - // Mock the getFileOidsUnderPath function to return overlay OIDs - const currentOids = { - "unchanged.js": "aaa111", - "modified.js": "ddd444", // Changed OID - "added.js": "eee555", // New file - }; - const getFileOidsStubForOverlay = sinon - .stub(gitUtils, "getFileOidsUnderPath") - .resolves(currentOids); + // Mock the getFileOidsUnderPath function to return overlay OIDs + const currentOids = { + "unchanged.js": "aaa111", + "modified.js": "ddd444", // Changed OID + "added.js": "eee555", // New file + }; + const getFileOidsStubForOverlay = sinon + .stub(gitUtils, "getFileOidsUnderPath") + .resolves(currentOids); - // Write the overlay changes file, which uses the mocked overlay OIDs - // and the base database OIDs file - const getTempDirStub = sinon - .stub(actionsUtil, "getTemporaryDirectory") - .returns(tempDir); - const changesFilePath = await writeOverlayChangesFile( - config, - sourceRoot, - logger, - ); - getFileOidsStubForOverlay.restore(); - getTempDirStub.restore(); + // Write the overlay changes file, which uses the mocked overlay OIDs + // and the base database OIDs file + const getTempDirStub = sinon + .stub(actionsUtil, "getTemporaryDirectory") + .returns(tempDir); + 
const changesFilePath = await writeOverlayChangesFile( + config, + sourceRoot, + logger, + ); + getFileOidsStubForOverlay.restore(); + getTempDirStub.restore(); - const fileContent = await fs.promises.readFile(changesFilePath, "utf-8"); - const parsedContent = JSON.parse(fileContent) as { changes: string[] }; + const fileContent = await fs.promises.readFile(changesFilePath, "utf-8"); + const parsedContent = JSON.parse(fileContent) as { changes: string[] }; - t.deepEqual( - parsedContent.changes.sort(), - ["added.js", "deleted.js", "modified.js"], - "Should identify added, deleted, and modified files", - ); - }); -}); + t.deepEqual( + parsedContent.changes.sort(), + ["added.js", "deleted.js", "modified.js"], + "Should identify added, deleted, and modified files", + ); + }); + }, +); interface DownloadOverlayBaseDatabaseTestCase { overlayDatabaseMode: OverlayDatabaseMode; @@ -206,14 +209,14 @@ const testDownloadOverlayBaseDatabaseFromCache = test.macro({ title: (_, title) => `downloadOverlayBaseDatabaseFromCache: ${title}`, }); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns stats when successful", {}, true, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when mode is OverlayDatabaseMode.OverlayBase", { @@ -222,7 +225,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when mode is OverlayDatabaseMode.None", { @@ -231,7 +234,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when caching is disabled", { @@ -240,7 +243,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined in test mode", { @@ -249,7 +252,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when cache miss", { @@ -258,7 +261,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when download 
fails", { @@ -267,7 +270,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when downloaded database is invalid", { @@ -276,7 +279,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when downloaded database doesn't have an overlayBaseSpecifier", { @@ -285,7 +288,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when resolving database metadata fails", { @@ -294,7 +297,7 @@ test( false, ); -test( +test.serial( testDownloadOverlayBaseDatabaseFromCache, "returns undefined when filesystem error occurs", { @@ -303,7 +306,7 @@ test( false, ); -test("overlay-base database cache keys remain stable", async (t) => { +test.serial("overlay-base database cache keys remain stable", async (t) => { const logger = getRunnerLogger(true); const config = createTestConfig({ languages: ["python", "javascript"] }); const codeQlVersion = "2.23.0"; diff --git a/src/overlay/status.test.ts b/src/overlay/status.test.ts index 066b963b8..d9fa48d90 100644 --- a/src/overlay/status.test.ts +++ b/src/overlay/status.test.ts @@ -72,101 +72,110 @@ test("getCacheKey rounds disk space down to nearest 10 GiB", async (t) => { ); }); -test("shouldSkipOverlayAnalysis returns false when no cached status exists", async (t) => { - await withTmpDir(async (tmpDir) => { - process.env["RUNNER_TEMP"] = tmpDir; - const codeql = mockCodeQLVersion("2.20.0"); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); +test.serial( + "shouldSkipOverlayAnalysis returns false when no cached status exists", + async (t) => { + await withTmpDir(async (tmpDir) => { + process.env["RUNNER_TEMP"] = tmpDir; + const codeql = mockCodeQLVersion("2.20.0"); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); - sinon.stub(actionsCache, "restoreCache").resolves(undefined); + sinon.stub(actionsCache, 
"restoreCache").resolves(undefined); - const result = await shouldSkipOverlayAnalysis( - codeql, - ["javascript"], - makeDiskUsage(50), - logger, - ); + const result = await shouldSkipOverlayAnalysis( + codeql, + ["javascript"], + makeDiskUsage(50), + logger, + ); - t.false(result); - t.true( - messages.some( - (m) => - m.type === "debug" && - typeof m.message === "string" && - m.message.includes("No overlay status found in Actions cache."), - ), - ); - }); -}); - -test("shouldSkipOverlayAnalysis returns true when cached status indicates failed build", async (t) => { - await withTmpDir(async (tmpDir) => { - process.env["RUNNER_TEMP"] = tmpDir; - const codeql = mockCodeQLVersion("2.20.0"); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - - const status = { - attemptedToBuildOverlayBaseDatabase: true, - builtOverlayBaseDatabase: false, - }; - - // Stub restoreCache to write the status file and return a key - sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => { - const statusFile = paths[0]; - await fs.promises.mkdir(path.dirname(statusFile), { recursive: true }); - await fs.promises.writeFile(statusFile, JSON.stringify(status)); - return "found-key"; + t.false(result); + t.true( + messages.some( + (m) => + m.type === "debug" && + typeof m.message === "string" && + m.message.includes("No overlay status found in Actions cache."), + ), + ); }); + }, +); - const result = await shouldSkipOverlayAnalysis( - codeql, - ["javascript"], - makeDiskUsage(50), - logger, - ); +test.serial( + "shouldSkipOverlayAnalysis returns true when cached status indicates failed build", + async (t) => { + await withTmpDir(async (tmpDir) => { + process.env["RUNNER_TEMP"] = tmpDir; + const codeql = mockCodeQLVersion("2.20.0"); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); - t.true(result); - }); -}); + const status = { + attemptedToBuildOverlayBaseDatabase: true, + builtOverlayBaseDatabase: false, + 
}; -test("shouldSkipOverlayAnalysis returns false when cached status indicates successful build", async (t) => { - await withTmpDir(async (tmpDir) => { - process.env["RUNNER_TEMP"] = tmpDir; - const codeql = mockCodeQLVersion("2.20.0"); - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); + // Stub restoreCache to write the status file and return a key + sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => { + const statusFile = paths[0]; + await fs.promises.mkdir(path.dirname(statusFile), { recursive: true }); + await fs.promises.writeFile(statusFile, JSON.stringify(status)); + return "found-key"; + }); - const status = { - attemptedToBuildOverlayBaseDatabase: true, - builtOverlayBaseDatabase: true, - }; + const result = await shouldSkipOverlayAnalysis( + codeql, + ["javascript"], + makeDiskUsage(50), + logger, + ); - sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => { - const statusFile = paths[0]; - await fs.promises.mkdir(path.dirname(statusFile), { recursive: true }); - await fs.promises.writeFile(statusFile, JSON.stringify(status)); - return "found-key"; + t.true(result); }); + }, +); - const result = await shouldSkipOverlayAnalysis( - codeql, - ["javascript"], - makeDiskUsage(50), - logger, - ); +test.serial( + "shouldSkipOverlayAnalysis returns false when cached status indicates successful build", + async (t) => { + await withTmpDir(async (tmpDir) => { + process.env["RUNNER_TEMP"] = tmpDir; + const codeql = mockCodeQLVersion("2.20.0"); + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); - t.false(result); - t.true( - messages.some( - (m) => - m.type === "debug" && - typeof m.message === "string" && - m.message.includes( - "Cached overlay status does not indicate a previous unsuccessful attempt", - ), - ), - ); - }); -}); + const status = { + attemptedToBuildOverlayBaseDatabase: true, + builtOverlayBaseDatabase: true, + }; + + sinon.stub(actionsCache, 
"restoreCache").callsFake(async (paths) => { + const statusFile = paths[0]; + await fs.promises.mkdir(path.dirname(statusFile), { recursive: true }); + await fs.promises.writeFile(statusFile, JSON.stringify(status)); + return "found-key"; + }); + + const result = await shouldSkipOverlayAnalysis( + codeql, + ["javascript"], + makeDiskUsage(50), + logger, + ); + + t.false(result); + t.true( + messages.some( + (m) => + m.type === "debug" && + typeof m.message === "string" && + m.message.includes( + "Cached overlay status does not indicate a previous unsuccessful attempt", + ), + ), + ); + }); + }, +); diff --git a/src/overlay/status.ts b/src/overlay/status.ts index ac3d6e747..a57835ed1 100644 --- a/src/overlay/status.ts +++ b/src/overlay/status.ts @@ -13,12 +13,17 @@ import * as path from "path"; import * as actionsCache from "@actions/cache"; -import { getTemporaryDirectory } from "../actions-util"; +import { + getTemporaryDirectory, + getWorkflowRunAttempt, + getWorkflowRunID, +} from "../actions-util"; import { type CodeQL } from "../codeql"; import { Logger } from "../logging"; import { DiskUsage, getErrorMessage, + getRequiredEnvParam, waitForResultWithTimeLimit, } from "../util"; @@ -38,12 +43,43 @@ function getStatusFilePath(languages: string[]): string { ); } +/** Details of the job that recorded an overlay status. */ +interface JobInfo { + /** The check run ID. This is optional since it is not always available. */ + checkRunId?: number; + /** The workflow run ID. */ + workflowRunId: number; + /** The workflow run attempt number. */ + workflowRunAttempt: number; + /** The name of the job (from GITHUB_JOB). */ + name: string; +} + /** Status of an overlay analysis for a group of languages. */ export interface OverlayStatus { /** Whether the job attempted to build an overlay base database. */ attemptedToBuildOverlayBaseDatabase: boolean; /** Whether the job successfully built an overlay base database. 
*/ builtOverlayBaseDatabase: boolean; + /** Details of the job that recorded this status. */ + job?: JobInfo; +} + +/** Creates an `OverlayStatus` populated with the details of the current job. */ +export function createOverlayStatus( + attributes: Omit, + checkRunId?: number, +): OverlayStatus { + const job: JobInfo = { + workflowRunId: getWorkflowRunID(), + workflowRunAttempt: getWorkflowRunAttempt(), + name: getRequiredEnvParam("GITHUB_JOB"), + checkRunId, + }; + return { + ...attributes, + job, + }; } /** diff --git a/src/sarif/index.test.ts b/src/sarif/index.test.ts new file mode 100644 index 000000000..115d35013 --- /dev/null +++ b/src/sarif/index.test.ts @@ -0,0 +1,18 @@ +import * as fs from "fs"; + +import test from "ava"; + +import { setupTests } from "../testing-utils"; + +import { getToolNames, type Log } from "."; + +setupTests(test); + +test("getToolNames", (t) => { + const input = fs.readFileSync( + `${__dirname}/../../src/testdata/tool-names.sarif`, + "utf8", + ); + const toolNames = getToolNames(JSON.parse(input) as Log); + t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]); +}); diff --git a/src/sarif/index.ts b/src/sarif/index.ts new file mode 100644 index 000000000..3cd537daf --- /dev/null +++ b/src/sarif/index.ts @@ -0,0 +1,141 @@ +import * as fs from "fs"; + +import { Logger } from "../logging"; + +import * as sarif from "sarif"; + +export type * from "sarif"; + +// Extends `ToolComponent` with the non-standard `automationId` property we use. +export type RunKey = sarif.ToolComponent & { + /** + * Describes a SARIF run (either uniquely or not uniquely) based on the criteria used by + * Code Scanning to determine analysis categories + */ + automationId: string | undefined; +}; + +/** + * An error that occurred due to an invalid SARIF upload request. + */ +export class InvalidSarifUploadError extends Error {} + +/** + * Get the array of all the tool names contained in the given sarif contents. 
+ * + * Returns an array of unique string tool names. + */ +export function getToolNames(sarifFile: Partial): string[] { + const toolNames = {}; + + for (const run of sarifFile.runs || []) { + const tool = run.tool || {}; + const driver = tool.driver || {}; + if (typeof driver.name === "string" && driver.name.length > 0) { + toolNames[driver.name] = true; + } + } + + return Object.keys(toolNames); +} + +/** + * Reads the file pointed at by `sarifFilePath` and parses it as JSON. This function does + * not validate that the JSON represents a valid SARIF file. I.e. this function will only + * throw if the file cannot be read or does not contain valid JSON. + * + * @param sarifFilePath The file to read. + * @returns The resulting JSON value, cast to a SARIF `Log`. + */ +export function readSarifFile(sarifFilePath: string): Partial { + return JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as sarif.Log; +} + +// Takes a list of paths to sarif files and combines them together, +// returning the contents of the combined sarif file. +export function combineSarifFiles( + sarifFiles: string[], + logger: Logger, +): sarif.Log { + logger.info(`Loading SARIF file(s)`); + const runs: sarif.Run[] = []; + let version: sarif.Log.version | undefined = undefined; + + for (const sarifFile of sarifFiles) { + logger.debug(`Loading SARIF file: ${sarifFile}`); + const sarifLog = readSarifFile(sarifFile); + // If this is the first SARIF file we are reading, store the version from it so that we + // can put it in the combined SARIF. If not, then check that the versions match and + // throw an exception if they do not. + if (version === undefined) { + version = sarifLog.version; + } else if (version !== sarifLog.version) { + throw new InvalidSarifUploadError( + `Different SARIF versions encountered: ${version} and ${sarifLog.version}`, + ); + } + + runs.push(...(sarifLog?.runs || [])); + } + + // We can't guarantee that the SARIF files we load will have version properties. 
As a fallback, + // we set it to the expected version if we didn't find any other. + if (version === undefined) { + version = "2.1.0"; + } + + return { version, runs }; +} + +/** + * Checks whether all the runs in the given SARIF files were produced by CodeQL. + * @param sarifLogs The list of SARIF objects to check. + */ +export function areAllRunsProducedByCodeQL( + sarifLogs: Array>, +): boolean { + return sarifLogs.every((sarifLog: Partial) => { + return sarifLog.runs?.every((run) => run.tool?.driver?.name === "CodeQL"); + }); +} + +function createRunKey(run: sarif.Run): RunKey { + return { + name: run.tool?.driver?.name, + fullName: run.tool?.driver?.fullName, + version: run.tool?.driver?.version, + semanticVersion: run.tool?.driver?.semanticVersion, + guid: run.tool?.driver?.guid, + automationId: run.automationDetails?.id, + }; +} + +/** + * Checks whether all runs in the given SARIF files are unique (based on the + * criteria used by Code Scanning to determine analysis categories). + * @param sarifLogs The list of SARIF objects to check. + */ +export function areAllRunsUnique( + sarifLogs: Array>, +): boolean { + const keys = new Set(); + + for (const sarifLog of sarifLogs) { + if (sarifLog.runs === undefined) { + continue; + } + + for (const run of sarifLog.runs) { + const key = JSON.stringify(createRunKey(run)); + + // If the key already exists, the runs are not unique. 
+ if (keys.has(key)) { + return false; + } + + keys.add(key); + } + } + + return true; +} diff --git a/src/setup-codeql.test.ts b/src/setup-codeql.test.ts index 5b1587ab0..555352bd2 100644 --- a/src/setup-codeql.test.ts +++ b/src/setup-codeql.test.ts @@ -45,7 +45,7 @@ test.beforeEach(() => { initializeEnvironment("1.2.3"); }); -test("parse codeql bundle url version", (t) => { +test.serial("parse codeql bundle url version", (t) => { t.deepEqual( setupCodeql.getCodeQLURLVersion( "https://github.com/.../codeql-bundle-20200601/...", @@ -54,7 +54,7 @@ test("parse codeql bundle url version", (t) => { ); }); -test("convert to semver", (t) => { +test.serial("convert to semver", (t) => { const tests = { "20200601": "0.0.0-20200601", "20200601.0": "0.0.0-20200601.0", @@ -77,7 +77,7 @@ test("convert to semver", (t) => { } }); -test("getCodeQLActionRepository", (t) => { +test.serial("getCodeQLActionRepository", (t) => { const logger = getRunnerLogger(true); initializeEnvironment("1.2.3"); @@ -95,361 +95,383 @@ test("getCodeQLActionRepository", (t) => { t.deepEqual(repoEnv, "xxx/yyy"); }); -test("getCodeQLSource sets CLI version for a semver tagged bundle", async (t) => { - const features = createFeatures([]); +test.serial( + "getCodeQLSource sets CLI version for a semver tagged bundle", + async (t) => { + const features = createFeatures([]); - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const tagName = "codeql-bundle-v1.2.3"; - mockBundleDownloadApi({ tagName }); - const source = await setupCodeql.getCodeQLSource( - `https://github.com/github/codeql-action/releases/download/${tagName}/codeql-bundle-linux64.tar.gz`, - SAMPLE_DEFAULT_CLI_VERSION, - SAMPLE_DOTCOM_API_DETAILS, - GitHubVariant.DOTCOM, - false, - features, - getRunnerLogger(true), - ); + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + const tagName = "codeql-bundle-v1.2.3"; + mockBundleDownloadApi({ tagName }); + const source = await 
setupCodeql.getCodeQLSource( + `https://github.com/github/codeql-action/releases/download/${tagName}/codeql-bundle-linux64.tar.gz`, + SAMPLE_DEFAULT_CLI_VERSION, + SAMPLE_DOTCOM_API_DETAILS, + GitHubVariant.DOTCOM, + false, + features, + getRunnerLogger(true), + ); - t.is(source.sourceType, "download"); - t.is(source["cliVersion"], "1.2.3"); - }); -}); + t.is(source.sourceType, "download"); + t.is(source["cliVersion"], "1.2.3"); + }); + }, +); -test("getCodeQLSource correctly returns bundled CLI version when tools == linked", async (t) => { - const features = createFeatures([]); +test.serial( + "getCodeQLSource correctly returns bundled CLI version when tools == linked", + async (t) => { + const features = createFeatures([]); - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const source = await setupCodeql.getCodeQLSource( - "linked", - SAMPLE_DEFAULT_CLI_VERSION, - SAMPLE_DOTCOM_API_DETAILS, - GitHubVariant.DOTCOM, - false, - features, - getRunnerLogger(true), - ); + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + const source = await setupCodeql.getCodeQLSource( + "linked", + SAMPLE_DEFAULT_CLI_VERSION, + SAMPLE_DOTCOM_API_DETAILS, + GitHubVariant.DOTCOM, + false, + features, + getRunnerLogger(true), + ); - t.is(source.toolsVersion, LINKED_CLI_VERSION.cliVersion); - t.is(source.sourceType, "download"); - }); -}); + t.is(source.toolsVersion, LINKED_CLI_VERSION.cliVersion); + t.is(source.sourceType, "download"); + }); + }, +); -test("getCodeQLSource correctly returns bundled CLI version when tools == latest", async (t) => { - const loggedMessages: LoggedMessage[] = []; - const logger = getRecordingLogger(loggedMessages); - const features = createFeatures([]); +test.serial( + "getCodeQLSource correctly returns bundled CLI version when tools == latest", + async (t) => { + const loggedMessages: LoggedMessage[] = []; + const logger = getRecordingLogger(loggedMessages); + const features = createFeatures([]); - await 
withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const source = await setupCodeql.getCodeQLSource( - "latest", - SAMPLE_DEFAULT_CLI_VERSION, - SAMPLE_DOTCOM_API_DETAILS, - GitHubVariant.DOTCOM, - false, - features, - logger, - ); + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + const source = await setupCodeql.getCodeQLSource( + "latest", + SAMPLE_DEFAULT_CLI_VERSION, + SAMPLE_DOTCOM_API_DETAILS, + GitHubVariant.DOTCOM, + false, + features, + logger, + ); - // First, ensure that the CLI version is the linked version, so that backwards - // compatibility is maintained. - t.is(source.toolsVersion, LINKED_CLI_VERSION.cliVersion); - t.is(source.sourceType, "download"); + // First, ensure that the CLI version is the linked version, so that backwards + // compatibility is maintained. + t.is(source.toolsVersion, LINKED_CLI_VERSION.cliVersion); + t.is(source.sourceType, "download"); - // Afterwards, ensure that we see the deprecation message in the log. - const expected_message: string = - "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required."; - t.assert( - loggedMessages.some( - (msg) => - typeof msg.message === "string" && - msg.message.includes(expected_message), - ), - ); - }); -}); - -test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to use linked tools", async (t) => { - const loggedMessages: LoggedMessage[] = []; - const logger = getRecordingLogger(loggedMessages); - const features = createFeatures([]); - - // Stub the downloadCodeQL function to prevent downloading artefacts - // during testing from being called. 
- sinon.stub(setupCodeql, "downloadCodeQL").resolves({ - codeqlFolder: "codeql", - statusReport: { - combinedDurationMs: 500, - compressionMethod: "gzip", - downloadDurationMs: 200, - extractionDurationMs: 300, - streamExtraction: false, - toolsUrl: "toolsUrl", - }, - toolsVersion: LINKED_CLI_VERSION.cliVersion, - }); - - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const result = await setupCodeql.setupCodeQLBundle( - "linked", - SAMPLE_DOTCOM_API_DETAILS, - "tmp/codeql_action_test/", - GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - logger, - ); - - // Basic sanity check that the version we got back is indeed - // the linked (default) CLI version. - t.is(result.toolsVersion, LINKED_CLI_VERSION.cliVersion); - - // Ensure message logging CodeQL CLI version was present in user logs. - const expected_message: string = `Using CodeQL CLI version ${LINKED_CLI_VERSION.cliVersion}`; - t.assert( - loggedMessages.some( - (msg) => - typeof msg.message === "string" && - msg.message.includes(expected_message), - ), - ); - }); -}); - -test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to download a non-default bundle", async (t) => { - const loggedMessages: LoggedMessage[] = []; - const logger = getRecordingLogger(loggedMessages); - const features = createFeatures([]); - - const bundleUrl = - "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.16.0/codeql-bundle-linux64.tar.gz"; - const expectedVersion = "2.16.0"; - - // Stub the downloadCodeQL function to prevent downloading artefacts - // during testing from being called. 
- sinon.stub(setupCodeql, "downloadCodeQL").resolves({ - codeqlFolder: "codeql", - statusReport: { - combinedDurationMs: 500, - compressionMethod: "gzip", - downloadDurationMs: 200, - extractionDurationMs: 300, - streamExtraction: false, - toolsUrl: bundleUrl, - }, - toolsVersion: expectedVersion, - }); - - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const result = await setupCodeql.setupCodeQLBundle( - bundleUrl, - SAMPLE_DOTCOM_API_DETAILS, - "tmp/codeql_action_test/", - GitHubVariant.DOTCOM, - SAMPLE_DEFAULT_CLI_VERSION, - features, - logger, - ); - - // Basic sanity check that the version we got back is indeed the version that the - // bundle contains.. - t.is(result.toolsVersion, expectedVersion); - - // Ensure message logging CodeQL CLI version was present in user logs. - const expected_message: string = `Using CodeQL CLI version 2.16.0 sourced from ${bundleUrl} .`; - t.assert( - loggedMessages.some( - (msg) => - typeof msg.message === "string" && - msg.message.includes(expected_message), - ), - ); - }); -}); - -test("getCodeQLSource correctly returns nightly CLI version when tools == nightly", async (t) => { - const loggedMessages: LoggedMessage[] = []; - const logger = getRecordingLogger(loggedMessages); - const features = createFeatures([]); - - const expectedDate = "30260213"; - const expectedTag = `codeql-bundle-${expectedDate}`; - - // Ensure that we consistently select "zstd" for the test. 
- sinon.stub(process, "platform").value("linux"); - sinon.stub(tar, "isZstdAvailable").resolves({ - available: true, - foundZstdBinary: true, - }); - - const client = github.getOctokit("123"); - const listReleases = sinon.stub(client.rest.repos, "listReleases"); - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - listReleases.resolves({ - data: [{ tag_name: expectedTag }], - } as any); - sinon.stub(api, "getApiClient").value(() => client); - - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - const source = await setupCodeql.getCodeQLSource( - "nightly", - SAMPLE_DEFAULT_CLI_VERSION, - SAMPLE_DOTCOM_API_DETAILS, - GitHubVariant.DOTCOM, - false, - features, - logger, - ); - - // Check that the `CodeQLToolsSource` object matches our expectations. - const expectedVersion = `0.0.0-${expectedDate}`; - const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`; - t.deepEqual(source, { - bundleVersion: expectedDate, - cliVersion: undefined, - codeqlURL: expectedURL, - compressionMethod: "zstd", - sourceType: "download", - toolsVersion: expectedVersion, - } satisfies setupCodeql.CodeQLToolsSource); - - // Afterwards, ensure that we see the expected messages in the log. - checkExpectedLogMessages(t, loggedMessages, [ - "Using the latest CodeQL CLI nightly, as requested by 'tools: nightly'.", - `Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`, - `Attempting to obtain CodeQL tools. 
CLI version: unknown, bundle tag name: ${expectedTag}`, - `Using CodeQL CLI sourced from ${expectedURL}`, - ]); - }); -}); - -test("getCodeQLSource correctly returns nightly CLI version when forced by FF", async (t) => { - const loggedMessages: LoggedMessage[] = []; - const logger = getRecordingLogger(loggedMessages); - const features = createFeatures([Feature.ForceNightly]); - - const expectedDate = "30260213"; - const expectedTag = `codeql-bundle-${expectedDate}`; - - // Ensure that we consistently select "zstd" for the test. - sinon.stub(process, "platform").value("linux"); - sinon.stub(tar, "isZstdAvailable").resolves({ - available: true, - foundZstdBinary: true, - }); - - const client = github.getOctokit("123"); - const listReleases = sinon.stub(client.rest.repos, "listReleases"); - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - listReleases.resolves({ - data: [{ tag_name: expectedTag }], - } as any); - sinon.stub(api, "getApiClient").value(() => client); - - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - process.env["GITHUB_EVENT_NAME"] = "dynamic"; - - const source = await setupCodeql.getCodeQLSource( - undefined, - SAMPLE_DEFAULT_CLI_VERSION, - SAMPLE_DOTCOM_API_DETAILS, - GitHubVariant.DOTCOM, - false, - features, - logger, - ); - - // Check that the `CodeQLToolsSource` object matches our expectations. - const expectedVersion = `0.0.0-${expectedDate}`; - const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`; - t.deepEqual(source, { - bundleVersion: expectedDate, - cliVersion: undefined, - codeqlURL: expectedURL, - compressionMethod: "zstd", - sourceType: "download", - toolsVersion: expectedVersion, - } satisfies setupCodeql.CodeQLToolsSource); - - // Afterwards, ensure that we see the expected messages in the log. 
- checkExpectedLogMessages(t, loggedMessages, [ - `Using the latest CodeQL CLI nightly, as forced by the ${Feature.ForceNightly} feature flag.`, - `Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`, - `Attempting to obtain CodeQL tools. CLI version: unknown, bundle tag name: ${expectedTag}`, - `Using CodeQL CLI sourced from ${expectedURL}`, - ]); - }); -}); - -test("getCodeQLSource correctly returns latest version from toolcache when tools == toolcache", async (t) => { - const loggedMessages: LoggedMessage[] = []; - const logger = getRecordingLogger(loggedMessages); - const features = createFeatures([Feature.AllowToolcacheInput]); - - const latestToolcacheVersion = "3.2.1"; - const latestVersionPath = "/path/to/latest"; - const testVersions = ["2.3.1", latestToolcacheVersion, "1.2.3"]; - const findAllVersionsStub = sinon - .stub(toolcache, "findAllVersions") - .returns(testVersions); - const findStub = sinon.stub(toolcache, "find"); - findStub - .withArgs("CodeQL", latestToolcacheVersion) - .returns(latestVersionPath); - - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - process.env["GITHUB_EVENT_NAME"] = "dynamic"; - - const source = await setupCodeql.getCodeQLSource( - "toolcache", - SAMPLE_DEFAULT_CLI_VERSION, - SAMPLE_DOTCOM_API_DETAILS, - GitHubVariant.DOTCOM, - false, - features, - logger, - ); - - // Check that the toolcache functions were called with the expected arguments - t.assert( - findAllVersionsStub.calledOnceWith("CodeQL"), - `toolcache.findAllVersions("CodeQL") wasn't called`, - ); - t.assert( - findStub.calledOnceWith("CodeQL", latestToolcacheVersion), - `toolcache.find("CodeQL", ${latestToolcacheVersion}) wasn't called`, - ); - - // Check that `sourceType` and `toolsVersion` match expectations. - t.is(source.sourceType, "toolcache"); - t.is(source.toolsVersion, latestToolcacheVersion); - - // Check that key messages we would expect to find in the log are present. 
- const expectedMessages: string[] = [ - `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`, - `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`, - `Using CodeQL CLI version ${latestToolcacheVersion} from toolcache at ${latestVersionPath}`, - ]; - for (const expectedMessage of expectedMessages) { + // Afterwards, ensure that we see the deprecation message in the log. + const expected_message: string = + "`tools: latest` has been renamed to `tools: linked`, but the old name is still supported. No action is required."; t.assert( loggedMessages.some( (msg) => typeof msg.message === "string" && - msg.message.includes(expectedMessage), + msg.message.includes(expected_message), ), - `Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${loggedMessages.map((m) => ` - '${m.message}'`).join("\n")}`, ); - } - }); -}); + }); + }, +); + +test.serial( + "setupCodeQLBundle logs the CodeQL CLI version being used when asked to use linked tools", + async (t) => { + const loggedMessages: LoggedMessage[] = []; + const logger = getRecordingLogger(loggedMessages); + const features = createFeatures([]); + + // Stub the downloadCodeQL function to prevent downloading artefacts + // during testing from being called. 
+ sinon.stub(setupCodeql, "downloadCodeQL").resolves({ + codeqlFolder: "codeql", + statusReport: { + combinedDurationMs: 500, + compressionMethod: "gzip", + downloadDurationMs: 200, + extractionDurationMs: 300, + streamExtraction: false, + toolsUrl: "toolsUrl", + }, + toolsVersion: LINKED_CLI_VERSION.cliVersion, + }); + + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + const result = await setupCodeql.setupCodeQLBundle( + "linked", + SAMPLE_DOTCOM_API_DETAILS, + "tmp/codeql_action_test/", + GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + logger, + ); + + // Basic sanity check that the version we got back is indeed + // the linked (default) CLI version. + t.is(result.toolsVersion, LINKED_CLI_VERSION.cliVersion); + + // Ensure message logging CodeQL CLI version was present in user logs. + const expected_message: string = `Using CodeQL CLI version ${LINKED_CLI_VERSION.cliVersion}`; + t.assert( + loggedMessages.some( + (msg) => + typeof msg.message === "string" && + msg.message.includes(expected_message), + ), + ); + }); + }, +); + +test.serial( + "setupCodeQLBundle logs the CodeQL CLI version being used when asked to download a non-default bundle", + async (t) => { + const loggedMessages: LoggedMessage[] = []; + const logger = getRecordingLogger(loggedMessages); + const features = createFeatures([]); + + const bundleUrl = + "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.16.0/codeql-bundle-linux64.tar.gz"; + const expectedVersion = "2.16.0"; + + // Stub the downloadCodeQL function to prevent downloading artefacts + // during testing from being called. 
+ sinon.stub(setupCodeql, "downloadCodeQL").resolves({ + codeqlFolder: "codeql", + statusReport: { + combinedDurationMs: 500, + compressionMethod: "gzip", + downloadDurationMs: 200, + extractionDurationMs: 300, + streamExtraction: false, + toolsUrl: bundleUrl, + }, + toolsVersion: expectedVersion, + }); + + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + const result = await setupCodeql.setupCodeQLBundle( + bundleUrl, + SAMPLE_DOTCOM_API_DETAILS, + "tmp/codeql_action_test/", + GitHubVariant.DOTCOM, + SAMPLE_DEFAULT_CLI_VERSION, + features, + logger, + ); + + // Basic sanity check that the version we got back is indeed the version that the + // bundle contains.. + t.is(result.toolsVersion, expectedVersion); + + // Ensure message logging CodeQL CLI version was present in user logs. + const expected_message: string = `Using CodeQL CLI version 2.16.0 sourced from ${bundleUrl} .`; + t.assert( + loggedMessages.some( + (msg) => + typeof msg.message === "string" && + msg.message.includes(expected_message), + ), + ); + }); + }, +); + +test.serial( + "getCodeQLSource correctly returns nightly CLI version when tools == nightly", + async (t) => { + const loggedMessages: LoggedMessage[] = []; + const logger = getRecordingLogger(loggedMessages); + const features = createFeatures([]); + + const expectedDate = "30260213"; + const expectedTag = `codeql-bundle-${expectedDate}`; + + // Ensure that we consistently select "zstd" for the test. 
+ sinon.stub(process, "platform").value("linux"); + sinon.stub(tar, "isZstdAvailable").resolves({ + available: true, + foundZstdBinary: true, + }); + + const client = github.getOctokit("123"); + const listReleases = sinon.stub(client.rest.repos, "listReleases"); + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + listReleases.resolves({ + data: [{ tag_name: expectedTag }], + } as any); + sinon.stub(api, "getApiClient").value(() => client); + + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + const source = await setupCodeql.getCodeQLSource( + "nightly", + SAMPLE_DEFAULT_CLI_VERSION, + SAMPLE_DOTCOM_API_DETAILS, + GitHubVariant.DOTCOM, + false, + features, + logger, + ); + + // Check that the `CodeQLToolsSource` object matches our expectations. + const expectedVersion = `0.0.0-${expectedDate}`; + const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`; + t.deepEqual(source, { + bundleVersion: expectedDate, + cliVersion: undefined, + codeqlURL: expectedURL, + compressionMethod: "zstd", + sourceType: "download", + toolsVersion: expectedVersion, + } satisfies setupCodeql.CodeQLToolsSource); + + // Afterwards, ensure that we see the expected messages in the log. + checkExpectedLogMessages(t, loggedMessages, [ + "Using the latest CodeQL CLI nightly, as requested by 'tools: nightly'.", + `Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`, + `Attempting to obtain CodeQL tools. 
CLI version: unknown, bundle tag name: ${expectedTag}`, + `Using CodeQL CLI sourced from ${expectedURL}`, + ]); + }); + }, +); + +test.serial( + "getCodeQLSource correctly returns nightly CLI version when forced by FF", + async (t) => { + const loggedMessages: LoggedMessage[] = []; + const logger = getRecordingLogger(loggedMessages); + const features = createFeatures([Feature.ForceNightly]); + + const expectedDate = "30260213"; + const expectedTag = `codeql-bundle-${expectedDate}`; + + // Ensure that we consistently select "zstd" for the test. + sinon.stub(process, "platform").value("linux"); + sinon.stub(tar, "isZstdAvailable").resolves({ + available: true, + foundZstdBinary: true, + }); + + const client = github.getOctokit("123"); + const listReleases = sinon.stub(client.rest.repos, "listReleases"); + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + listReleases.resolves({ + data: [{ tag_name: expectedTag }], + } as any); + sinon.stub(api, "getApiClient").value(() => client); + + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir, { GITHUB_EVENT_NAME: "dynamic" }); + + const source = await setupCodeql.getCodeQLSource( + undefined, + SAMPLE_DEFAULT_CLI_VERSION, + SAMPLE_DOTCOM_API_DETAILS, + GitHubVariant.DOTCOM, + false, + features, + logger, + ); + + // Check that the `CodeQLToolsSource` object matches our expectations. + const expectedVersion = `0.0.0-${expectedDate}`; + const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`; + t.deepEqual(source, { + bundleVersion: expectedDate, + cliVersion: undefined, + codeqlURL: expectedURL, + compressionMethod: "zstd", + sourceType: "download", + toolsVersion: expectedVersion, + } satisfies setupCodeql.CodeQLToolsSource); + + // Afterwards, ensure that we see the expected messages in the log. 
+ checkExpectedLogMessages(t, loggedMessages, [ + `Using the latest CodeQL CLI nightly, as forced by the ${Feature.ForceNightly} feature flag.`, + `Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`, + `Attempting to obtain CodeQL tools. CLI version: unknown, bundle tag name: ${expectedTag}`, + `Using CodeQL CLI sourced from ${expectedURL}`, + ]); + }); + }, +); + +test.serial( + "getCodeQLSource correctly returns latest version from toolcache when tools == toolcache", + async (t) => { + const loggedMessages: LoggedMessage[] = []; + const logger = getRecordingLogger(loggedMessages); + const features = createFeatures([Feature.AllowToolcacheInput]); + + const latestToolcacheVersion = "3.2.1"; + const latestVersionPath = "/path/to/latest"; + const testVersions = ["2.3.1", latestToolcacheVersion, "1.2.3"]; + const findAllVersionsStub = sinon + .stub(toolcache, "findAllVersions") + .returns(testVersions); + const findStub = sinon.stub(toolcache, "find"); + findStub + .withArgs("CodeQL", latestToolcacheVersion) + .returns(latestVersionPath); + + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir, { GITHUB_EVENT_NAME: "dynamic" }); + + const source = await setupCodeql.getCodeQLSource( + "toolcache", + SAMPLE_DEFAULT_CLI_VERSION, + SAMPLE_DOTCOM_API_DETAILS, + GitHubVariant.DOTCOM, + false, + features, + logger, + ); + + // Check that the toolcache functions were called with the expected arguments + t.assert( + findAllVersionsStub.calledOnceWith("CodeQL"), + `toolcache.findAllVersions("CodeQL") wasn't called`, + ); + t.assert( + findStub.calledOnceWith("CodeQL", latestToolcacheVersion), + `toolcache.find("CodeQL", ${latestToolcacheVersion}) wasn't called`, + ); + + // Check that `sourceType` and `toolsVersion` match expectations. + t.is(source.sourceType, "toolcache"); + t.is(source.toolsVersion, latestToolcacheVersion); + + // Check that key messages we would expect to find in the log are present. 
+ const expectedMessages: string[] = [ + `Attempting to use the latest CodeQL CLI version in the toolcache, as requested by 'tools: toolcache'.`, + `CLI version ${latestToolcacheVersion} is the latest version in the toolcache.`, + `Using CodeQL CLI version ${latestToolcacheVersion} from toolcache at ${latestVersionPath}`, + ]; + for (const expectedMessage of expectedMessages) { + t.assert( + loggedMessages.some( + (msg) => + typeof msg.message === "string" && + msg.message.includes(expectedMessage), + ), + `Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${loggedMessages.map((m) => ` - '${m.message}'`).join("\n")}`, + ); + } + }); + }, +); const toolcacheInputFallbackMacro = test.macro({ exec: async ( @@ -511,7 +533,7 @@ const toolcacheInputFallbackMacro = test.macro({ `getCodeQLSource falls back to downloading the CLI if ${providedTitle}`, }); -test( +test.serial( "the toolcache doesn't have a CodeQL CLI when tools == toolcache", toolcacheInputFallbackMacro, [Feature.AllowToolcacheInput], @@ -523,7 +545,7 @@ test( ], ); -test( +test.serial( "the workflow trigger is not `dynamic`", toolcacheInputFallbackMacro, [Feature.AllowToolcacheInput], @@ -534,7 +556,7 @@ test( ], ); -test( +test.serial( "the feature flag is not enabled", toolcacheInputFallbackMacro, [], @@ -543,24 +565,36 @@ test( [`Ignoring 'tools: toolcache' because the feature is not enabled.`], ); -test('tryGetTagNameFromUrl extracts the right tag name for a repo name containing "codeql-bundle"', (t) => { - t.is( - setupCodeql.tryGetTagNameFromUrl( - "https://github.com/org/codeql-bundle-testing/releases/download/codeql-bundle-v2.19.0/codeql-bundle-linux64.tar.zst", - getRunnerLogger(true), - ), - "codeql-bundle-v2.19.0", - ); -}); +test.serial( + 'tryGetTagNameFromUrl extracts the right tag name for a repo name containing "codeql-bundle"', + (t) => { + t.is( + setupCodeql.tryGetTagNameFromUrl( + 
"https://github.com/org/codeql-bundle-testing/releases/download/codeql-bundle-v2.19.0/codeql-bundle-linux64.tar.zst", + getRunnerLogger(true), + ), + "codeql-bundle-v2.19.0", + ); + }, +); -test("getLatestToolcacheVersion returns undefined if there are no CodeQL CLIs in the toolcache", (t) => { - sinon.stub(toolcache, "findAllVersions").returns([]); - t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), undefined); -}); +test.serial( + "getLatestToolcacheVersion returns undefined if there are no CodeQL CLIs in the toolcache", + (t) => { + sinon.stub(toolcache, "findAllVersions").returns([]); + t.is( + setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), + undefined, + ); + }, +); -test("getLatestToolcacheVersion returns latest version in the toolcache", (t) => { - const testVersions = ["2.3.1", "3.2.1", "1.2.3"]; - sinon.stub(toolcache, "findAllVersions").returns(testVersions); +test.serial( + "getLatestToolcacheVersion returns latest version in the toolcache", + (t) => { + const testVersions = ["2.3.1", "3.2.1", "1.2.3"]; + sinon.stub(toolcache, "findAllVersions").returns(testVersions); - t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), "3.2.1"); -}); + t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), "3.2.1"); + }, +); diff --git a/src/start-proxy-action.ts b/src/start-proxy-action.ts index 438d565ae..29c76643e 100644 --- a/src/start-proxy-action.ts +++ b/src/start-proxy-action.ts @@ -5,7 +5,7 @@ import * as core from "@actions/core"; import * as actionsUtil from "./actions-util"; import { getGitHubVersion } from "./api-client"; -import { FeatureEnablement, initFeatures } from "./feature-flags"; +import { Feature, FeatureEnablement, initFeatures } from "./feature-flags"; import { KnownLanguage } from "./languages"; import { getActionsLogger, Logger } from "./logging"; import { getRepositoryNwo } from "./repository"; @@ -58,12 +58,18 @@ async function run(startedAt: Date) { const languageInput = 
actionsUtil.getOptionalInput("language"); language = languageInput ? parseLanguage(languageInput) : undefined; + // Query the FF for whether we should use the reduced registry mapping. + const skipUnusedRegistries = await features.getValue( + Feature.StartProxyRemoveUnusedRegistries, + ); + // Get the registry configurations from one of the inputs. const credentials = getCredentials( logger, actionsUtil.getOptionalInput("registry_secrets"), actionsUtil.getOptionalInput("registries_credentials"), language, + skipUnusedRegistries, ); if (credentials.length === 0) { diff --git a/src/start-proxy.test.ts b/src/start-proxy.test.ts index b1c4926f8..a4dd8d589 100644 --- a/src/start-proxy.test.ts +++ b/src/start-proxy.test.ts @@ -87,14 +87,14 @@ const sendFailedStatusReportTest = test.macro({ title: (providedTitle = "") => `sendFailedStatusReport - ${providedTitle}`, }); -test( +test.serial( "reports generic error message for non-StartProxyError error", sendFailedStatusReportTest, new Error("Something went wrong today"), "Error from start-proxy Action omitted (Error).", ); -test( +test.serial( "reports generic error message for non-StartProxyError error with safe error message", sendFailedStatusReportTest, new Error( @@ -105,7 +105,7 @@ test( "Error from start-proxy Action omitted (Error).", ); -test( +test.serial( "reports generic error message for ConfigurationError error", sendFailedStatusReportTest, new ConfigurationError("Something went wrong today"), @@ -124,110 +124,125 @@ const mixedCredentials = [ { type: "git_source", host: "github.com/github", token: "mno" }, ]; -test("getCredentials prefers registriesCredentials over registrySecrets", async (t) => { - const registryCredentials = Buffer.from( - JSON.stringify([ - { type: "npm_registry", host: "npm.pkg.github.com", token: "abc" }, - ]), - ).toString("base64"); - const registrySecrets = JSON.stringify([ - { type: "npm_registry", host: "registry.npmjs.org", token: "def" }, - ]); +test.serial( + "getCredentials 
prefers registriesCredentials over registrySecrets", + async (t) => { + const registryCredentials = Buffer.from( + JSON.stringify([ + { type: "npm_registry", host: "npm.pkg.github.com", token: "abc" }, + ]), + ).toString("base64"); + const registrySecrets = JSON.stringify([ + { type: "npm_registry", host: "registry.npmjs.org", token: "def" }, + ]); - const credentials = startProxyExports.getCredentials( - getRunnerLogger(true), - registrySecrets, - registryCredentials, - undefined, - ); - t.is(credentials.length, 1); - t.is(credentials[0].host, "npm.pkg.github.com"); -}); + const credentials = startProxyExports.getCredentials( + getRunnerLogger(true), + registrySecrets, + registryCredentials, + undefined, + ); + t.is(credentials.length, 1); + t.is(credentials[0].host, "npm.pkg.github.com"); + }, +); -test("getCredentials throws an error when configurations are not an array", async (t) => { - const registryCredentials = Buffer.from( - JSON.stringify({ type: "npm_registry", token: "abc" }), - ).toString("base64"); +test.serial( + "getCredentials throws an error when configurations are not an array", + async (t) => { + const registryCredentials = Buffer.from( + JSON.stringify({ type: "npm_registry", token: "abc" }), + ).toString("base64"); - t.throws( - () => - startProxyExports.getCredentials( - getRunnerLogger(true), - undefined, - registryCredentials, - undefined, - ), - { - message: - "Expected credentials data to be an array of configurations, but it is not.", - }, - ); -}); - -test("getCredentials throws error when credential is not an object", async (t) => { - const testCredentials = [["foo"], [null]].map(toEncodedJSON); - - for (const testCredential of testCredentials) { t.throws( () => startProxyExports.getCredentials( getRunnerLogger(true), undefined, - testCredential, + registryCredentials, undefined, ), { - message: "Invalid credentials - must be an object", + message: + "Expected credentials data to be an array of configurations, but it is not.", }, ); - 
} -}); + }, +); -test("getCredentials throws error when credential is missing type", async (t) => { - const testCredentials = [[{ token: "abc", url: "https://localhost" }]].map( - toEncodedJSON, - ); +test.serial( + "getCredentials throws error when credential is not an object", + async (t) => { + const testCredentials = [["foo"], [null]].map(toEncodedJSON); - for (const testCredential of testCredentials) { - t.throws( - () => - startProxyExports.getCredentials( - getRunnerLogger(true), - undefined, - testCredential, - undefined, - ), - { - message: "Invalid credentials - must have a type", - }, + for (const testCredential of testCredentials) { + t.throws( + () => + startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + testCredential, + undefined, + ), + { + message: "Invalid credentials - must be an object", + }, + ); + } + }, +); + +test.serial( + "getCredentials throws error when credential is missing type", + async (t) => { + const testCredentials = [[{ token: "abc", url: "https://localhost" }]].map( + toEncodedJSON, ); - } -}); -test("getCredentials throws error when credential missing host and url", async (t) => { - const testCredentials = [ - [{ type: "npm_registry", token: "abc" }], - [{ type: "npm_registry", token: "abc", host: null }], - [{ type: "npm_registry", token: "abc", url: null }], - ].map(toEncodedJSON); + for (const testCredential of testCredentials) { + t.throws( + () => + startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + testCredential, + undefined, + ), + { + message: "Invalid credentials - must have a type", + }, + ); + } + }, +); - for (const testCredential of testCredentials) { - t.throws( - () => - startProxyExports.getCredentials( - getRunnerLogger(true), - undefined, - testCredential, - undefined, - ), - { - message: "Invalid credentials - must specify host or url", - }, - ); - } -}); +test.serial( + "getCredentials throws error when credential missing host and url", + async (t) => { + const 
testCredentials = [ + [{ type: "npm_registry", token: "abc" }], + [{ type: "npm_registry", token: "abc", host: null }], + [{ type: "npm_registry", token: "abc", url: null }], + ].map(toEncodedJSON); -test("getCredentials filters by language when specified", async (t) => { + for (const testCredential of testCredentials) { + t.throws( + () => + startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + testCredential, + undefined, + ), + { + message: "Invalid credentials - must specify host or url", + }, + ); + } + }, +); + +test.serial("getCredentials filters by language when specified", async (t) => { const credentials = startProxyExports.getCredentials( getRunnerLogger(true), undefined, @@ -238,97 +253,145 @@ test("getCredentials filters by language when specified", async (t) => { t.is(credentials[0].type, "maven_repository"); }); -test("getCredentials returns all for a language when specified", async (t) => { - const credentials = startProxyExports.getCredentials( - getRunnerLogger(true), - undefined, - toEncodedJSON(mixedCredentials), - KnownLanguage.go, - ); - t.is(credentials.length, 2); - - const credentialsTypes = credentials.map((c) => c.type); - t.assert(credentialsTypes.includes("goproxy_server")); - t.assert(credentialsTypes.includes("git_source")); -}); - -test("getCredentials returns all credentials when no language specified", async (t) => { - const credentialsInput = toEncodedJSON(mixedCredentials); - - const credentials = startProxyExports.getCredentials( - getRunnerLogger(true), - undefined, - credentialsInput, - undefined, - ); - t.is(credentials.length, mixedCredentials.length); -}); - -test("getCredentials throws an error when non-printable characters are used", async (t) => { - const invalidCredentials = [ - { type: "nuget_feed", host: "1nuget.pkg.github.com", token: "abc\u0000" }, // Non-printable character in token - { type: "nuget_feed", host: "2nuget.pkg.github.com\u0001" }, // Non-printable character in host - { - type: 
"nuget_feed", - host: "3nuget.pkg.github.com", - password: "ghi\u0002", - }, // Non-printable character in password - { type: "nuget_feed", host: "4nuget.pkg.github.com", password: "ghi\x00" }, // Non-printable character in password - ]; - - for (const invalidCredential of invalidCredentials) { - const credentialsInput = Buffer.from( - JSON.stringify([invalidCredential]), - ).toString("base64"); - - t.throws( - () => - startProxyExports.getCredentials( - getRunnerLogger(true), - undefined, - credentialsInput, - undefined, - ), - { - message: - "Invalid credentials - fields must contain only printable characters", - }, +test.serial( + "getCredentials returns all for a language when specified", + async (t) => { + const credentials = startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + toEncodedJSON(mixedCredentials), + KnownLanguage.go, ); - } -}); + t.is(credentials.length, 2); -test("getCredentials logs a warning when a PAT is used without a username", async (t) => { - const loggedMessages = []; - const logger = getRecordingLogger(loggedMessages); - const likelyWrongCredentials = toEncodedJSON([ - { - type: "git_server", - host: "https://github.com/", - password: `ghp_${makeTestToken()}`, - }, - ]); + const credentialsTypes = credentials.map((c) => c.type); + t.assert(credentialsTypes.includes("goproxy_server")); + t.assert(credentialsTypes.includes("git_source")); + }, +); - const results = startProxyExports.getCredentials( - logger, - undefined, - likelyWrongCredentials, - undefined, - ); +test.serial( + "getCredentials returns all credentials when no language specified", + async (t) => { + const credentialsInput = toEncodedJSON(mixedCredentials); - // The configuration should be accepted, despite the likely problem. 
- t.assert(results); - t.is(results.length, 1); - t.is(results[0].type, "git_server"); - t.is(results[0].host, "https://github.com/"); - t.assert(results[0].password?.startsWith("ghp_")); + const credentials = startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + credentialsInput, + undefined, + ); + t.is(credentials.length, mixedCredentials.length); + }, +); - // A warning should have been logged. - checkExpectedLogMessages(t, loggedMessages, [ - "using a GitHub Personal Access Token (PAT), but no username was provided", - ]); -}); +test.serial( + "getCredentials throws an error when non-printable characters are used", + async (t) => { + const invalidCredentials = [ + { type: "nuget_feed", host: "1nuget.pkg.github.com", token: "abc\u0000" }, // Non-printable character in token + { type: "nuget_feed", host: "2nuget.pkg.github.com\u0001" }, // Non-printable character in host + { + type: "nuget_feed", + host: "3nuget.pkg.github.com", + password: "ghi\u0002", + }, // Non-printable character in password + { + type: "nuget_feed", + host: "4nuget.pkg.github.com", + password: "ghi\x00", + }, // Non-printable character in password + ]; -test("parseLanguage", async (t) => { + for (const invalidCredential of invalidCredentials) { + const credentialsInput = Buffer.from( + JSON.stringify([invalidCredential]), + ).toString("base64"); + + t.throws( + () => + startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + credentialsInput, + undefined, + ), + { + message: + "Invalid credentials - fields must contain only printable characters", + }, + ); + } + }, +); + +test.serial( + "getCredentials logs a warning when a PAT is used without a username", + async (t) => { + const loggedMessages = []; + const logger = getRecordingLogger(loggedMessages); + const likelyWrongCredentials = toEncodedJSON([ + { + type: "git_server", + host: "https://github.com/", + password: `ghp_${makeTestToken()}`, + }, + ]); + + const results = 
startProxyExports.getCredentials( + logger, + undefined, + likelyWrongCredentials, + undefined, + ); + + // The configuration should be accepted, despite the likely problem. + t.assert(results); + t.is(results.length, 1); + t.is(results[0].type, "git_server"); + t.is(results[0].host, "https://github.com/"); + t.assert(results[0].password?.startsWith("ghp_")); + + // A warning should have been logged. + checkExpectedLogMessages(t, loggedMessages, [ + "using a GitHub Personal Access Token (PAT), but no username was provided", + ]); + }, +); + +test.serial( + "getCredentials returns all credentials for Actions when using LANGUAGE_TO_REGISTRY_TYPE", + async (t) => { + const credentialsInput = toEncodedJSON(mixedCredentials); + + const credentials = startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + credentialsInput, + KnownLanguage.actions, + false, + ); + t.is(credentials.length, mixedCredentials.length); + }, +); + +test.serial( + "getCredentials returns no credentials for Actions when using NEW_LANGUAGE_TO_REGISTRY_TYPE", + async (t) => { + const credentialsInput = toEncodedJSON(mixedCredentials); + + const credentials = startProxyExports.getCredentials( + getRunnerLogger(true), + undefined, + credentialsInput, + KnownLanguage.actions, + true, + ); + t.deepEqual(credentials, []); + }, +); + +test.serial("parseLanguage", async (t) => { // Exact matches t.deepEqual(parseLanguage("csharp"), KnownLanguage.csharp); t.deepEqual(parseLanguage("cpp"), KnownLanguage.cpp); @@ -391,34 +454,14 @@ function mockOfflineFeatures(tempDir: string, logger: Logger) { return setUpFeatureFlagTests(tempDir, logger, gitHubVersion); } -test("getDownloadUrl returns fallback when `getReleaseByVersion` rejects", async (t) => { - const logger = new RecordingLogger(); - mockGetReleaseByTag(); +test.serial( + "getDownloadUrl returns fallback when `getReleaseByVersion` rejects", + async (t) => { + const logger = new RecordingLogger(); + mockGetReleaseByTag(); - await 
withTmpDir(async (tempDir) => { - const features = mockOfflineFeatures(tempDir, logger); - const info = await startProxyExports.getDownloadUrl( - getRunnerLogger(true), - features, - ); - - t.is(info.version, startProxyExports.UPDATEJOB_PROXY_VERSION); - t.is( - info.url, - startProxyExports.getFallbackUrl(startProxyExports.getProxyPackage()), - ); - }); -}); - -test("getDownloadUrl returns fallback when there's no matching release asset", async (t) => { - const logger = new RecordingLogger(); - const testAssets = [[], [{ name: "foo" }]]; - - await withTmpDir(async (tempDir) => { - const features = mockOfflineFeatures(tempDir, logger); - - for (const assets of testAssets) { - const stub = mockGetReleaseByTag(assets); + await withTmpDir(async (tempDir) => { + const features = mockOfflineFeatures(tempDir, logger); const info = await startProxyExports.getDownloadUrl( getRunnerLogger(true), features, @@ -429,13 +472,39 @@ test("getDownloadUrl returns fallback when there's no matching release asset", a info.url, startProxyExports.getFallbackUrl(startProxyExports.getProxyPackage()), ); + }); + }, +); - stub.restore(); - } - }); -}); +test.serial( + "getDownloadUrl returns fallback when there's no matching release asset", + async (t) => { + const logger = new RecordingLogger(); + const testAssets = [[], [{ name: "foo" }]]; -test("getDownloadUrl returns matching release asset", async (t) => { + await withTmpDir(async (tempDir) => { + const features = mockOfflineFeatures(tempDir, logger); + + for (const assets of testAssets) { + const stub = mockGetReleaseByTag(assets); + const info = await startProxyExports.getDownloadUrl( + getRunnerLogger(true), + features, + ); + + t.is(info.version, startProxyExports.UPDATEJOB_PROXY_VERSION); + t.is( + info.url, + startProxyExports.getFallbackUrl(startProxyExports.getProxyPackage()), + ); + + stub.restore(); + } + }); + }, +); + +test.serial("getDownloadUrl returns matching release asset", async (t) => { const logger = new 
RecordingLogger(); const assets = [ { name: "foo", url: "other-url" }, @@ -455,7 +524,7 @@ test("getDownloadUrl returns matching release asset", async (t) => { }); }); -test("credentialToStr - hides passwords", (t) => { +test.serial("credentialToStr - hides passwords", (t) => { const secret = "password123"; const credential = { type: "maven_credential", @@ -472,7 +541,7 @@ test("credentialToStr - hides passwords", (t) => { ); }); -test("credentialToStr - hides tokens", (t) => { +test.serial("credentialToStr - hides tokens", (t) => { const secret = "password123"; const credential = { type: "maven_credential", @@ -489,29 +558,35 @@ test("credentialToStr - hides tokens", (t) => { ); }); -test("getSafeErrorMessage - returns actual message for `StartProxyError`", (t) => { - const error = new startProxyExports.StartProxyError( - startProxyExports.StartProxyErrorType.DownloadFailed, - ); - t.is( - startProxyExports.getSafeErrorMessage(error), - startProxyExports.getStartProxyErrorMessage(error.errorType), - ); -}); - -test("getSafeErrorMessage - does not return message for arbitrary errors", (t) => { - const error = new Error( - startProxyExports.getStartProxyErrorMessage( +test.serial( + "getSafeErrorMessage - returns actual message for `StartProxyError`", + (t) => { + const error = new startProxyExports.StartProxyError( startProxyExports.StartProxyErrorType.DownloadFailed, - ), - ); + ); + t.is( + startProxyExports.getSafeErrorMessage(error), + startProxyExports.getStartProxyErrorMessage(error.errorType), + ); + }, +); - const message = startProxyExports.getSafeErrorMessage(error); +test.serial( + "getSafeErrorMessage - does not return message for arbitrary errors", + (t) => { + const error = new Error( + startProxyExports.getStartProxyErrorMessage( + startProxyExports.StartProxyErrorType.DownloadFailed, + ), + ); - t.not(message, error.message); - t.assert(message.startsWith("Error from start-proxy Action omitted")); - t.assert(message.includes(error.name)); -}); + 
const message = startProxyExports.getSafeErrorMessage(error); + + t.not(message, error.message); + t.assert(message.startsWith("Error from start-proxy Action omitted")); + t.assert(message.includes(error.name)); + }, +); const wrapFailureTest = test.macro({ exec: async ( @@ -530,7 +605,7 @@ const wrapFailureTest = test.macro({ title: (providedTitle) => `${providedTitle} - wraps errors on failure`, }); -test("downloadProxy - returns file path on success", async (t) => { +test.serial("downloadProxy - returns file path on success", async (t) => { await withRecordingLoggerAsync(async (logger) => { const testPath = "/some/path"; sinon.stub(toolcache, "downloadTool").resolves(testPath); @@ -544,7 +619,7 @@ test("downloadProxy - returns file path on success", async (t) => { }); }); -test( +test.serial( "downloadProxy", wrapFailureTest, () => { @@ -555,7 +630,7 @@ test( }, ); -test("extractProxy - returns file path on success", async (t) => { +test.serial("extractProxy - returns file path on success", async (t) => { await withRecordingLoggerAsync(async (logger) => { const testPath = "/some/path"; sinon.stub(toolcache, "extractTar").resolves(testPath); @@ -565,7 +640,7 @@ test("extractProxy - returns file path on success", async (t) => { }); }); -test( +test.serial( "extractProxy", wrapFailureTest, () => { @@ -576,7 +651,7 @@ test( }, ); -test("cacheProxy - returns file path on success", async (t) => { +test.serial("cacheProxy - returns file path on success", async (t) => { await withRecordingLoggerAsync(async (logger) => { const testPath = "/some/path"; sinon.stub(toolcache, "cacheDir").resolves(testPath); @@ -591,7 +666,7 @@ test("cacheProxy - returns file path on success", async (t) => { }); }); -test( +test.serial( "cacheProxy", wrapFailureTest, () => { @@ -602,100 +677,37 @@ test( }, ); -test("getProxyBinaryPath - returns path from tool cache if available", async (t) => { - const logger = new RecordingLogger(); - mockGetReleaseByTag(); +test.serial( + 
"getProxyBinaryPath - returns path from tool cache if available", + async (t) => { + const logger = new RecordingLogger(); + mockGetReleaseByTag(); - await withTmpDir(async (tempDir) => { - const toolcachePath = "/path/to/proxy/dir"; - sinon.stub(toolcache, "find").returns(toolcachePath); + await withTmpDir(async (tempDir) => { + const toolcachePath = "/path/to/proxy/dir"; + sinon.stub(toolcache, "find").returns(toolcachePath); - const features = mockOfflineFeatures(tempDir, logger); - const path = await startProxyExports.getProxyBinaryPath(logger, features); + const features = mockOfflineFeatures(tempDir, logger); + const path = await startProxyExports.getProxyBinaryPath(logger, features); - t.assert(path); - t.is( - path, - filepath.join(toolcachePath, startProxyExports.getProxyFilename()), - ); - }); -}); + t.assert(path); + t.is( + path, + filepath.join(toolcachePath, startProxyExports.getProxyFilename()), + ); + }); + }, +); -test("getProxyBinaryPath - downloads proxy if not in cache", async (t) => { - const logger = new RecordingLogger(); - const downloadUrl = "url-we-want"; - mockGetReleaseByTag([ - { name: startProxyExports.getProxyPackage(), url: downloadUrl }, - ]); +test.serial( + "getProxyBinaryPath - downloads proxy if not in cache", + async (t) => { + const logger = new RecordingLogger(); + const downloadUrl = "url-we-want"; + mockGetReleaseByTag([ + { name: startProxyExports.getProxyPackage(), url: downloadUrl }, + ]); - const toolcachePath = "/path/to/proxy/dir"; - const find = sinon.stub(toolcache, "find").returns(""); - const getApiDetails = sinon.stub(apiClient, "getApiDetails").returns({ - auth: "", - url: "", - apiURL: "", - }); - const getAuthorizationHeaderFor = sinon - .stub(apiClient, "getAuthorizationHeaderFor") - .returns(undefined); - const archivePath = "/path/to/archive"; - const downloadTool = sinon - .stub(toolcache, "downloadTool") - .resolves(archivePath); - const extractedPath = "/path/to/extracted"; - const extractTar = sinon - 
.stub(toolcache, "extractTar") - .resolves(extractedPath); - const cacheDir = sinon.stub(toolcache, "cacheDir").resolves(toolcachePath); - - const path = await startProxyExports.getProxyBinaryPath( - logger, - createFeatures([]), - ); - - t.assert(find.calledOnce); - t.assert(getApiDetails.calledOnce); - t.assert(getAuthorizationHeaderFor.calledOnce); - t.assert(downloadTool.calledOnceWith(downloadUrl)); - t.assert(extractTar.calledOnceWith(archivePath)); - t.assert(cacheDir.calledOnceWith(extractedPath)); - t.assert(path); - t.is( - path, - filepath.join(toolcachePath, startProxyExports.getProxyFilename()), - ); - - checkExpectedLogMessages(t, logger.messages, [ - `Found '${startProxyExports.getProxyPackage()}' in release '${defaults.bundleVersion}' at '${downloadUrl}'`, - ]); -}); - -test("getProxyBinaryPath - downloads proxy based on features if not in cache", async (t) => { - const logger = new RecordingLogger(); - const expectedTag = "codeql-bundle-v2.20.1"; - const expectedParams = { - owner: "github", - repo: "codeql-action", - tag: expectedTag, - }; - const downloadUrl = "url-we-want"; - const assets = [ - { - name: startProxyExports.getProxyPackage(), - url: downloadUrl, - }, - ]; - - const getReleaseByTag = sinon.stub(); - getReleaseByTag.withArgs(sinon.match(expectedParams)).resolves({ - status: 200, - data: { assets }, - headers: {}, - url: "GET /repos/:owner/:repo/releases/tags/:tag", - }); - mockGetApiClient({ repos: { getReleaseByTag } }); - - await withTmpDir(async (tempDir) => { const toolcachePath = "/path/to/proxy/dir"; const find = sinon.stub(toolcache, "find").returns(""); const getApiDetails = sinon.stub(apiClient, "getApiDetails").returns({ @@ -716,40 +728,114 @@ test("getProxyBinaryPath - downloads proxy based on features if not in cache", a .resolves(extractedPath); const cacheDir = sinon.stub(toolcache, "cacheDir").resolves(toolcachePath); - const gitHubVersion: GitHubVersion = { - type: GitHubVariant.DOTCOM, - }; - sinon.stub(apiClient, 
"getGitHubVersion").resolves(gitHubVersion); - - const features = setUpFeatureFlagTests(tempDir, logger, gitHubVersion); - sinon.stub(features, "getValue").callsFake(async (_feature, _codeql) => { - return true; - }); - const getDefaultCliVersion = sinon - .stub(features, "getDefaultCliVersion") - .resolves({ cliVersion: "2.20.1", tagName: expectedTag }); - const path = await startProxyExports.getProxyBinaryPath(logger, features); - - t.assert(getDefaultCliVersion.calledOnce); - sinon.assert.calledOnceWithMatch( - getReleaseByTag, - sinon.match(expectedParams), + const path = await startProxyExports.getProxyBinaryPath( + logger, + createFeatures([]), ); + t.assert(find.calledOnce); t.assert(getApiDetails.calledOnce); t.assert(getAuthorizationHeaderFor.calledOnce); t.assert(downloadTool.calledOnceWith(downloadUrl)); t.assert(extractTar.calledOnceWith(archivePath)); t.assert(cacheDir.calledOnceWith(extractedPath)); - t.assert(path); t.is( path, filepath.join(toolcachePath, startProxyExports.getProxyFilename()), ); - }); - checkExpectedLogMessages(t, logger.messages, [ - `Found '${startProxyExports.getProxyPackage()}' in release '${expectedTag}' at '${downloadUrl}'`, - ]); -}); + checkExpectedLogMessages(t, logger.messages, [ + `Found '${startProxyExports.getProxyPackage()}' in release '${defaults.bundleVersion}' at '${downloadUrl}'`, + ]); + }, +); + +test.serial( + "getProxyBinaryPath - downloads proxy based on features if not in cache", + async (t) => { + const logger = new RecordingLogger(); + const expectedTag = "codeql-bundle-v2.20.1"; + const expectedParams = { + owner: "github", + repo: "codeql-action", + tag: expectedTag, + }; + const downloadUrl = "url-we-want"; + const assets = [ + { + name: startProxyExports.getProxyPackage(), + url: downloadUrl, + }, + ]; + + const getReleaseByTag = sinon.stub(); + getReleaseByTag.withArgs(sinon.match(expectedParams)).resolves({ + status: 200, + data: { assets }, + headers: {}, + url: "GET 
/repos/:owner/:repo/releases/tags/:tag", + }); + mockGetApiClient({ repos: { getReleaseByTag } }); + + await withTmpDir(async (tempDir) => { + const toolcachePath = "/path/to/proxy/dir"; + const find = sinon.stub(toolcache, "find").returns(""); + const getApiDetails = sinon.stub(apiClient, "getApiDetails").returns({ + auth: "", + url: "", + apiURL: "", + }); + const getAuthorizationHeaderFor = sinon + .stub(apiClient, "getAuthorizationHeaderFor") + .returns(undefined); + const archivePath = "/path/to/archive"; + const downloadTool = sinon + .stub(toolcache, "downloadTool") + .resolves(archivePath); + const extractedPath = "/path/to/extracted"; + const extractTar = sinon + .stub(toolcache, "extractTar") + .resolves(extractedPath); + const cacheDir = sinon + .stub(toolcache, "cacheDir") + .resolves(toolcachePath); + + const gitHubVersion: GitHubVersion = { + type: GitHubVariant.DOTCOM, + }; + sinon.stub(apiClient, "getGitHubVersion").resolves(gitHubVersion); + + const features = setUpFeatureFlagTests(tempDir, logger, gitHubVersion); + sinon.stub(features, "getValue").callsFake(async (_feature, _codeql) => { + return true; + }); + const getDefaultCliVersion = sinon + .stub(features, "getDefaultCliVersion") + .resolves({ cliVersion: "2.20.1", tagName: expectedTag }); + const path = await startProxyExports.getProxyBinaryPath(logger, features); + + t.assert(getDefaultCliVersion.calledOnce); + sinon.assert.calledOnceWithMatch( + getReleaseByTag, + sinon.match(expectedParams), + ); + t.assert(find.calledOnce); + t.assert(getApiDetails.calledOnce); + t.assert(getAuthorizationHeaderFor.calledOnce); + t.assert(downloadTool.calledOnceWith(downloadUrl)); + t.assert(extractTar.calledOnceWith(archivePath)); + t.assert(cacheDir.calledOnceWith(extractedPath)); + + t.assert(path); + t.is( + path, + filepath.join(toolcachePath, startProxyExports.getProxyFilename()), + ); + }); + + checkExpectedLogMessages(t, logger.messages, [ + `Found '${startProxyExports.getProxyPackage()}' in 
release '${expectedTag}' at '${downloadUrl}'`, + ]); + }, +); diff --git a/src/start-proxy.ts b/src/start-proxy.ts index 7ed466a41..60d0afbc6 100644 --- a/src/start-proxy.ts +++ b/src/start-proxy.ts @@ -224,7 +224,9 @@ function isPAT(value: string) { ]); } -const LANGUAGE_TO_REGISTRY_TYPE: Partial> = { +type RegistryMapping = Partial>; + +const LANGUAGE_TO_REGISTRY_TYPE: RegistryMapping = { java: ["maven_repository"], csharp: ["nuget_feed"], javascript: ["npm_registry"], @@ -234,6 +236,19 @@ const LANGUAGE_TO_REGISTRY_TYPE: Partial> = { go: ["goproxy_server", "git_source"], } as const; +const NEW_LANGUAGE_TO_REGISTRY_TYPE: Required = { + actions: [], + cpp: [], + java: ["maven_repository"], + csharp: ["nuget_feed"], + javascript: [], + python: [], + ruby: [], + rust: [], + swift: [], + go: ["goproxy_server", "git_source"], +} as const; + /** * Extracts an `Address` value from the given `Registry` value by determining whether it has * a `url` value, or no `url` value but a `host` value. @@ -267,9 +282,13 @@ export function getCredentials( registrySecrets: string | undefined, registriesCredentials: string | undefined, language: KnownLanguage | undefined, + skipUnusedRegistries: boolean = false, ): Credential[] { + const registryMapping = skipUnusedRegistries + ? NEW_LANGUAGE_TO_REGISTRY_TYPE + : LANGUAGE_TO_REGISTRY_TYPE; const registryTypeForLanguage = language - ? LANGUAGE_TO_REGISTRY_TYPE[language] + ? 
registryMapping[language] : undefined; let credentialsStr: string; diff --git a/src/start-proxy/environment.test.ts b/src/start-proxy/environment.test.ts index 8dcb4c7b2..6722c53ab 100644 --- a/src/start-proxy/environment.test.ts +++ b/src/start-proxy/environment.test.ts @@ -69,19 +69,22 @@ test("checkJavaEnvironment - none set", (t) => { assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false); }); -test("checkJavaEnvironment - logs values when variables are set", (t) => { - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); +test.serial( + "checkJavaEnvironment - logs values when variables are set", + (t) => { + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); - for (const envVar of Object.values(JavaEnvVars)) { - process.env[envVar] = envVar; - } + for (const envVar of Object.values(JavaEnvVars)) { + process.env[envVar] = envVar; + } - checkJavaEnvVars(logger); - assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, true); -}); + checkJavaEnvVars(logger); + assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, true); + }, +); -test("discoverActionsJdks - discovers JDK paths", (t) => { +test.serial("discoverActionsJdks - discovers JDK paths", (t) => { // Clear GHA variables that may interfere with this test in CI. 
for (const envVar of Object.keys(process.env)) { if (envVar.startsWith("JAVA_HOME_")) { @@ -149,7 +152,7 @@ test("checkProxyEnvVars - none set", (t) => { assertEnvVarLogMessages(t, Object.values(ProxyEnvVars), messages, false); }); -test("checkProxyEnvVars - logs values when variables are set", (t) => { +test.serial("checkProxyEnvVars - logs values when variables are set", (t) => { const messages: LoggedMessage[] = []; const logger = getRecordingLogger(messages); @@ -161,7 +164,7 @@ test("checkProxyEnvVars - logs values when variables are set", (t) => { assertEnvVarLogMessages(t, Object.values(ProxyEnvVars), messages, true); }); -test("checkProxyEnvVars - credentials are removed from URLs", (t) => { +test.serial("checkProxyEnvVars - credentials are removed from URLs", (t) => { const messages: LoggedMessage[] = []; const logger = getRecordingLogger(messages); @@ -178,36 +181,45 @@ test("checkProxyEnvVars - credentials are removed from URLs", (t) => { ); }); -test("checkProxyEnvironment - includes base checks for all known languages", async (t) => { - stubToolrunner(); +test.serial( + "checkProxyEnvironment - includes base checks for all known languages", + async (t) => { + stubToolrunner(); - for (const language of Object.values(KnownLanguage)) { + for (const language of Object.values(KnownLanguage)) { + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); + + await checkProxyEnvironment(logger, language); + assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false); + } + }, +); + +test.serial( + "checkProxyEnvironment - includes Java checks for Java", + async (t) => { const messages: LoggedMessage[] = []; const logger = getRecordingLogger(messages); - await checkProxyEnvironment(logger, language); + stubToolrunner(); + + await checkProxyEnvironment(logger, KnownLanguage.java); assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false); - } -}); + assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, 
false); + }, +); -test("checkProxyEnvironment - includes Java checks for Java", async (t) => { - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); +test.serial( + "checkProxyEnvironment - includes language-specific checks if the language is undefined", + async (t) => { + const messages: LoggedMessage[] = []; + const logger = getRecordingLogger(messages); - stubToolrunner(); + stubToolrunner(); - await checkProxyEnvironment(logger, KnownLanguage.java); - assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false); - assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false); -}); - -test("checkProxyEnvironment - includes language-specific checks if the language is undefined", async (t) => { - const messages: LoggedMessage[] = []; - const logger = getRecordingLogger(messages); - - stubToolrunner(); - - await checkProxyEnvironment(logger, undefined); - assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false); - assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false); -}); + await checkProxyEnvironment(logger, undefined); + assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false); + assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false); + }, +); diff --git a/src/status-report.test.ts b/src/status-report.test.ts index e051c54a2..35d608b7d 100644 --- a/src/status-report.test.ts +++ b/src/status-report.test.ts @@ -25,24 +25,20 @@ import { BuildMode, ConfigurationError, withTmpDir, wrapError } from "./util"; setupTests(test); function setupEnvironmentAndStub(tmpDir: string) { - setupActionsVars(tmpDir, tmpDir); + setupActionsVars(tmpDir, tmpDir, { + GITHUB_EVENT_NAME: "dynamic", + GITHUB_RUN_ATTEMPT: "2", + GITHUB_RUN_ID: "100", + }); process.env[EnvVar.ANALYSIS_KEY] = "analysis-key"; - process.env["GITHUB_EVENT_NAME"] = "dynamic"; - process.env["GITHUB_REF"] = "refs/heads/main"; - process.env["GITHUB_REPOSITORY"] = "octocat/HelloWorld"; - process.env["GITHUB_RUN_ATTEMPT"] = 
"2"; - process.env["GITHUB_RUN_ID"] = "100"; - process.env["GITHUB_SHA"] = "a".repeat(40); process.env["ImageVersion"] = "2023.05.19.1"; - process.env["RUNNER_OS"] = "macOS"; - process.env["RUNNER_TEMP"] = tmpDir; const getRequiredInput = sinon.stub(actionsUtil, "getRequiredInput"); getRequiredInput.withArgs("matrix").resolves("input/matrix"); } -test("createStatusReportBase", async (t) => { +test.serial("createStatusReportBase", async (t) => { await withTmpDir(async (tmpDir: string) => { setupEnvironmentAndStub(tmpDir); @@ -92,7 +88,7 @@ test("createStatusReportBase", async (t) => { }); }); -test("createStatusReportBase - empty configuration", async (t) => { +test.serial("createStatusReportBase - empty configuration", async (t) => { await withTmpDir(async (tmpDir: string) => { setupEnvironmentAndStub(tmpDir); @@ -112,7 +108,7 @@ test("createStatusReportBase - empty configuration", async (t) => { }); }); -test("createStatusReportBase - partial configuration", async (t) => { +test.serial("createStatusReportBase - partial configuration", async (t) => { await withTmpDir(async (tmpDir: string) => { setupEnvironmentAndStub(tmpDir); @@ -135,7 +131,7 @@ test("createStatusReportBase - partial configuration", async (t) => { }); }); -test("createStatusReportBase_firstParty", async (t) => { +test.serial("createStatusReportBase_firstParty", async (t) => { await withTmpDir(async (tmpDir: string) => { setupEnvironmentAndStub(tmpDir); @@ -239,58 +235,61 @@ test("createStatusReportBase_firstParty", async (t) => { }); }); -test("getActionStatus handling correctly various types of errors", (t) => { - t.is( - getActionsStatus(new Error("arbitrary error")), - "failure", - "We categorise an arbitrary error as a failure", - ); +test.serial( + "getActionStatus handling correctly various types of errors", + (t) => { + t.is( + getActionsStatus(new Error("arbitrary error")), + "failure", + "We categorise an arbitrary error as a failure", + ); - t.is( - getActionsStatus(new 
ConfigurationError("arbitrary error")), - "user-error", - "We categorise a ConfigurationError as a user error", - ); + t.is( + getActionsStatus(new ConfigurationError("arbitrary error")), + "user-error", + "We categorise a ConfigurationError as a user error", + ); - t.is( - getActionsStatus(new Error("exit code 1"), "multiple things went wrong"), - "failure", - "getActionsStatus should return failure if passed an arbitrary error and an additional failure cause", - ); + t.is( + getActionsStatus(new Error("exit code 1"), "multiple things went wrong"), + "failure", + "getActionsStatus should return failure if passed an arbitrary error and an additional failure cause", + ); - t.is( - getActionsStatus( - new ConfigurationError("exit code 1"), - "multiple things went wrong", - ), - "user-error", - "getActionsStatus should return user-error if passed a configuration error and an additional failure cause", - ); + t.is( + getActionsStatus( + new ConfigurationError("exit code 1"), + "multiple things went wrong", + ), + "user-error", + "getActionsStatus should return user-error if passed a configuration error and an additional failure cause", + ); - t.is( - getActionsStatus(), - "success", - "getActionsStatus should return success if no error is passed", - ); + t.is( + getActionsStatus(), + "success", + "getActionsStatus should return success if no error is passed", + ); - t.is( - getActionsStatus(new Object()), - "failure", - "getActionsStatus should return failure if passed an arbitrary object", - ); + t.is( + getActionsStatus(new Object()), + "failure", + "getActionsStatus should return failure if passed an arbitrary object", + ); - t.is( - getActionsStatus(null, "an error occurred"), - "failure", - "getActionsStatus should return failure if passed null and an additional failure cause", - ); + t.is( + getActionsStatus(null, "an error occurred"), + "failure", + "getActionsStatus should return failure if passed null and an additional failure cause", + ); - t.is( - 
getActionsStatus(wrapError(new ConfigurationError("arbitrary error"))), - "user-error", - "We still recognise a wrapped ConfigurationError as a user error", - ); -}); + t.is( + getActionsStatus(wrapError(new ConfigurationError("arbitrary error"))), + "user-error", + "We still recognise a wrapped ConfigurationError as a user error", + ); + }, +); const testCreateInitWithConfigStatusReport = test.macro({ exec: async ( @@ -341,7 +340,7 @@ const testCreateInitWithConfigStatusReport = test.macro({ title: (_, title) => `createInitWithConfigStatusReport: ${title}`, }); -test( +test.serial( testCreateInitWithConfigStatusReport, "returns a value", createTestConfig({ @@ -356,7 +355,7 @@ test( }, ); -test( +test.serial( testCreateInitWithConfigStatusReport, "includes packs for a single language", createTestConfig({ @@ -373,7 +372,7 @@ test( }, ); -test( +test.serial( testCreateInitWithConfigStatusReport, "includes packs for multiple languages", createTestConfig({ diff --git a/src/testing-utils.ts b/src/testing-utils.ts index 3abc1f4f4..8a7cf8e2d 100644 --- a/src/testing-utils.ts +++ b/src/testing-utils.ts @@ -139,13 +139,40 @@ export function setupTests(test: TestFn) { }); } +/** + * Default values for environment variables typically set in an Actions + * environment. Tests can override individual variables by passing them in the + * `overrides` parameter. + */ +export const DEFAULT_ACTIONS_VARS = { + GITHUB_ACTION_REPOSITORY: "github/codeql-action", + GITHUB_API_URL: "https://api.github.com", + GITHUB_EVENT_NAME: "push", + GITHUB_JOB: "test-job", + GITHUB_REF: "refs/heads/main", + GITHUB_REPOSITORY: "github/codeql-action-testing", + GITHUB_RUN_ATTEMPT: "1", + GITHUB_RUN_ID: "1", + GITHUB_SERVER_URL: "https://github.com", + GITHUB_SHA: "0".repeat(40), + GITHUB_WORKFLOW: "test-workflow", + RUNNER_OS: "Linux", +} as const satisfies Record; + // Sets environment variables that make using some libraries designed for // use only on actions safe to use outside of actions. 
-export function setupActionsVars(tempDir: string, toolsDir: string) { +export function setupActionsVars( + tempDir: string, + toolsDir: string, + overrides?: Partial>, +) { + const vars = { ...DEFAULT_ACTIONS_VARS, ...overrides }; + for (const [key, value] of Object.entries(vars)) { + process.env[key] = value; + } process.env["RUNNER_TEMP"] = tempDir; process.env["RUNNER_TOOL_CACHE"] = toolsDir; process.env["GITHUB_WORKSPACE"] = tempDir; - process.env["GITHUB_EVENT_NAME"] = "push"; } type LogLevel = "debug" | "info" | "warning" | "error"; diff --git a/src/trap-caching.test.ts b/src/trap-caching.test.ts index 66913d61b..a6c7fc76c 100644 --- a/src/trap-caching.test.ts +++ b/src/trap-caching.test.ts @@ -94,7 +94,7 @@ function getTestConfigWithTempDir(tempDir: string): configUtils.Config { }); } -test("check flags for JS, analyzing default branch", async (t) => { +test.serial("check flags for JS, analyzing default branch", async (t) => { await util.withTmpDir(async (tmpDir) => { const config = getTestConfigWithTempDir(tmpDir); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); @@ -110,7 +110,7 @@ test("check flags for JS, analyzing default branch", async (t) => { }); }); -test("check flags for all, not analyzing default branch", async (t) => { +test.serial("check flags for all, not analyzing default branch", async (t) => { await util.withTmpDir(async (tmpDir) => { const config = getTestConfigWithTempDir(tmpDir); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false); @@ -137,7 +137,7 @@ test("get languages that support TRAP caching", async (t) => { t.deepEqual(languagesSupportingCaching, [KnownLanguage.javascript]); }); -test("upload cache key contains right fields", async (t) => { +test.serial("upload cache key contains right fields", async (t) => { const loggedMessages = []; const logger = getRecordingLogger(loggedMessages); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); @@ -156,47 +156,50 @@ test("upload cache key contains 
right fields", async (t) => { ); }); -test("download cache looks for the right key and creates dir", async (t) => { - await util.withTmpDir(async (tmpDir) => { - const loggedMessages = []; - const logger = getRecordingLogger(loggedMessages); - sinon.stub(actionsUtil, "getTemporaryDirectory").returns(tmpDir); - sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false); - const stubRestore = sinon.stub(cache, "restoreCache").resolves("found"); - const eventFile = path.resolve(tmpDir, "event.json"); - process.env.GITHUB_EVENT_NAME = "pull_request"; - process.env.GITHUB_EVENT_PATH = eventFile; - fs.writeFileSync( - eventFile, - JSON.stringify({ - pull_request: { - base: { - sha: "somesha", +test.serial( + "download cache looks for the right key and creates dir", + async (t) => { + await util.withTmpDir(async (tmpDir) => { + const loggedMessages = []; + const logger = getRecordingLogger(loggedMessages); + sinon.stub(actionsUtil, "getTemporaryDirectory").returns(tmpDir); + sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false); + const stubRestore = sinon.stub(cache, "restoreCache").resolves("found"); + const eventFile = path.resolve(tmpDir, "event.json"); + process.env.GITHUB_EVENT_NAME = "pull_request"; + process.env.GITHUB_EVENT_PATH = eventFile; + fs.writeFileSync( + eventFile, + JSON.stringify({ + pull_request: { + base: { + sha: "somesha", + }, }, - }, - }), - ); - await downloadTrapCaches( - stubCodeql, - [KnownLanguage.javascript, KnownLanguage.cpp], - logger, - ); - t.assert( - stubRestore.calledOnceWith( - sinon.match.array.contains([ - path.resolve(tmpDir, "trapCaches", "javascript"), - ]), - sinon - .match("somesha") - .and(sinon.match("2.10.3")) - .and(sinon.match("javascript")), - ), - ); - t.assert(fs.existsSync(path.resolve(tmpDir, "trapCaches", "javascript"))); - }); -}); + }), + ); + await downloadTrapCaches( + stubCodeql, + [KnownLanguage.javascript, KnownLanguage.cpp], + logger, + ); + t.assert( + stubRestore.calledOnceWith( + 
sinon.match.array.contains([ + path.resolve(tmpDir, "trapCaches", "javascript"), + ]), + sinon + .match("somesha") + .and(sinon.match("2.10.3")) + .and(sinon.match("javascript")), + ), + ); + t.assert(fs.existsSync(path.resolve(tmpDir, "trapCaches", "javascript"))); + }); + }, +); -test("cleanup removes only old CodeQL TRAP caches", async (t) => { +test.serial("cleanup removes only old CodeQL TRAP caches", async (t) => { await util.withTmpDir(async (tmpDir) => { // This config specifies that we are analyzing JavaScript and Ruby, but not Swift. const config = getTestConfigWithTempDir(tmpDir); diff --git a/src/upload-lib.test.ts b/src/upload-lib.test.ts index 677d9f2aa..92dc2e773 100644 --- a/src/upload-lib.test.ts +++ b/src/upload-lib.test.ts @@ -10,6 +10,7 @@ import * as analyses from "./analyses"; import { AnalysisKind, CodeQuality, CodeScanning } from "./analyses"; import * as api from "./api-client"; import { getRunnerLogger, Logger } from "./logging"; +import * as sarif from "./sarif"; import { setupTests } from "./testing-utils"; import * as uploadLib from "./upload-lib"; import { UploadPayload } from "./upload-lib/types"; @@ -21,91 +22,94 @@ test.beforeEach(() => { initializeEnvironment("1.2.3"); }); -test("validateSarifFileSchema - valid", (t) => { +test.serial("validateSarifFileSchema - valid", (t) => { const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`; t.notThrows(() => uploadLib.validateSarifFileSchema( - uploadLib.readSarifFile(inputFile), + uploadLib.readSarifFileOrThrow(inputFile), inputFile, getRunnerLogger(true), ), ); }); -test("validateSarifFileSchema - invalid", (t) => { +test.serial("validateSarifFileSchema - invalid", (t) => { const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`; t.throws(() => uploadLib.validateSarifFileSchema( - uploadLib.readSarifFile(inputFile), + uploadLib.readSarifFileOrThrow(inputFile), inputFile, getRunnerLogger(true), ), ); }); -test("validate correct payload used for push, PR merge 
commit, and PR head", async (t) => { - process.env["GITHUB_EVENT_NAME"] = "push"; - const pushPayload: any = uploadLib.buildPayload( - "commit", - "refs/heads/master", - "key", - undefined, - "", - 1234, - 1, - "/opt/src", - undefined, - ["CodeQL", "eslint"], - "mergeBaseCommit", - ); - // Not triggered by a pull request - t.falsy(pushPayload.base_ref); - t.falsy(pushPayload.base_sha); +test.serial( + "validate correct payload used for push, PR merge commit, and PR head", + async (t) => { + process.env["GITHUB_EVENT_NAME"] = "push"; + const pushPayload: any = uploadLib.buildPayload( + "commit", + "refs/heads/master", + "key", + undefined, + "", + 1234, + 1, + "/opt/src", + undefined, + ["CodeQL", "eslint"], + "mergeBaseCommit", + ); + // Not triggered by a pull request + t.falsy(pushPayload.base_ref); + t.falsy(pushPayload.base_sha); - process.env["GITHUB_EVENT_NAME"] = "pull_request"; - process.env["GITHUB_SHA"] = "commit"; - process.env["GITHUB_BASE_REF"] = "master"; - process.env["GITHUB_EVENT_PATH"] = - `${__dirname}/../src/testdata/pull_request.json`; - const prMergePayload: any = uploadLib.buildPayload( - "commit", - "refs/pull/123/merge", - "key", - undefined, - "", - 1234, - 1, - "/opt/src", - undefined, - ["CodeQL", "eslint"], - "mergeBaseCommit", - ); - // Uploads for a merge commit use the merge base - t.deepEqual(prMergePayload.base_ref, "refs/heads/master"); - t.deepEqual(prMergePayload.base_sha, "mergeBaseCommit"); + process.env["GITHUB_EVENT_NAME"] = "pull_request"; + process.env["GITHUB_SHA"] = "commit"; + process.env["GITHUB_BASE_REF"] = "master"; + process.env["GITHUB_EVENT_PATH"] = + `${__dirname}/../src/testdata/pull_request.json`; + const prMergePayload: any = uploadLib.buildPayload( + "commit", + "refs/pull/123/merge", + "key", + undefined, + "", + 1234, + 1, + "/opt/src", + undefined, + ["CodeQL", "eslint"], + "mergeBaseCommit", + ); + // Uploads for a merge commit use the merge base + t.deepEqual(prMergePayload.base_ref, 
"refs/heads/master"); + t.deepEqual(prMergePayload.base_sha, "mergeBaseCommit"); - const prHeadPayload: any = uploadLib.buildPayload( - "headCommit", - "refs/pull/123/head", - "key", - undefined, - "", - 1234, - 1, - "/opt/src", - undefined, - ["CodeQL", "eslint"], - "mergeBaseCommit", - ); - // Uploads for the head use the PR base - t.deepEqual(prHeadPayload.base_ref, "refs/heads/master"); - t.deepEqual( - prHeadPayload.base_sha, - "f95f852bd8fca8fcc58a9a2d6c842781e32a215e", - ); -}); + const prHeadPayload: any = uploadLib.buildPayload( + "headCommit", + "refs/pull/123/head", + "key", + undefined, + "", + 1234, + 1, + "/opt/src", + undefined, + ["CodeQL", "eslint"], + "mergeBaseCommit", + ); + // Uploads for the head use the PR base + t.deepEqual(prHeadPayload.base_ref, "refs/heads/master"); + t.deepEqual( + prHeadPayload.base_sha, + "f95f852bd8fca8fcc58a9a2d6c842781e32a215e", + ); + }, +); -test("finding SARIF files", async (t) => { +test.serial("finding SARIF files", async (t) => { await withTmpDir(async (tmpDir) => { // include a couple of sarif files fs.writeFileSync(path.join(tmpDir, "a.sarif"), ""); @@ -189,7 +193,7 @@ test("finding SARIF files", async (t) => { }); }); -test("getGroupedSarifFilePaths - Risk Assessment files", async (t) => { +test.serial("getGroupedSarifFilePaths - Risk Assessment files", async (t) => { await withTmpDir(async (tmpDir) => { const sarifPath = path.join(tmpDir, "a.csra.sarif"); fs.writeFileSync(sarifPath, ""); @@ -207,7 +211,7 @@ test("getGroupedSarifFilePaths - Risk Assessment files", async (t) => { }); }); -test("getGroupedSarifFilePaths - Code Quality file", async (t) => { +test.serial("getGroupedSarifFilePaths - Code Quality file", async (t) => { await withTmpDir(async (tmpDir) => { const sarifPath = path.join(tmpDir, "a.quality.sarif"); fs.writeFileSync(sarifPath, ""); @@ -225,7 +229,7 @@ test("getGroupedSarifFilePaths - Code Quality file", async (t) => { }); }); -test("getGroupedSarifFilePaths - Code Scanning file", async 
(t) => { +test.serial("getGroupedSarifFilePaths - Code Scanning file", async (t) => { await withTmpDir(async (tmpDir) => { const sarifPath = path.join(tmpDir, "a.sarif"); fs.writeFileSync(sarifPath, ""); @@ -243,7 +247,7 @@ test("getGroupedSarifFilePaths - Code Scanning file", async (t) => { }); }); -test("getGroupedSarifFilePaths - Other file", async (t) => { +test.serial("getGroupedSarifFilePaths - Other file", async (t) => { await withTmpDir(async (tmpDir) => { const sarifPath = path.join(tmpDir, "a.json"); fs.writeFileSync(sarifPath, ""); @@ -261,19 +265,24 @@ test("getGroupedSarifFilePaths - Other file", async (t) => { }); }); -test("populateRunAutomationDetails", (t) => { - let sarif = { - runs: [{}], +test.serial("populateRunAutomationDetails", (t) => { + const tool = { driver: { name: "test tool" } }; + let sarifLog: sarif.Log = { + version: "2.1.0", + runs: [{ tool }], }; const analysisKey = ".github/workflows/codeql-analysis.yml:analyze"; - let expectedSarif = { - runs: [{ automationDetails: { id: "language:javascript/os:linux/" } }], + let expectedSarif: sarif.Log = { + version: "2.1.0", + runs: [ + { tool, automationDetails: { id: "language:javascript/os:linux/" } }, + ], }; // Category has priority over analysis_key/environment let modifiedSarif = uploadLib.populateRunAutomationDetails( - sarif, + sarifLog, "language:javascript/os:linux", analysisKey, '{"language": "other", "os": "other"}', @@ -282,7 +291,7 @@ test("populateRunAutomationDetails", (t) => { // It doesn't matter if the category has a slash at the end or not modifiedSarif = uploadLib.populateRunAutomationDetails( - sarif, + sarifLog, "language:javascript/os:linux/", analysisKey, "", @@ -290,10 +299,16 @@ test("populateRunAutomationDetails", (t) => { t.deepEqual(modifiedSarif, expectedSarif); // check that the automation details doesn't get overwritten - sarif = { runs: [{ automationDetails: { id: "my_id" } }] }; - expectedSarif = { runs: [{ automationDetails: { id: "my_id" } }] }; + 
sarifLog = { + version: "2.1.0", + runs: [{ tool, automationDetails: { id: "my_id" } }], + }; + expectedSarif = { + version: "2.1.0", + runs: [{ tool, automationDetails: { id: "my_id" } }], + }; modifiedSarif = uploadLib.populateRunAutomationDetails( - sarif, + sarifLog, undefined, analysisKey, '{"os": "linux", "language": "javascript"}', @@ -301,11 +316,16 @@ test("populateRunAutomationDetails", (t) => { t.deepEqual(modifiedSarif, expectedSarif); // check multiple runs - sarif = { runs: [{ automationDetails: { id: "my_id" } }, {}] }; + sarifLog = { + version: "2.1.0", + runs: [{ tool, automationDetails: { id: "my_id" } }, { tool }], + }; expectedSarif = { + version: "2.1.0", runs: [ - { automationDetails: { id: "my_id" } }, + { tool, automationDetails: { id: "my_id" } }, { + tool, automationDetails: { id: ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/", }, @@ -313,7 +333,7 @@ test("populateRunAutomationDetails", (t) => { ], }; modifiedSarif = uploadLib.populateRunAutomationDetails( - sarif, + sarifLog, undefined, analysisKey, '{"os": "linux", "language": "javascript"}', @@ -321,7 +341,7 @@ test("populateRunAutomationDetails", (t) => { t.deepEqual(modifiedSarif, expectedSarif); }); -test("validateUniqueCategory when empty", (t) => { +test.serial("validateUniqueCategory when empty", (t) => { t.notThrows(() => uploadLib.validateUniqueCategory( createMockSarif(), @@ -336,7 +356,7 @@ test("validateUniqueCategory when empty", (t) => { ); }); -test("validateUniqueCategory for automation details id", (t) => { +test.serial("validateUniqueCategory for automation details id", (t) => { t.notThrows(() => uploadLib.validateUniqueCategory( createMockSarif("abc"), @@ -405,7 +425,7 @@ test("validateUniqueCategory for automation details id", (t) => { ); }); -test("validateUniqueCategory for tool name", (t) => { +test.serial("validateUniqueCategory for tool name", (t) => { t.notThrows(() => uploadLib.validateUniqueCategory( createMockSarif(undefined, 
"abc"), @@ -474,94 +494,88 @@ test("validateUniqueCategory for tool name", (t) => { ); }); -test("validateUniqueCategory for automation details id and tool name", (t) => { - t.notThrows(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc", "abc"), - CodeScanning.sentinelPrefix, - ), - ); - t.throws(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc", "abc"), - CodeScanning.sentinelPrefix, - ), - ); +test.serial( + "validateUniqueCategory for automation details id and tool name", + (t) => { + t.notThrows(() => + uploadLib.validateUniqueCategory( + createMockSarif("abc", "abc"), + CodeScanning.sentinelPrefix, + ), + ); + t.throws(() => + uploadLib.validateUniqueCategory( + createMockSarif("abc", "abc"), + CodeScanning.sentinelPrefix, + ), + ); - t.notThrows(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc_", "def"), - CodeScanning.sentinelPrefix, - ), - ); - t.throws(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc_", "def"), - CodeScanning.sentinelPrefix, - ), - ); + t.notThrows(() => + uploadLib.validateUniqueCategory( + createMockSarif("abc_", "def"), + CodeScanning.sentinelPrefix, + ), + ); + t.throws(() => + uploadLib.validateUniqueCategory( + createMockSarif("abc_", "def"), + CodeScanning.sentinelPrefix, + ), + ); - t.notThrows(() => - uploadLib.validateUniqueCategory( - createMockSarif("ghi", "_jkl"), - CodeScanning.sentinelPrefix, - ), - ); - t.throws(() => - uploadLib.validateUniqueCategory( - createMockSarif("ghi", "_jkl"), - CodeScanning.sentinelPrefix, - ), - ); + t.notThrows(() => + uploadLib.validateUniqueCategory( + createMockSarif("ghi", "_jkl"), + CodeScanning.sentinelPrefix, + ), + ); + t.throws(() => + uploadLib.validateUniqueCategory( + createMockSarif("ghi", "_jkl"), + CodeScanning.sentinelPrefix, + ), + ); - // Our category sanitization is not perfect. 
Here are some examples - // of where we see false clashes - t.notThrows(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc"), - CodeScanning.sentinelPrefix, - ), - ); - t.throws(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc", "_"), - CodeScanning.sentinelPrefix, - ), - ); + // Our category sanitization is not perfect. Here are some examples + // of where we see false clashes because we replace some characters + // with `_` in `sanitize`. + t.notThrows(() => + uploadLib.validateUniqueCategory( + createMockSarif("abc", "def__"), + CodeScanning.sentinelPrefix, + ), + ); + t.throws(() => + uploadLib.validateUniqueCategory( + createMockSarif("abc_def", "_"), + CodeScanning.sentinelPrefix, + ), + ); - t.notThrows(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc", "def__"), - CodeScanning.sentinelPrefix, - ), - ); - t.throws(() => - uploadLib.validateUniqueCategory( - createMockSarif("abc_def"), - CodeScanning.sentinelPrefix, - ), - ); + t.notThrows(() => + uploadLib.validateUniqueCategory( + createMockSarif("mno_", "pqr"), + CodeScanning.sentinelPrefix, + ), + ); + t.throws(() => + uploadLib.validateUniqueCategory( + createMockSarif("mno", "_pqr"), + CodeScanning.sentinelPrefix, + ), + ); + }, +); - t.notThrows(() => - uploadLib.validateUniqueCategory( - createMockSarif("mno_", "pqr"), - CodeScanning.sentinelPrefix, - ), - ); - t.throws(() => - uploadLib.validateUniqueCategory( - createMockSarif("mno", "_pqr"), - CodeScanning.sentinelPrefix, - ), - ); -}); - -test("validateUniqueCategory for multiple runs", (t) => { +test.serial("validateUniqueCategory for multiple runs", (t) => { const sarif1 = createMockSarif("abc", "def"); const sarif2 = createMockSarif("ghi", "jkl"); // duplicate categories are allowed within the same sarif file - const multiSarif = { runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]] }; + const multiSarif: sarif.Log = { + version: "2.1.0", + runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]], 
+ }; t.notThrows(() => uploadLib.validateUniqueCategory(multiSarif, CodeScanning.sentinelPrefix), ); @@ -575,7 +589,7 @@ test("validateUniqueCategory for multiple runs", (t) => { ); }); -test("validateUniqueCategory with different prefixes", (t) => { +test.serial("validateUniqueCategory with different prefixes", (t) => { t.notThrows(() => uploadLib.validateUniqueCategory( createMockSarif(), @@ -590,7 +604,7 @@ test("validateUniqueCategory with different prefixes", (t) => { ); }); -test("accept results with invalid artifactLocation.uri value", (t) => { +test.serial("accept results with invalid artifactLocation.uri value", (t) => { const loggedMessages: string[] = []; const mockLogger = { info: (message: string) => { @@ -600,7 +614,7 @@ test("accept results with invalid artifactLocation.uri value", (t) => { const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`; uploadLib.validateSarifFileSchema( - uploadLib.readSarifFile(sarifFile), + uploadLib.readSarifFileOrThrow(sarifFile), sarifFile, mockLogger, ); @@ -613,100 +627,124 @@ test("accept results with invalid artifactLocation.uri value", (t) => { ); }); -test("shouldShowCombineSarifFilesDeprecationWarning when on dotcom", async (t) => { - t.true( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning when on dotcom", + async (t) => { + t.true( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning when on GHES 3.13", async (t) => { - t.false( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], - { - type: GitHubVariant.GHES, - version: "3.13.2", - 
}, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning when on GHES 3.13", + async (t) => { + t.false( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.GHES, + version: "3.13.2", + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning when on GHES 3.14", async (t) => { - t.true( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], - { - type: GitHubVariant.GHES, - version: "3.14.0", - }, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning when on GHES 3.14", + async (t) => { + t.true( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.GHES, + version: "3.14.0", + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning when on GHES 3.16 pre", async (t) => { - t.true( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], - { - type: GitHubVariant.GHES, - version: "3.16.0.pre1", - }, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning when on GHES 3.16 pre", + async (t) => { + t.true( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.GHES, + version: "3.16.0.pre1", + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning with only 1 run", async (t) => { - t.false( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning with only 1 run", + async (t) => { + t.false( + await 
uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning with distinct categories", async (t) => { - t.false( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def"), createMockSarif("def", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning with distinct categories", + async (t) => { + t.false( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "def"), createMockSarif("def", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning with distinct tools", async (t) => { - t.false( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "abc"), createMockSarif("abc", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning with distinct tools", + async (t) => { + t.false( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", "abc"), createMockSarif("abc", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("shouldShowCombineSarifFilesDeprecationWarning when environment variable is already set", async (t) => { - process.env["CODEQL_MERGE_SARIF_DEPRECATION_WARNING"] = "true"; +test.serial( + "shouldShowCombineSarifFilesDeprecationWarning when environment variable is already set", + async (t) => { + process.env["CODEQL_MERGE_SARIF_DEPRECATION_WARNING"] = "true"; - t.false( - await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); + t.false( + await uploadLib.shouldShowCombineSarifFilesDeprecationWarning( + [createMockSarif("abc", 
"def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("throwIfCombineSarifFilesDisabled when on dotcom", async (t) => { +test.serial("throwIfCombineSarifFilesDisabled when on dotcom", async (t) => { await t.throwsAsync( uploadLib.throwIfCombineSarifFilesDisabled( [createMockSarif("abc", "def"), createMockSarif("abc", "def")], @@ -721,7 +759,7 @@ test("throwIfCombineSarifFilesDisabled when on dotcom", async (t) => { ); }); -test("throwIfCombineSarifFilesDisabled when on GHES 3.13", async (t) => { +test.serial("throwIfCombineSarifFilesDisabled when on GHES 3.13", async (t) => { await t.notThrowsAsync( uploadLib.throwIfCombineSarifFilesDisabled( [createMockSarif("abc", "def"), createMockSarif("abc", "def")], @@ -733,7 +771,7 @@ test("throwIfCombineSarifFilesDisabled when on GHES 3.13", async (t) => { ); }); -test("throwIfCombineSarifFilesDisabled when on GHES 3.14", async (t) => { +test.serial("throwIfCombineSarifFilesDisabled when on GHES 3.14", async (t) => { await t.notThrowsAsync( uploadLib.throwIfCombineSarifFilesDisabled( [createMockSarif("abc", "def"), createMockSarif("abc", "def")], @@ -745,7 +783,7 @@ test("throwIfCombineSarifFilesDisabled when on GHES 3.14", async (t) => { ); }); -test("throwIfCombineSarifFilesDisabled when on GHES 3.17", async (t) => { +test.serial("throwIfCombineSarifFilesDisabled when on GHES 3.17", async (t) => { await t.notThrowsAsync( uploadLib.throwIfCombineSarifFilesDisabled( [createMockSarif("abc", "def"), createMockSarif("abc", "def")], @@ -757,39 +795,45 @@ test("throwIfCombineSarifFilesDisabled when on GHES 3.17", async (t) => { ); }); -test("throwIfCombineSarifFilesDisabled when on GHES 3.18 pre", async (t) => { - await t.throwsAsync( - uploadLib.throwIfCombineSarifFilesDisabled( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], +test.serial( + "throwIfCombineSarifFilesDisabled when on GHES 3.18 pre", + async (t) => { + await t.throwsAsync( + 
uploadLib.throwIfCombineSarifFilesDisabled( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.GHES, + version: "3.18.0.pre1", + }, + ), { - type: GitHubVariant.GHES, - version: "3.18.0.pre1", + message: + /The CodeQL Action does not support uploading multiple SARIF runs with the same category/, }, - ), - { - message: - /The CodeQL Action does not support uploading multiple SARIF runs with the same category/, - }, - ); -}); + ); + }, +); -test("throwIfCombineSarifFilesDisabled when on GHES 3.18 alpha", async (t) => { - await t.throwsAsync( - uploadLib.throwIfCombineSarifFilesDisabled( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], +test.serial( + "throwIfCombineSarifFilesDisabled when on GHES 3.18 alpha", + async (t) => { + await t.throwsAsync( + uploadLib.throwIfCombineSarifFilesDisabled( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.GHES, + version: "3.18.0-alpha.1", + }, + ), { - type: GitHubVariant.GHES, - version: "3.18.0-alpha.1", + message: + /The CodeQL Action does not support uploading multiple SARIF runs with the same category/, }, - ), - { - message: - /The CodeQL Action does not support uploading multiple SARIF runs with the same category/, - }, - ); -}); + ); + }, +); -test("throwIfCombineSarifFilesDisabled when on GHES 3.18", async (t) => { +test.serial("throwIfCombineSarifFilesDisabled when on GHES 3.18", async (t) => { await t.throwsAsync( uploadLib.throwIfCombineSarifFilesDisabled( [createMockSarif("abc", "def"), createMockSarif("abc", "def")], @@ -805,19 +849,22 @@ test("throwIfCombineSarifFilesDisabled when on GHES 3.18", async (t) => { ); }); -test("throwIfCombineSarifFilesDisabled with an invalid GHES version", async (t) => { - await t.notThrowsAsync( - uploadLib.throwIfCombineSarifFilesDisabled( - [createMockSarif("abc", "def"), createMockSarif("abc", "def")], - { - type: GitHubVariant.GHES, - version: "foobar", - }, - ), - ); -}); 
+test.serial( + "throwIfCombineSarifFilesDisabled with an invalid GHES version", + async (t) => { + await t.notThrowsAsync( + uploadLib.throwIfCombineSarifFilesDisabled( + [createMockSarif("abc", "def"), createMockSarif("abc", "def")], + { + type: GitHubVariant.GHES, + version: "foobar", + }, + ), + ); + }, +); -test("throwIfCombineSarifFilesDisabled with only 1 run", async (t) => { +test.serial("throwIfCombineSarifFilesDisabled with only 1 run", async (t) => { await t.notThrowsAsync( uploadLib.throwIfCombineSarifFilesDisabled( [createMockSarif("abc", "def")], @@ -828,71 +875,84 @@ test("throwIfCombineSarifFilesDisabled with only 1 run", async (t) => { ); }); -test("throwIfCombineSarifFilesDisabled with distinct categories", async (t) => { - await t.notThrowsAsync( - uploadLib.throwIfCombineSarifFilesDisabled( - [createMockSarif("abc", "def"), createMockSarif("def", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); +test.serial( + "throwIfCombineSarifFilesDisabled with distinct categories", + async (t) => { + await t.notThrowsAsync( + uploadLib.throwIfCombineSarifFilesDisabled( + [createMockSarif("abc", "def"), createMockSarif("def", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("throwIfCombineSarifFilesDisabled with distinct tools", async (t) => { - await t.notThrowsAsync( - uploadLib.throwIfCombineSarifFilesDisabled( - [createMockSarif("abc", "abc"), createMockSarif("abc", "def")], - { - type: GitHubVariant.DOTCOM, - }, - ), - ); -}); +test.serial( + "throwIfCombineSarifFilesDisabled with distinct tools", + async (t) => { + await t.notThrowsAsync( + uploadLib.throwIfCombineSarifFilesDisabled( + [createMockSarif("abc", "abc"), createMockSarif("abc", "def")], + { + type: GitHubVariant.DOTCOM, + }, + ), + ); + }, +); -test("shouldConsiderConfigurationError correctly detects configuration errors", (t) => { - const error1 = [ - "CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled", 
- ]; - t.true(uploadLib.shouldConsiderConfigurationError(error1)); +test.serial( + "shouldConsiderConfigurationError correctly detects configuration errors", + (t) => { + const error1 = [ + "CodeQL analyses from advanced configurations cannot be processed when the default setup is enabled", + ]; + t.true(uploadLib.shouldConsiderConfigurationError(error1)); - const error2 = [ - "rejecting delivery as the repository has too many logical alerts", - ]; - t.true(uploadLib.shouldConsiderConfigurationError(error2)); + const error2 = [ + "rejecting delivery as the repository has too many logical alerts", + ]; + t.true(uploadLib.shouldConsiderConfigurationError(error2)); - // We fail cases where we get > 1 error messages back - const error3 = [ - "rejecting delivery as the repository has too many alerts", - "extra error message", - ]; - t.false(uploadLib.shouldConsiderConfigurationError(error3)); -}); + // We fail cases where we get > 1 error messages back + const error3 = [ + "rejecting delivery as the repository has too many alerts", + "extra error message", + ]; + t.false(uploadLib.shouldConsiderConfigurationError(error3)); + }, +); -test("shouldConsiderInvalidRequest returns correct recognises processing errors", (t) => { - const error1 = [ - "rejecting SARIF", - "an invalid URI was provided as a SARIF location", - ]; - t.true(uploadLib.shouldConsiderInvalidRequest(error1)); +test.serial( + "shouldConsiderInvalidRequest returns correct recognises processing errors", + (t) => { + const error1 = [ + "rejecting SARIF", + "an invalid URI was provided as a SARIF location", + ]; + t.true(uploadLib.shouldConsiderInvalidRequest(error1)); - const error2 = [ - "locationFromSarifResult: expected artifact location", - "an invalid URI was provided as a SARIF location", - ]; - t.true(uploadLib.shouldConsiderInvalidRequest(error2)); + const error2 = [ + "locationFromSarifResult: expected artifact location", + "an invalid URI was provided as a SARIF location", + ]; + 
t.true(uploadLib.shouldConsiderInvalidRequest(error2)); - // We expect ALL errors to be of processing errors, for the outcome to be classified as - // an invalid SARIF upload error. - const error3 = [ - "could not convert rules: invalid security severity value, is not a number", - "an unknown error occurred", - ]; - t.false(uploadLib.shouldConsiderInvalidRequest(error3)); -}); + // We expect ALL errors to be of processing errors, for the outcome to be classified as + // an invalid SARIF upload error. + const error3 = [ + "could not convert rules: invalid security severity value, is not a number", + "an unknown error occurred", + ]; + t.false(uploadLib.shouldConsiderInvalidRequest(error3)); + }, +); -function createMockSarif(id?: string, tool?: string) { +function createMockSarif(id?: string, tool?: string): sarif.Log { return { + version: "2.1.0", runs: [ { automationDetails: { @@ -900,7 +960,7 @@ function createMockSarif(id?: string, tool?: string) { }, tool: { driver: { - name: tool, + name: tool || "test tool", }, }, }, @@ -953,55 +1013,70 @@ function uploadPayloadFixtures(analysis: analyses.AnalysisConfig) { for (const analysisKind of analyses.supportedAnalysisKinds) { const analysis = analyses.getAnalysisConfig(analysisKind); - test(`uploadPayload on ${analysis.name} uploads successfully`, async (t) => { - const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis); - requestStub - .withArgs(analysis.target, { - owner: mockData.owner, - repo: mockData.repo, - data: mockData.payload, - }) - .onFirstCall() - .returns(Promise.resolve(mockData.response)); - const result = await upload(); - t.is(result, mockData.response.data.id); - t.true(requestStub.calledOnce); - }); + test.serial( + `uploadPayload on ${analysis.name} uploads successfully`, + async (t) => { + const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis); + requestStub + .withArgs(analysis.target, { + owner: mockData.owner, + repo: mockData.repo, + data: 
mockData.payload, + }) + .onFirstCall() + .returns(Promise.resolve(mockData.response)); + const result = await upload(); + t.is(result, mockData.response.data.id); + t.true(requestStub.calledOnce); + }, + ); for (const envVar of [ "CODEQL_ACTION_SKIP_SARIF_UPLOAD", "CODEQL_ACTION_TEST_MODE", ]) { - test(`uploadPayload on ${analysis.name} skips upload when ${envVar} is set`, async (t) => { - const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis); - await withTmpDir(async (tmpDir) => { - process.env.RUNNER_TEMP = tmpDir; - process.env[envVar] = "true"; - const result = await upload(); - t.is(result, "dummy-sarif-id"); - t.false(requestStub.called); + test.serial( + `uploadPayload on ${analysis.name} skips upload when ${envVar} is set`, + async (t) => { + const { upload, requestStub, mockData } = + uploadPayloadFixtures(analysis); + await withTmpDir(async (tmpDir) => { + process.env.RUNNER_TEMP = tmpDir; + process.env[envVar] = "true"; + const result = await upload(); + t.is(result, "dummy-sarif-id"); + t.false(requestStub.called); - const payloadFile = path.join(tmpDir, `payload-${analysis.kind}.json`); - t.true(fs.existsSync(payloadFile)); + const payloadFile = path.join( + tmpDir, + `payload-${analysis.kind}.json`, + ); + t.true(fs.existsSync(payloadFile)); - const savedPayload = JSON.parse(fs.readFileSync(payloadFile, "utf8")); - t.deepEqual(savedPayload, mockData.payload); - }); - }); + const savedPayload = JSON.parse(fs.readFileSync(payloadFile, "utf8")); + t.deepEqual(savedPayload, mockData.payload); + }); + }, + ); } - test(`uploadPayload on ${analysis.name} wraps request errors using wrapApiConfigurationError`, async (t) => { - const { upload, requestStub } = uploadPayloadFixtures(analysis); - const wrapApiConfigurationErrorStub = sinon.stub( - api, - "wrapApiConfigurationError", - ); - const originalError = new HTTPError(404); - const wrappedError = new Error("Wrapped error message"); - requestStub.rejects(originalError); - 
wrapApiConfigurationErrorStub.withArgs(originalError).returns(wrappedError); - await t.throwsAsync(upload, { - is: wrappedError, - }); - }); + test.serial( + `uploadPayload on ${analysis.name} wraps request errors using wrapApiConfigurationError`, + async (t) => { + const { upload, requestStub } = uploadPayloadFixtures(analysis); + const wrapApiConfigurationErrorStub = sinon.stub( + api, + "wrapApiConfigurationError", + ); + const originalError = new HTTPError(404); + const wrappedError = new Error("Wrapped error message"); + requestStub.rejects(originalError); + wrapApiConfigurationErrorStub + .withArgs(originalError) + .returns(wrappedError); + await t.throwsAsync(upload, { + is: wrappedError, + }); + }, + ); } diff --git a/src/upload-lib.ts b/src/upload-lib.ts index 88f8276ae..249882533 100644 --- a/src/upload-lib.ts +++ b/src/upload-lib.ts @@ -21,6 +21,13 @@ import * as gitUtils from "./git-utils"; import { initCodeQL } from "./init"; import { Logger } from "./logging"; import { getRepositoryNwo, RepositoryNwo } from "./repository"; +import * as sarif from "./sarif"; +import { + areAllRunsProducedByCodeQL, + areAllRunsUnique, + combineSarifFiles, + InvalidSarifUploadError, +} from "./sarif"; import { BasePayload, UploadPayload } from "./upload-lib/types"; import * as util from "./util"; import { @@ -30,8 +37,6 @@ import { GitHubVariant, GitHubVersion, satisfiesGHESVersion, - SarifFile, - SarifRun, } from "./util"; const GENERIC_403_MSG = @@ -39,94 +44,9 @@ const GENERIC_403_MSG = const GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository."; -// Takes a list of paths to sarif files and combines them together, -// returning the contents of the combined sarif file. 
-function combineSarifFiles(sarifFiles: string[], logger: Logger): SarifFile { - logger.info(`Loading SARIF file(s)`); - const combinedSarif: SarifFile = { - version: null, - runs: [], - }; - - for (const sarifFile of sarifFiles) { - logger.debug(`Loading SARIF file: ${sarifFile}`); - const sarifObject = JSON.parse( - fs.readFileSync(sarifFile, "utf8"), - ) as SarifFile; - // Check SARIF version - if (combinedSarif.version === null) { - combinedSarif.version = sarifObject.version; - } else if (combinedSarif.version !== sarifObject.version) { - throw new InvalidSarifUploadError( - `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`, - ); - } - - combinedSarif.runs.push(...sarifObject.runs); - } - - return combinedSarif; -} - -/** - * Checks whether all the runs in the given SARIF files were produced by CodeQL. - * @param sarifObjects The list of SARIF objects to check. - */ -function areAllRunsProducedByCodeQL(sarifObjects: SarifFile[]): boolean { - return sarifObjects.every((sarifObject) => { - return sarifObject.runs?.every( - (run) => run.tool?.driver?.name === "CodeQL", - ); - }); -} - -type SarifRunKey = { - name: string | undefined; - fullName: string | undefined; - version: string | undefined; - semanticVersion: string | undefined; - guid: string | undefined; - automationId: string | undefined; -}; - -function createRunKey(run: SarifRun): SarifRunKey { - return { - name: run.tool?.driver?.name, - fullName: run.tool?.driver?.fullName, - version: run.tool?.driver?.version, - semanticVersion: run.tool?.driver?.semanticVersion, - guid: run.tool?.driver?.guid, - automationId: run.automationDetails?.id, - }; -} - -/** - * Checks whether all runs in the given SARIF files are unique (based on the - * criteria used by Code Scanning to determine analysis categories). - * @param sarifObjects The list of SARIF objects to check. 
- */ -function areAllRunsUnique(sarifObjects: SarifFile[]): boolean { - const keys = new Set(); - - for (const sarifObject of sarifObjects) { - for (const run of sarifObject.runs) { - const key = JSON.stringify(createRunKey(run)); - - // If the key already exists, the runs are not unique. - if (keys.has(key)) { - return false; - } - - keys.add(key); - } - } - - return true; -} - // Checks whether the deprecation warning for combining SARIF files should be shown. export async function shouldShowCombineSarifFilesDeprecationWarning( - sarifObjects: util.SarifFile[], + sarifObjects: Array>, githubVersion: GitHubVersion, ) { // Do not show this warning on GHES versions before 3.14.0 @@ -146,7 +66,7 @@ export async function shouldShowCombineSarifFilesDeprecationWarning( } export async function throwIfCombineSarifFilesDisabled( - sarifObjects: util.SarifFile[], + sarifObjects: Array>, githubVersion: GitHubVersion, ) { if (!(await shouldDisableCombineSarifFiles(sarifObjects, githubVersion))) { @@ -163,7 +83,7 @@ export async function throwIfCombineSarifFilesDisabled( // Checks whether combining SARIF files should be disabled. 
async function shouldDisableCombineSarifFiles( - sarifObjects: util.SarifFile[], + sarifObjects: Array>, githubVersion: GitHubVersion, ) { if (githubVersion.type === GitHubVariant.GHES) { @@ -192,12 +112,10 @@ async function combineSarifFilesUsingCLI( gitHubVersion: GitHubVersion, features: FeatureEnablement, logger: Logger, -): Promise { +): Promise> { logger.info("Combining SARIF files using the CodeQL CLI"); - const sarifObjects = sarifFiles.map((sarifFile): SarifFile => { - return JSON.parse(fs.readFileSync(sarifFile, "utf8")) as SarifFile; - }); + const sarifObjects = sarifFiles.map(sarif.readSarifFile); const deprecationWarningMessage = gitHubVersion.type === GitHubVariant.GHES @@ -279,30 +197,30 @@ async function combineSarifFilesUsingCLI( mergeRunsFromEqualCategory: true, }); - return JSON.parse(fs.readFileSync(outputFile, "utf8")) as SarifFile; + return sarif.readSarifFile(outputFile); } // Populates the run.automationDetails.id field using the analysis_key and environment // and return an updated sarif file contents. 
export function populateRunAutomationDetails( - sarif: SarifFile, + sarifFile: Partial, category: string | undefined, analysis_key: string, environment: string | undefined, -): SarifFile { +): Partial { const automationID = getAutomationID(category, analysis_key, environment); if (automationID !== undefined) { - for (const run of sarif.runs || []) { + for (const run of sarifFile.runs || []) { if (run.automationDetails === undefined) { run.automationDetails = { id: automationID, }; } } - return sarif; + return sarifFile; } - return sarif; + return sarifFile; } function getAutomationID( @@ -511,9 +429,9 @@ export async function getGroupedSarifFilePaths( } // Counts the number of results in the given SARIF file -function countResultsInSarif(sarif: string): number { +function countResultsInSarif(sarifLog: string): number { let numResults = 0; - const parsedSarif = JSON.parse(sarif); + const parsedSarif = JSON.parse(sarifLog); if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array."); } @@ -529,9 +447,15 @@ function countResultsInSarif(sarif: string): number { return numResults; } -export function readSarifFile(sarifFilePath: string): SarifFile { +/** A thin wrapper around `readSarifFile` which wraps exceptions in `InvalidSarifUploadError`. + * + * @throws InvalidSarifUploadError If parsing the SARIF file as JSON failed. + */ +export function readSarifFileOrThrow( + sarifFilePath: string, +): Partial { try { - return JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile; + return sarif.readSarifFile(sarifFilePath); } catch (e) { throw new InvalidSarifUploadError( `Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`, @@ -542,26 +466,26 @@ export function readSarifFile(sarifFilePath: string): SarifFile { // Validates the given SARIF object and throws an error if the SARIF object is invalid. // The file path is only used in error messages to improve clarity. 
export function validateSarifFileSchema( - sarif: SarifFile, + sarifLog: Partial, sarifFilePath: string, logger: Logger, -) { +): sarifLog is sarif.Log { if ( - areAllRunsProducedByCodeQL([sarif]) && + areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments. !util.getTestingEnvironment() ) { logger.debug( `Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`, ); - return; + return true; } logger.info(`Validating ${sarifFilePath}`); // eslint-disable-next-line @typescript-eslint/no-require-imports const schema = require("../src/sarif-schema-2.1.0.json") as jsonschema.Schema; - const result = new jsonschema.Validator().validate(sarif, schema); + const result = new jsonschema.Validator().validate(sarifLog, schema); // Filter errors related to invalid URIs in the artifactLocation field as this // is a breaking change. See https://github.com/github/codeql-action/issues/1703 const warningAttributes = ["uri-reference", "uri"]; @@ -603,6 +527,8 @@ export function validateSarifFileSchema( )}`, ); } + + return true; } // buildPayload constructs a map ready to be uploaded to the API from the given @@ -663,7 +589,7 @@ export function buildPayload( } export interface PostProcessingResults { - sarif: util.SarifFile; + sarif: Partial; analysisKey: string; environment: string; } @@ -693,17 +619,17 @@ export async function postProcessSarifFiles( const gitHubVersion = await getGitHubVersion(); - let sarif: SarifFile; + let sarifLog: Partial; category = analysis.fixCategory(logger, category); if (sarifPaths.length > 1) { // Validate that the files we were asked to upload are all valid SARIF files for (const sarifPath of sarifPaths) { - const parsedSarif = readSarifFile(sarifPath); + const parsedSarif = readSarifFileOrThrow(sarifPath); validateSarifFileSchema(parsedSarif, sarifPath, logger); } - sarif = await combineSarifFilesUsingCLI( + sarifLog = await combineSarifFilesUsingCLI( sarifPaths, 
gitHubVersion, features, @@ -711,26 +637,26 @@ export async function postProcessSarifFiles( ); } else { const sarifPath = sarifPaths[0]; - sarif = readSarifFile(sarifPath); - validateSarifFileSchema(sarif, sarifPath, logger); + sarifLog = readSarifFileOrThrow(sarifPath); + validateSarifFileSchema(sarifLog, sarifPath, logger); // Validate that there are no runs for the same category - await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion); + await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion); } - sarif = filterAlertsByDiffRange(logger, sarif); - sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger); + sarifLog = filterAlertsByDiffRange(logger, sarifLog); + sarifLog = await fingerprints.addFingerprints(sarifLog, checkoutPath, logger); const analysisKey = await api.getAnalysisKey(); const environment = actionsUtil.getRequiredInput("matrix"); - sarif = populateRunAutomationDetails( - sarif, + sarifLog = populateRunAutomationDetails( + sarifLog, category, analysisKey, environment, ); - return { sarif, analysisKey, environment }; + return { sarif: sarifLog, analysisKey, environment }; } /** @@ -836,13 +762,13 @@ export async function uploadPostProcessedFiles( ): Promise { logger.startGroup(`Uploading ${uploadTarget.name} results`); - const sarif = postProcessingResults.sarif; - const toolNames = util.getToolNames(sarif); + const sarifLog = postProcessingResults.sarif; + const toolNames = sarif.getToolNames(sarifLog); logger.debug(`Validating that each SARIF run has a unique category`); - validateUniqueCategory(sarif, uploadTarget.sentinelPrefix); + validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix); logger.debug(`Serializing SARIF for upload`); - const sarifPayload = JSON.stringify(sarif); + const sarifPayload = JSON.stringify(sarifLog); logger.debug(`Compressing serialized SARIF`); const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64"); @@ -1085,14 +1011,14 @@ function 
handleProcessingResultForUnsuccessfulExecution( } export function validateUniqueCategory( - sarif: SarifFile, + sarifLog: Partial, sentinelPrefix: string, ): void { // duplicate categories are allowed in the same sarif file // but not across multiple sarif files const categories = {} as Record; - for (const run of sarif.runs) { + for (const run of sarifLog.runs || []) { const id = run?.automationDetails?.id; const tool = run.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; @@ -1127,20 +1053,22 @@ function sanitize(str?: string) { return (str ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase(); } -/** - * An error that occurred due to an invalid SARIF upload request. - */ -export class InvalidSarifUploadError extends Error {} - -function filterAlertsByDiffRange(logger: Logger, sarif: SarifFile): SarifFile { +function filterAlertsByDiffRange( + logger: Logger, + sarifLog: Partial, +): Partial { const diffRanges = readDiffRangesJsonFile(logger); if (!diffRanges?.length) { - return sarif; + return sarifLog; + } + + if (sarifLog.runs === undefined) { + return sarifLog; } const checkoutPath = actionsUtil.getRequiredInput("checkout_path"); - for (const run of sarif.runs) { + for (const run of sarifLog.runs) { if (run.results) { run.results = run.results.filter((result) => { const locations = [ @@ -1176,5 +1104,5 @@ function filterAlertsByDiffRange(logger: Logger, sarif: SarifFile): SarifFile { } } - return sarif; + return sarifLog; } diff --git a/src/upload-sarif-action.ts b/src/upload-sarif-action.ts index cec41b276..896c14458 100644 --- a/src/upload-sarif-action.ts +++ b/src/upload-sarif-action.ts @@ -7,6 +7,7 @@ import { getGitHubVersion } from "./api-client"; import { initFeatures } from "./feature-flags"; import { Logger, getActionsLogger } from "./logging"; import { getRepositoryNwo } from "./repository"; +import { InvalidSarifUploadError } from "./sarif"; import { createStatusReportBase, sendStatusReport, @@ -141,7 +142,7 @@ async 
function run(startedAt: Date) { } catch (unwrappedError) { const error = isThirdPartyAnalysis(ActionName.UploadSarif) && - unwrappedError instanceof upload_lib.InvalidSarifUploadError + unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError); const message = error.message; diff --git a/src/upload-sarif.test.ts b/src/upload-sarif.test.ts index d32c0c031..fcd5c3108 100644 --- a/src/upload-sarif.test.ts +++ b/src/upload-sarif.test.ts @@ -33,7 +33,11 @@ function mockPostProcessSarifFiles() { sinon.match.any, analysisConfig, ) - .resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" }); + .resolves({ + sarif: { version: "2.1.0", runs: [] }, + analysisKey: "", + environment: "", + }); } return postProcessSarifFiles; @@ -119,7 +123,7 @@ const postProcessAndUploadSarifMacro = test.macro({ title: (providedTitle = "") => `processAndUploadSarif - ${providedTitle}`, }); -test( +test.serial( "SARIF file", postProcessAndUploadSarifMacro, ["test.sarif"], @@ -134,7 +138,7 @@ test( }, ); -test( +test.serial( "JSON file", postProcessAndUploadSarifMacro, ["test.json"], @@ -149,7 +153,7 @@ test( }, ); -test( +test.serial( "Code Scanning files", postProcessAndUploadSarifMacro, ["test.json", "test.sarif"], @@ -165,7 +169,7 @@ test( }, ); -test( +test.serial( "Code Quality file", postProcessAndUploadSarifMacro, ["test.quality.sarif"], @@ -180,7 +184,7 @@ test( }, ); -test( +test.serial( "Mixed files", postProcessAndUploadSarifMacro, ["test.sarif", "test.quality.sarif"], @@ -203,64 +207,70 @@ test( }, ); -test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) => { - await util.withTmpDir(async (tempDir) => { - const logger = getRunnerLogger(true); - const features = createFeatures([]); +test.serial( + "postProcessAndUploadSarif doesn't upload if upload is disabled", + async (t) => { + await util.withTmpDir(async (tempDir) => { + const logger = getRunnerLogger(true); + const features 
= createFeatures([]); - const toFullPath = (filename: string) => path.join(tempDir, filename); + const toFullPath = (filename: string) => path.join(tempDir, filename); - const postProcessSarifFiles = mockPostProcessSarifFiles(); - const uploadPostProcessedFiles = sinon.stub( - uploadLib, - "uploadPostProcessedFiles", - ); + const postProcessSarifFiles = mockPostProcessSarifFiles(); + const uploadPostProcessedFiles = sinon.stub( + uploadLib, + "uploadPostProcessedFiles", + ); - fs.writeFileSync(toFullPath("test.sarif"), ""); - fs.writeFileSync(toFullPath("test.quality.sarif"), ""); + fs.writeFileSync(toFullPath("test.sarif"), ""); + fs.writeFileSync(toFullPath("test.quality.sarif"), ""); - const actual = await postProcessAndUploadSarif( - logger, - features, - "never", - "", - tempDir, - ); + const actual = await postProcessAndUploadSarif( + logger, + features, + "never", + "", + tempDir, + ); - t.truthy(actual); - t.assert(postProcessSarifFiles.calledTwice); - t.assert(uploadPostProcessedFiles.notCalled); - }); -}); + t.truthy(actual); + t.assert(postProcessSarifFiles.calledTwice); + t.assert(uploadPostProcessedFiles.notCalled); + }); + }, +); -test("postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", async (t) => { - await util.withTmpDir(async (tempDir) => { - const logger = getRunnerLogger(true); - const features = createFeatures([]); +test.serial( + "postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", + async (t) => { + await util.withTmpDir(async (tempDir) => { + const logger = getRunnerLogger(true); + const features = createFeatures([]); - const toFullPath = (filename: string) => path.join(tempDir, filename); + const toFullPath = (filename: string) => path.join(tempDir, filename); - const postProcessSarifFiles = mockPostProcessSarifFiles(); + const postProcessSarifFiles = mockPostProcessSarifFiles(); - fs.writeFileSync(toFullPath("test.sarif"), ""); - 
fs.writeFileSync(toFullPath("test.quality.sarif"), ""); + fs.writeFileSync(toFullPath("test.sarif"), ""); + fs.writeFileSync(toFullPath("test.quality.sarif"), ""); - const postProcessedOutPath = path.join(tempDir, "post-processed"); - const actual = await postProcessAndUploadSarif( - logger, - features, - "never", - "", - tempDir, - "", - postProcessedOutPath, - ); + const postProcessedOutPath = path.join(tempDir, "post-processed"); + const actual = await postProcessAndUploadSarif( + logger, + features, + "never", + "", + tempDir, + "", + postProcessedOutPath, + ); - t.truthy(actual); - t.assert(postProcessSarifFiles.calledTwice); - t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif"))); - t.assert( - fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")), - ); - }); -}); + t.truthy(actual); + t.assert(postProcessSarifFiles.calledTwice); + t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif"))); + t.assert( + fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")), + ); + }); + }, +); diff --git a/src/util.test.ts b/src/util.test.ts index 7b6850018..63b9263e0 100644 --- a/src/util.test.ts +++ b/src/util.test.ts @@ -10,20 +10,11 @@ import * as sinon from "sinon"; import * as api from "./api-client"; import { EnvVar } from "./environment"; import { getRunnerLogger } from "./logging"; -import { getRecordingLogger, LoggedMessage, setupTests } from "./testing-utils"; +import { setupTests } from "./testing-utils"; import * as util from "./util"; setupTests(test); -test("getToolNames", (t) => { - const input = fs.readFileSync( - `${__dirname}/../src/testdata/tool-names.sarif`, - "utf8", - ); - const toolNames = util.getToolNames(JSON.parse(input) as util.SarifFile); - t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]); -}); - const GET_MEMORY_FLAG_TESTS = [ { input: undefined, @@ -77,22 +68,25 @@ for (const { expectedMemoryValue, reservedPercentageValue, } of GET_MEMORY_FLAG_TESTS) { - 
test(`Memory flag value is ${expectedMemoryValue} for ${ - input ?? "no user input" - } on ${platform} with ${totalMemoryMb} MB total system RAM${ - reservedPercentageValue - ? ` and reserved percentage env var set to ${reservedPercentageValue}` - : "" - }`, async (t) => { - process.env[EnvVar.SCALING_RESERVED_RAM_PERCENTAGE] = - reservedPercentageValue || undefined; - const flag = util.getMemoryFlagValueForPlatform( - input, - totalMemoryMb * 1024 * 1024, - platform, - ); - t.deepEqual(flag, expectedMemoryValue); - }); + test.serial( + `Memory flag value is ${expectedMemoryValue} for ${ + input ?? "no user input" + } on ${platform} with ${totalMemoryMb} MB total system RAM${ + reservedPercentageValue + ? ` and reserved percentage env var set to ${reservedPercentageValue}` + : "" + }`, + async (t) => { + process.env[EnvVar.SCALING_RESERVED_RAM_PERCENTAGE] = + reservedPercentageValue || undefined; + const flag = util.getMemoryFlagValueForPlatform( + input, + totalMemoryMb * 1024 * 1024, + platform, + ); + t.deepEqual(flag, expectedMemoryValue); + }, + ); } test("getMemoryFlag() throws if the ram input is < 0 or NaN", async (t) => { @@ -123,19 +117,22 @@ test("getThreadsFlag() throws if the threads input is not an integer", (t) => { t.throws(() => util.getThreadsFlag("hello!", getRunnerLogger(true))); }); -test("getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", (t) => { - const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS; +test.serial( + "getExtraOptionsEnvParam() succeeds on valid JSON with invalid options (for now)", + (t) => { + const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS; - const options = { foo: 42 }; + const options = { foo: 42 }; - process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options); + process.env.CODEQL_ACTION_EXTRA_OPTIONS = JSON.stringify(options); - t.deepEqual(util.getExtraOptionsEnvParam(), options); + t.deepEqual(util.getExtraOptionsEnvParam(), options); - 
process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions; -}); + process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions; + }, +); -test("getExtraOptionsEnvParam() succeeds on valid JSON options", (t) => { +test.serial("getExtraOptionsEnvParam() succeeds on valid JSON options", (t) => { const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS; const options = { database: { init: ["--debug"] } }; @@ -146,7 +143,7 @@ test("getExtraOptionsEnvParam() succeeds on valid JSON options", (t) => { process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions; }); -test("getExtraOptionsEnvParam() succeeds on valid YAML options", (t) => { +test.serial("getExtraOptionsEnvParam() succeeds on valid YAML options", (t) => { const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS; const options = { database: { init: ["--debug"] } }; @@ -157,7 +154,7 @@ test("getExtraOptionsEnvParam() succeeds on valid YAML options", (t) => { process.env.CODEQL_ACTION_EXTRA_OPTIONS = origExtraOptions; }); -test("getExtraOptionsEnvParam() fails on invalid JSON", (t) => { +test.serial("getExtraOptionsEnvParam() fails on invalid JSON", (t) => { const origExtraOptions = process.env.CODEQL_ACTION_EXTRA_OPTIONS; process.env.CODEQL_ACTION_EXTRA_OPTIONS = "{{invalid-json}"; @@ -242,7 +239,7 @@ test("allowed API versions", async (t) => { ); }); -test("getRequiredEnvParam - gets environment variables", (t) => { +test.serial("getRequiredEnvParam - gets environment variables", (t) => { process.env.SOME_UNIT_TEST_VAR = "foo"; const result = util.getRequiredEnvParam("SOME_UNIT_TEST_VAR"); t.is(result, "foo"); @@ -252,17 +249,20 @@ test("getRequiredEnvParam - throws if an environment variable isn't set", (t) => t.throws(() => util.getRequiredEnvParam("SOME_UNIT_TEST_VAR")); }); -test("getOptionalEnvVar - gets environment variables", (t) => { +test.serial("getOptionalEnvVar - gets environment variables", (t) => { process.env.SOME_UNIT_TEST_VAR = "foo"; const result = 
util.getOptionalEnvVar("SOME_UNIT_TEST_VAR"); t.is(result, "foo"); }); -test("getOptionalEnvVar - gets undefined for empty environment variables", (t) => { - process.env.SOME_UNIT_TEST_VAR = ""; - const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR"); - t.is(result, undefined); -}); +test.serial( + "getOptionalEnvVar - gets undefined for empty environment variables", + (t) => { + process.env.SOME_UNIT_TEST_VAR = ""; + const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR"); + t.is(result, undefined); + }, +); test("getOptionalEnvVar - doesn't throw for undefined environment variables", (t) => { t.notThrows(() => { @@ -368,67 +368,6 @@ test("waitForResultWithTimeLimit doesn't call callback if promise resolves", asy t.deepEqual(result, 99); }); -function createMockSarifWithNotification( - locations: util.SarifLocation[], -): util.SarifFile { - return { - runs: [ - { - tool: { - driver: { - name: "CodeQL", - }, - }, - invocations: [ - { - toolExecutionNotifications: [ - { - locations, - }, - ], - }, - ], - }, - ], - }; -} - -const stubLocation: util.SarifLocation = { - physicalLocation: { - artifactLocation: { - uri: "file1", - }, - }, -}; - -test("fixInvalidNotifications leaves notifications with unique locations alone", (t) => { - const messages: LoggedMessage[] = []; - const result = util.fixInvalidNotifications( - createMockSarifWithNotification([stubLocation]), - getRecordingLogger(messages), - ); - t.deepEqual(result, createMockSarifWithNotification([stubLocation])); - t.is(messages.length, 1); - t.deepEqual(messages[0], { - type: "debug", - message: "No duplicate locations found in SARIF notification objects.", - }); -}); - -test("fixInvalidNotifications removes duplicate locations", (t) => { - const messages: LoggedMessage[] = []; - const result = util.fixInvalidNotifications( - createMockSarifWithNotification([stubLocation, stubLocation]), - getRecordingLogger(messages), - ); - t.deepEqual(result, createMockSarifWithNotification([stubLocation])); - 
t.is(messages.length, 1); - t.deepEqual(messages[0], { - type: "info", - message: "Removed 1 duplicate locations from SARIF notification objects.", - }); -}); - function formatGitHubVersion(version: util.GitHubVersion): string { switch (version.type) { case util.GitHubVariant.DOTCOM: @@ -475,27 +414,32 @@ for (const [ const versionsDescription = `CodeQL Action version ${version} and GitHub version ${formatGitHubVersion( githubVersion, )}`; - test(`checkActionVersion ${reportErrorDescription} for ${versionsDescription}`, async (t) => { - const warningSpy = sinon.spy(core, "warning"); - const versionStub = sinon - .stub(api, "getGitHubVersion") - .resolves(githubVersion); + test.serial( + `checkActionVersion ${reportErrorDescription} for ${versionsDescription}`, + async (t) => { + const warningSpy = sinon.spy(core, "warning"); + const versionStub = sinon + .stub(api, "getGitHubVersion") + .resolves(githubVersion); - // call checkActionVersion twice and assert below that warning is reported only once - util.checkActionVersion(version, await api.getGitHubVersion()); - util.checkActionVersion(version, await api.getGitHubVersion()); + // call checkActionVersion twice and assert below that warning is reported only once + util.checkActionVersion(version, await api.getGitHubVersion()); + util.checkActionVersion(version, await api.getGitHubVersion()); - if (shouldReportError) { - t.true( - warningSpy.calledOnceWithExactly( - sinon.match("CodeQL Action v3 will be deprecated in December 2026."), - ), - ); - } else { - t.false(warningSpy.called); - } - versionStub.restore(); - }); + if (shouldReportError) { + t.true( + warningSpy.calledOnceWithExactly( + sinon.match( + "CodeQL Action v3 will be deprecated in December 2026.", + ), + ), + ); + } else { + t.false(warningSpy.called); + } + versionStub.restore(); + }, + ); } test("getCgroupCpuCountFromCpus calculates the number of CPUs correctly", async (t) => { @@ -531,14 +475,17 @@ test("getCgroupCpuCountFromCpus returns undefined 
if the CPU file exists but is }); }); -test("checkDiskUsage succeeds and produces positive numbers", async (t) => { - process.env["GITHUB_WORKSPACE"] = os.tmpdir(); - const diskUsage = await util.checkDiskUsage(getRunnerLogger(true)); - if (t.truthy(diskUsage)) { - t.true(diskUsage.numAvailableBytes > 0); - t.true(diskUsage.numTotalBytes > 0); - } -}); +test.serial( + "checkDiskUsage succeeds and produces positive numbers", + async (t) => { + process.env["GITHUB_WORKSPACE"] = os.tmpdir(); + const diskUsage = await util.checkDiskUsage(getRunnerLogger(true)); + if (t.truthy(diskUsage)) { + t.true(diskUsage.numAvailableBytes > 0); + t.true(diskUsage.numTotalBytes > 0); + } + }, +); test("joinAtMost - behaves like join if limit is <= 0", (t) => { const sep = ", "; diff --git a/src/util.ts b/src/util.ts index 823291a0a..6db56e722 100644 --- a/src/util.ts +++ b/src/util.ts @@ -55,78 +55,6 @@ const DEFAULT_RESERVED_RAM_SCALING_FACTOR = 0.05; */ const MINIMUM_CGROUP_MEMORY_LIMIT_BYTES = 1024 * 1024; -export interface SarifFile { - version?: string | null; - runs: SarifRun[]; -} - -export interface SarifRun { - tool?: { - driver?: { - guid?: string; - name?: string; - fullName?: string; - semanticVersion?: string; - version?: string; - }; - }; - automationDetails?: { - id?: string; - }; - artifacts?: string[]; - invocations?: SarifInvocation[]; - results?: SarifResult[]; -} - -export interface SarifInvocation { - toolExecutionNotifications?: SarifNotification[]; -} - -export interface SarifResult { - ruleId?: string; - rule?: { - id?: string; - }; - message?: { - text?: string; - }; - locations: Array<{ - physicalLocation: { - artifactLocation: { - uri: string; - }; - region?: { - startLine?: number; - }; - }; - }>; - relatedLocations?: Array<{ - physicalLocation: { - artifactLocation: { - uri: string; - }; - region?: { - startLine?: number; - }; - }; - }>; - partialFingerprints: { - primaryLocationLineHash?: string; - }; -} - -export interface SarifNotification { - 
locations?: SarifLocation[]; -} - -export interface SarifLocation { - physicalLocation?: { - artifactLocation?: { - uri?: string; - }; - }; -} - /** * Get the extra options for the codeql commands. */ @@ -146,25 +74,6 @@ export function getExtraOptionsEnvParam(): object { } } -/** - * Get the array of all the tool names contained in the given sarif contents. - * - * Returns an array of unique string tool names. - */ -export function getToolNames(sarif: SarifFile): string[] { - const toolNames = {}; - - for (const run of sarif.runs || []) { - const tool = run.tool || {}; - const driver = tool.driver || {}; - if (typeof driver.name === "string" && driver.name.length > 0) { - toolNames[driver.name] = true; - } - } - - return Object.keys(toolNames); -} - // Creates a random temporary directory, runs the given body, and then deletes the directory. // Mostly intended for use within tests. export async function withTmpDir( @@ -984,80 +893,6 @@ export function parseMatrixInput( return JSON.parse(matrixInput) as { [key: string]: string }; } -function removeDuplicateLocations(locations: SarifLocation[]): SarifLocation[] { - const newJsonLocations = new Set(); - return locations.filter((location) => { - const jsonLocation = JSON.stringify(location); - if (!newJsonLocations.has(jsonLocation)) { - newJsonLocations.add(jsonLocation); - return true; - } - return false; - }); -} - -export function fixInvalidNotifications( - sarif: SarifFile, - logger: Logger, -): SarifFile { - if (!Array.isArray(sarif.runs)) { - return sarif; - } - - // Ensure that the array of locations for each SARIF notification contains unique locations. - // This is a workaround for a bug in the CodeQL CLI that causes duplicate locations to be - // emitted in some cases. 
- let numDuplicateLocationsRemoved = 0; - - const newSarif = { - ...sarif, - runs: sarif.runs.map((run) => { - if ( - run.tool?.driver?.name !== "CodeQL" || - !Array.isArray(run.invocations) - ) { - return run; - } - return { - ...run, - invocations: run.invocations.map((invocation) => { - if (!Array.isArray(invocation.toolExecutionNotifications)) { - return invocation; - } - return { - ...invocation, - toolExecutionNotifications: - invocation.toolExecutionNotifications.map((notification) => { - if (!Array.isArray(notification.locations)) { - return notification; - } - const newLocations = removeDuplicateLocations( - notification.locations, - ); - numDuplicateLocationsRemoved += - notification.locations.length - newLocations.length; - return { - ...notification, - locations: newLocations, - }; - }), - }; - }), - }; - }), - }; - - if (numDuplicateLocationsRemoved > 0) { - logger.info( - `Removed ${numDuplicateLocationsRemoved} duplicate locations from SARIF notification ` + - "objects.", - ); - } else { - logger.debug("No duplicate locations found in SARIF notification objects."); - } - return newSarif; -} - export function wrapError(error: unknown): Error { return error instanceof Error ? 
error : new Error(String(error)); } diff --git a/src/workflow.test.ts b/src/workflow.test.ts index f05ad5485..67f969040 100644 --- a/src/workflow.test.ts +++ b/src/workflow.test.ts @@ -306,7 +306,7 @@ test("getWorkflowErrors() when on.pull_request for wildcard branches", async (t) t.deepEqual(...errorCodes(errors, [])); }); -test("getWorkflowErrors() when HEAD^2 is checked out", async (t) => { +test.serial("getWorkflowErrors() when HEAD^2 is checked out", async (t) => { process.env.GITHUB_JOB = "test"; const errors = await getWorkflowErrors( @@ -320,47 +320,59 @@ test("getWorkflowErrors() when HEAD^2 is checked out", async (t) => { t.deepEqual(...errorCodes(errors, [WorkflowErrors.CheckoutWrongHead])); }); -test("getWorkflowErrors() produces an error for workflow with language name and its alias", async (t) => { - await testLanguageAliases( - t, - ["java", "kotlin"], - { java: ["java-kotlin", "kotlin"] }, - [ - "CodeQL language 'java' is referenced by more than one entry in the 'language' matrix " + - "parameter for job 'test'. This may result in duplicate alerts. Please edit the 'language' " + - "matrix parameter to keep only one of the following: 'java', 'kotlin'.", - ], - ); -}); +test.serial( + "getWorkflowErrors() produces an error for workflow with language name and its alias", + async (t) => { + await testLanguageAliases( + t, + ["java", "kotlin"], + { java: ["java-kotlin", "kotlin"] }, + [ + "CodeQL language 'java' is referenced by more than one entry in the 'language' matrix " + + "parameter for job 'test'. This may result in duplicate alerts. 
Please edit the 'language' " + + "matrix parameter to keep only one of the following: 'java', 'kotlin'.", + ], + ); + }, +); -test("getWorkflowErrors() produces an error for workflow with two aliases same language", async (t) => { - await testLanguageAliases( - t, - ["java-kotlin", "kotlin"], - { java: ["java-kotlin", "kotlin"] }, - [ - "CodeQL language 'java' is referenced by more than one entry in the 'language' matrix " + - "parameter for job 'test'. This may result in duplicate alerts. Please edit the 'language' " + - "matrix parameter to keep only one of the following: 'java-kotlin', 'kotlin'.", - ], - ); -}); +test.serial( + "getWorkflowErrors() produces an error for workflow with two aliases same language", + async (t) => { + await testLanguageAliases( + t, + ["java-kotlin", "kotlin"], + { java: ["java-kotlin", "kotlin"] }, + [ + "CodeQL language 'java' is referenced by more than one entry in the 'language' matrix " + + "parameter for job 'test'. This may result in duplicate alerts. 
Please edit the 'language' " + + "matrix parameter to keep only one of the following: 'java-kotlin', 'kotlin'.", + ], + ); + }, +); -test("getWorkflowErrors() does not produce an error for workflow with two distinct languages", async (t) => { - await testLanguageAliases( - t, - ["java", "typescript"], - { - java: ["java-kotlin", "kotlin"], - javascript: ["javascript-typescript", "typescript"], - }, - [], - ); -}); +test.serial( + "getWorkflowErrors() does not produce an error for workflow with two distinct languages", + async (t) => { + await testLanguageAliases( + t, + ["java", "typescript"], + { + java: ["java-kotlin", "kotlin"], + javascript: ["javascript-typescript", "typescript"], + }, + [], + ); + }, +); -test("getWorkflowErrors() does not produce an error if codeql doesn't support language aliases", async (t) => { - await testLanguageAliases(t, ["java-kotlin", "kotlin"], undefined, []); -}); +test.serial( + "getWorkflowErrors() does not produce an error if codeql doesn't support language aliases", + async (t) => { + await testLanguageAliases(t, ["java-kotlin", "kotlin"], undefined, []); + }, +); async function testLanguageAliases( t: ExecutionContext, @@ -483,11 +495,13 @@ test("getWorkflowErrors() when on.push has a trailing comma", async (t) => { t.deepEqual(...errorCodes(errors, [])); }); -test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", async (t) => { - process.env.GITHUB_JOB = "test"; +test.serial( + "getWorkflowErrors() should only report the current job's CheckoutWrongHead", + async (t) => { + process.env.GITHUB_JOB = "test"; - const errors = await getWorkflowErrors( - yaml.load(` + const errors = await getWorkflowErrors( + yaml.load(` name: "CodeQL" on: push: @@ -507,17 +521,20 @@ test("getWorkflowErrors() should only report the current job's CheckoutWrongHead test3: steps: [] `) as Workflow, - await getCodeQLForTesting(), - ); + await getCodeQLForTesting(), + ); - t.deepEqual(...errorCodes(errors, 
[WorkflowErrors.CheckoutWrongHead])); -}); + t.deepEqual(...errorCodes(errors, [WorkflowErrors.CheckoutWrongHead])); + }, +); -test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", async (t) => { - process.env.GITHUB_JOB = "test3"; +test.serial( + "getWorkflowErrors() should not report a different job's CheckoutWrongHead", + async (t) => { + process.env.GITHUB_JOB = "test3"; - const errors = await getWorkflowErrors( - yaml.load(` + const errors = await getWorkflowErrors( + yaml.load(` name: "CodeQL" on: push: @@ -537,11 +554,12 @@ test("getWorkflowErrors() should not report a different job's CheckoutWrongHead" test3: steps: [] `) as Workflow, - await getCodeQLForTesting(), - ); + await getCodeQLForTesting(), + ); - t.deepEqual(...errorCodes(errors, [])); -}); + t.deepEqual(...errorCodes(errors, [])); + }, +); test("getWorkflowErrors() when on is missing", async (t) => { const errors = await getWorkflowErrors( @@ -723,11 +741,13 @@ test("getWorkflowErrors() should not report a warning involving versions of othe t.deepEqual(...errorCodes(errors, [])); }); -test("getCategoryInputOrThrow returns category for simple workflow with category", (t) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - t.is( - getCategoryInputOrThrow( - yaml.load(` +test.serial( + "getCategoryInputOrThrow returns category for simple workflow with category", + (t) => { + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + t.is( + getCategoryInputOrThrow( + yaml.load(` jobs: analysis: runs-on: ubuntu-latest @@ -738,18 +758,21 @@ test("getCategoryInputOrThrow returns category for simple workflow with category with: category: some-category `) as Workflow, - "analysis", - {}, - ), - "some-category", - ); -}); + "analysis", + {}, + ), + "some-category", + ); + }, +); -test("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => { - process.env["GITHUB_REPOSITORY"] = 
"github/codeql-action-fake-repository"; - t.is( - getCategoryInputOrThrow( - yaml.load(` +test.serial( + "getCategoryInputOrThrow returns undefined for simple workflow without category", + (t) => { + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + t.is( + getCategoryInputOrThrow( + yaml.load(` jobs: analysis: runs-on: ubuntu-latest @@ -758,18 +781,21 @@ test("getCategoryInputOrThrow returns undefined for simple workflow without cate - uses: github/codeql-action/init@v4 - uses: github/codeql-action/analyze@v4 `) as Workflow, - "analysis", - {}, - ), - undefined, - ); -}); + "analysis", + {}, + ), + undefined, + ); + }, +); -test("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - t.is( - getCategoryInputOrThrow( - yaml.load(` +test.serial( + "getCategoryInputOrThrow returns category for workflow with multiple jobs", + (t) => { + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + t.is( + getCategoryInputOrThrow( + yaml.load(` jobs: foo: runs-on: ubuntu-latest @@ -790,18 +816,21 @@ test("getCategoryInputOrThrow returns category for workflow with multiple jobs", with: category: bar-category `) as Workflow, - "bar", - {}, - ), - "bar-category", - ); -}); + "bar", + {}, + ), + "bar-category", + ); + }, +); -test("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - t.is( - getCategoryInputOrThrow( - yaml.load(` +test.serial( + "getCategoryInputOrThrow finds category for workflow with language matrix", + (t) => { + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + t.is( + getCategoryInputOrThrow( + yaml.load(` jobs: analysis: runs-on: ubuntu-latest @@ -817,19 +846,22 @@ test("getCategoryInputOrThrow finds category for workflow with language matrix", with: category: 
"/language:\${{ matrix.language }}" `) as Workflow, - "analysis", - { language: "javascript" }, - ), - "/language:javascript", - ); -}); + "analysis", + { language: "javascript" }, + ), + "/language:javascript", + ); + }, +); -test("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - t.throws( - () => - getCategoryInputOrThrow( - yaml.load(` +test.serial( + "getCategoryInputOrThrow throws error for workflow with dynamic category", + (t) => { + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + t.throws( + () => + getCategoryInputOrThrow( + yaml.load(` jobs: analysis: steps: @@ -839,23 +871,26 @@ test("getCategoryInputOrThrow throws error for workflow with dynamic category", with: category: "\${{ github.workflow }}" `) as Workflow, - "analysis", - {}, - ), - { - message: - "Could not get category input to github/codeql-action/analyze since it contained " + - "an unrecognized dynamic value.", - }, - ); -}); + "analysis", + {}, + ), + { + message: + "Could not get category input to github/codeql-action/analyze since it contained " + + "an unrecognized dynamic value.", + }, + ); + }, +); -test("getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", (t) => { - process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; - t.throws( - () => - getCategoryInputOrThrow( - yaml.load(` +test.serial( + "getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", + (t) => { + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + t.throws( + () => + getCategoryInputOrThrow( + yaml.load(` jobs: analysis: runs-on: ubuntu-latest @@ -869,88 +904,101 @@ test("getCategoryInputOrThrow throws error for workflow with multiple calls to a with: category: another-category `) as Workflow, - "analysis", - {}, - ), - { - message: - "Could not get category input to 
github/codeql-action/analyze since the analysis job " + - "calls github/codeql-action/analyze multiple times.", - }, - ); -}); + "analysis", + {}, + ), + { + message: + "Could not get category input to github/codeql-action/analyze since the analysis job " + + "calls github/codeql-action/analyze multiple times.", + }, + ); + }, +); -test("checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", async (t) => { - const messages: LoggedMessage[] = []; - const codeql = createStubCodeQL({}); +test.serial( + "checkWorkflow - validates workflow if `SKIP_WORKFLOW_VALIDATION` is not set", + async (t) => { + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); - sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); - const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); - validateWorkflow.resolves(undefined); + sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); + const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); + validateWorkflow.resolves(undefined); - await checkWorkflow(getRecordingLogger(messages), codeql); + await checkWorkflow(getRecordingLogger(messages), codeql); - t.assert( - validateWorkflow.calledOnce, - "`checkWorkflow` unexpectedly did not call `validateWorkflow`", - ); - checkExpectedLogMessages(t, messages, [ - "Detected no issues with the code scanning workflow.", - ]); -}); + t.assert( + validateWorkflow.calledOnce, + "`checkWorkflow` unexpectedly did not call `validateWorkflow`", + ); + checkExpectedLogMessages(t, messages, [ + "Detected no issues with the code scanning workflow.", + ]); + }, +); -test("checkWorkflow - logs problems with workflow validation", async (t) => { - const messages: LoggedMessage[] = []; - const codeql = createStubCodeQL({}); +test.serial( + "checkWorkflow - logs problems with workflow validation", + async (t) => { + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); - sinon.stub(actionsUtil, 
"isDynamicWorkflow").returns(false); - const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); - validateWorkflow.resolves("problem"); + sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); + const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); + validateWorkflow.resolves("problem"); - await checkWorkflow(getRecordingLogger(messages), codeql); + await checkWorkflow(getRecordingLogger(messages), codeql); - t.assert( - validateWorkflow.calledOnce, - "`checkWorkflow` unexpectedly did not call `validateWorkflow`", - ); - checkExpectedLogMessages(t, messages, [ - "Unable to validate code scanning workflow: problem", - ]); -}); + t.assert( + validateWorkflow.calledOnce, + "`checkWorkflow` unexpectedly did not call `validateWorkflow`", + ); + checkExpectedLogMessages(t, messages, [ + "Unable to validate code scanning workflow: problem", + ]); + }, +); -test("checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", async (t) => { - process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true"; +test.serial( + "checkWorkflow - skips validation if `SKIP_WORKFLOW_VALIDATION` is `true`", + async (t) => { + process.env[EnvVar.SKIP_WORKFLOW_VALIDATION] = "true"; - const messages: LoggedMessage[] = []; - const codeql = createStubCodeQL({}); + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); - sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); - const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); + sinon.stub(actionsUtil, "isDynamicWorkflow").returns(false); + const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); - await checkWorkflow(getRecordingLogger(messages), codeql); + await checkWorkflow(getRecordingLogger(messages), codeql); - t.assert( - validateWorkflow.notCalled, - "`checkWorkflow` called `validateWorkflow` unexpectedly", - ); - t.is(messages.length, 0); -}); + t.assert( + validateWorkflow.notCalled, + "`checkWorkflow` called 
`validateWorkflow` unexpectedly", + ); + t.is(messages.length, 0); + }, +); -test("checkWorkflow - skips validation for `dynamic` workflows", async (t) => { - const messages: LoggedMessage[] = []; - const codeql = createStubCodeQL({}); +test.serial( + "checkWorkflow - skips validation for `dynamic` workflows", + async (t) => { + const messages: LoggedMessage[] = []; + const codeql = createStubCodeQL({}); - const isDynamicWorkflow = sinon - .stub(actionsUtil, "isDynamicWorkflow") - .returns(true); - const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); + const isDynamicWorkflow = sinon + .stub(actionsUtil, "isDynamicWorkflow") + .returns(true); + const validateWorkflow = sinon.stub(workflow.internal, "validateWorkflow"); - await checkWorkflow(getRecordingLogger(messages), codeql); + await checkWorkflow(getRecordingLogger(messages), codeql); - t.assert(isDynamicWorkflow.calledOnce); - t.assert( - validateWorkflow.notCalled, - "`checkWorkflow` called `validateWorkflow` unexpectedly", - ); - t.is(messages.length, 0); -}); + t.assert(isDynamicWorkflow.calledOnce); + t.assert( + validateWorkflow.notCalled, + "`checkWorkflow` called `validateWorkflow` unexpectedly", + ); + t.is(messages.length, 0); + }, +); diff --git a/tsconfig.json b/tsconfig.json index 7676dc322..6be84577a 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -28,5 +28,5 @@ "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ "resolveJsonModule": true, }, - "exclude": ["node_modules"] + "exclude": ["node_modules", "pr-checks"] }