Mirror of https://github.com/github/codeql-action.git, synced 2026-05-10 15:50:28 +00:00

Compare commits (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| | 78fa31643b | |
@@ -4,15 +4,14 @@ updates:
    directory: "/"
    schedule:
      interval: weekly
    cooldown:
      default-days: 7
      exclude:
        - "@actions/*"
    labels:
      - Rebuild
    # Ignore incompatible dependency updates
    ignore:
      # This is broken due to the way configuration files have changed.
      # There is a type incompatibility issue between v0.0.9 and our other dependencies.
      - dependency-name: "@octokit/plugin-retry"
        versions: ["~6.0.0"]
      # This is broken due to the way configuration files have changed.
      # This might be fixed when we move to eslint v9.
      - dependency-name: "eslint-plugin-import"
        versions: [">=2.30.0"]
@@ -29,10 +28,6 @@ updates:
      - "/.github/actions"
    schedule:
      interval: weekly
    cooldown:
      default-days: 7
      exclude:
        - "actions/*"
    labels:
      - Rebuild
    groups:
@@ -23,13 +23,13 @@ For internal use only. Please select the risk level of this change:

Workflow types:

- **Advanced setup** - Impacts users who have custom CodeQL workflows.
- **Managed** - Impacts users with `dynamic` workflows (Default Setup, Code Quality, ...).
- **Managed** - Impacts users with `dynamic` workflows (Default Setup, CCR, ...).

Products:

- **Code Scanning** - The changes impact analyses when `analysis-kinds: code-scanning`.
- **Code Quality** - The changes impact analyses when `analysis-kinds: code-quality`.
- **Other first-party** - The changes impact other first-party analyses.
- **CCR** - The changes impact analyses for Copilot Code Reviews.
- **Third-party analyses** - The changes affect the `upload-sarif` action.

Environments:

@@ -54,7 +54,6 @@ Environments:

- **Feature flags** - All new or changed code paths can be fully disabled with corresponding feature flags.
- **Rollback** - Change can only be disabled by rolling back the release or releasing a new version with a fix.
- **Development/testing only** - This change cannot cause any failures in production.
- **Other** - Please provide details.

#### How will you know if something goes wrong after this change is released?
@@ -71,9 +71,8 @@ def open_pr(
    body.append('')
    body.append('Contains the following pull requests:')
    for pr in pull_requests:
        # Use PR author if they are GitHub staff, otherwise use the merger
        display_user = get_pr_author_if_staff(pr) or get_merger_of_pr(repo, pr)
        body.append(f'- #{pr.number} (@{display_user})')
        merger = get_merger_of_pr(repo, pr)
        body.append(f'- #{pr.number} (@{merger})')

    # List all commits not part of a PR
    if len(commits_without_pull_requests) > 0:

@@ -169,14 +168,6 @@ def get_pr_for_commit(commit):
def get_merger_of_pr(repo, pr):
    return repo.get_commit(pr.merge_commit_sha).author.login

# Get the PR author if they are GitHub staff, otherwise None.
def get_pr_author_if_staff(pr):
    if pr.user is None:
        return None
    if getattr(pr.user, 'site_admin', False):
        return pr.user.login
    return None

def get_current_version():
    with open('package.json', 'r') as f:
        return json.load(f)['version']

@@ -190,9 +181,9 @@ def replace_version_package_json(prev_version, new_version):
            print(line.replace(prev_version, new_version), end='')
        else:
            prev_line_is_codeql = False
            print(line, end='')
        print(line, end='')
        if '"name": "codeql",' in line:
            prev_line_is_codeql = True
        prev_line_is_codeql = True

def get_today_string():
    today = datetime.datetime.today()
+1
-1
@@ -56,7 +56,7 @@ jobs:
          use-all-platform-bundle: 'false'
          setup-kotlin: 'true'
      - name: Install @actions/tool-cache
        run: npm install @actions/tool-cache@3
        run: npm install @actions/tool-cache
      - name: Check toolcache contains CodeQL
        continue-on-error: true
        uses: actions/github-script@v8
Generated
+1
-1
@@ -68,7 +68,7 @@ jobs:
            const codeqlPath = path.join(process.env['RUNNER_TOOL_CACHE'], 'CodeQL');
            fs.rmdirSync(codeqlPath, { recursive: true });
      - name: Install @actions/tool-cache
        run: npm install @actions/tool-cache@3
        run: npm install @actions/tool-cache
      - name: Check toolcache does not contain CodeQL
        uses: actions/github-script@v8
        with:
+30
-12
@@ -3,7 +3,7 @@
# pr-checks/sync.sh
# to regenerate this file.

name: 'PR Check - Bundle: From nightly'
name: PR Check - CCR
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto

@@ -29,16 +29,32 @@ defaults:
    shell: bash
concurrency:
  cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
  group: bundle-from-nightly-${{github.ref}}
  group: ccr-${{github.ref}}
jobs:
  bundle-from-nightly:
  ccr:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-latest
            version: stable-v2.17.6
          - os: ubuntu-latest
            version: stable-v2.18.4
          - os: ubuntu-latest
            version: stable-v2.19.4
          - os: ubuntu-latest
            version: stable-v2.20.7
          - os: ubuntu-latest
            version: stable-v2.21.4
          - os: ubuntu-latest
            version: stable-v2.22.4
          - os: ubuntu-latest
            version: default
          - os: ubuntu-latest
            version: linked
    name: 'Bundle: From nightly'
          - os: ubuntu-latest
            version: nightly-latest
    name: CCR
    if: github.triggering_actor != 'dependabot[bot]'
    permissions:
      contents: read

@@ -55,15 +71,17 @@ jobs:
          version: ${{ matrix.version }}
          use-all-platform-bundle: 'false'
          setup-kotlin: 'true'
      - id: init
        uses: ./../action/init
        env:
          CODEQL_ACTION_FORCE_NIGHTLY: true
      - uses: ./../action/init
        id: init
        with:
          tools: ${{ steps.prepare-test.outputs.tools-url }}
          languages: javascript
      - name: Fail if the CodeQL version is not a nightly
        if: "!contains(steps.init.outputs.codeql-version, '+')"
        run: exit 1
          tools: ${{ steps.prepare-test.outputs.tools-url }}

      - uses: ./../action/analyze
        id: analysis
        with:
          upload-database: false

    env:
      CODEQL_ACTION_ANALYSIS_KEY: dynamic/copilot-pull-request-reviewer/codeql-action-test
      CODEQL_ACTION_TEST_MODE: true

@@ -125,6 +125,5 @@ jobs:
            fi
          done
        env:
          CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS: false
          CODEQL_ACTION_SUBLANGUAGE_FILE_COVERAGE: true
          CODEQL_ACTION_TEST_MODE: true
Generated
+12
@@ -48,6 +48,18 @@ jobs:
    timeout-minutes: 45
    runs-on: ${{ matrix.os }}
    steps:
      # These steps are required to initialise the `gh` cli in a container that doesn't
      # come pre-installed with it. The reason for that is that this is later
      # needed by the `prepare-test` workflow to find the latest release of CodeQL.
      - name: Set up GitHub CLI
        run: |
          apt update
          apt install -y curl libreadline8 gnupg2 software-properties-common zstd
          curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
          apt-key add /usr/share/keyrings/githubcli-archive-keyring.gpg
          apt-add-repository https://cli.github.com/packages
          apt install -y gh
        env: {}
      - name: Check out repository
        uses: actions/checkout@v6
      - name: Prepare test
+17
-18
@@ -3,7 +3,7 @@
# pr-checks/sync.sh
# to regenerate this file.

name: PR Check - Analysis kinds
name: PR Check - Quality queries input
env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GO111MODULE: auto

@@ -29,9 +29,9 @@ defaults:
    shell: bash
concurrency:
  cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
  group: analysis-kinds-${{github.ref}}
  group: quality-queries-${{github.ref}}
jobs:
  analysis-kinds:
  quality-queries:
    strategy:
      fail-fast: false
      matrix:

@@ -45,9 +45,6 @@ jobs:
        - os: ubuntu-latest
          version: linked
          analysis-kinds: code-scanning,code-quality
        - os: ubuntu-latest
          version: linked
          analysis-kinds: risk-assessment
        - os: ubuntu-latest
          version: nightly-latest
          analysis-kinds: code-scanning

@@ -57,10 +54,7 @@ jobs:
        - os: ubuntu-latest
          version: nightly-latest
          analysis-kinds: code-scanning,code-quality
        - os: ubuntu-latest
          version: nightly-latest
          analysis-kinds: risk-assessment
    name: Analysis kinds
    name: Quality queries input
    if: github.triggering_actor != 'dependabot[bot]'
    permissions:
      contents: read

@@ -87,24 +81,30 @@ jobs:
          output: ${{ runner.temp }}/results
          upload-database: false
          post-processed-sarif-path: ${{ runner.temp }}/post-processed

      - name: Upload SARIF files
      - name: Upload security SARIF
        if: contains(matrix.analysis-kinds, 'code-scanning')
        uses: actions/upload-artifact@v6
        with:
          name: |
            analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
          path: ${{ runner.temp }}/results/*.sarif
            quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
          path: ${{ runner.temp }}/results/javascript.sarif
          retention-days: 7
      - name: Upload quality SARIF
        if: contains(matrix.analysis-kinds, 'code-quality')
        uses: actions/upload-artifact@v6
        with:
          name: |
            quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
          path: ${{ runner.temp }}/results/javascript.quality.sarif
          retention-days: 7

      - name: Upload post-processed SARIF
        uses: actions/upload-artifact@v6
        with:
          name: |
            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
            post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
          path: ${{ runner.temp }}/post-processed
          retention-days: 7
          if-no-files-found: error

      - name: Check quality query does not appear in security SARIF
        if: contains(matrix.analysis-kinds, 'code-scanning')
        uses: actions/github-script@v8

@@ -122,7 +122,6 @@ jobs:
        with:
          script: ${{ env.CHECK_SCRIPT }}
        env:
          CODEQL_ACTION_RISK_ASSESSMENT_ID: 1
          CHECK_SCRIPT: |
            const fs = require('fs');
+1
-1
@@ -56,7 +56,7 @@ jobs:
          use-all-platform-bundle: 'false'
          setup-kotlin: 'true'
      - name: Set up Ruby
        uses: ruby/setup-ruby@09a7688d3b55cf0e976497ff046b70949eeaccfd # v1.288.0
        uses: ruby/setup-ruby@80740b3b13bf9857e28854481ca95a84e78a2bdf # v1.284.0
        with:
          ruby-version: 2.6
      - name: Install Code Scanning integration
@@ -17,7 +17,6 @@ jobs:
  sizeup:
    name: Label PR with size
    runs-on: ubuntu-slim
    if: github.event.pull_request.merged != true

    steps:
      - name: Run sizeup
@@ -111,7 +111,7 @@ jobs:
          # Otherwise, just commit the changes.
          if git rev-parse --verify MERGE_HEAD >/dev/null 2>&1; then
            echo "In progress merge detected, finishing it up."
            git merge --continue --no-edit
            git merge --continue
          else
            echo "No in-progress merge detected, committing changes."
            git commit -m "Rebuild"
@@ -57,24 +57,6 @@ jobs:
      - name: Update bundle
        uses: ./.github/actions/update-bundle

      - name: Bump Action minor version if new CodeQL minor version series
        id: bump-action-version
        run: |
          prior_cli_version=$(jq -r '.priorCliVersion' src/defaults.json)
          cli_version=$(jq -r '.cliVersion' src/defaults.json)

          prior_minor=$(echo "$prior_cli_version" | cut -d. -f2)
          current_minor=$(echo "$cli_version" | cut -d. -f2)

          if [[ "$current_minor" != "$prior_minor" ]]; then
            echo "New CodeQL minor version series ($prior_cli_version -> $cli_version), bumping Action minor version"
            npm version minor --no-git-tag-version
            echo "bumped=true" >> "$GITHUB_OUTPUT"
          else
            echo "Same minor version series ($prior_cli_version -> $cli_version), skipping Action version bump"
            echo "bumped=false" >> "$GITHUB_OUTPUT"
          fi

      - name: Rebuild Action
        run: npm run build

@@ -89,19 +71,11 @@ jobs:
      - name: Open pull request
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          ACTION_VERSION_BUMPED: ${{ steps.bump-action-version.outputs.bumped }}
        run: |
          cli_version=$(jq -r '.cliVersion' src/defaults.json)
          action_version=$(jq -r '.version' package.json)

          pr_body="This pull request updates the default CodeQL bundle, as used with \`tools: linked\` and on GHES, to $cli_version."
          if [[ "$ACTION_VERSION_BUMPED" == "true" ]]; then
            pr_body+=$'\n\n'"Since this is a new CodeQL minor version series, this PR also bumps the Action version to $action_version."
          fi

          pr_url=$(gh pr create \
            --title "Update default bundle to $cli_version" \
            --body "$pr_body" \
            --body "This pull request updates the default CodeQL bundle, as used with \`tools: linked\` and on GHES, to $cli_version." \
            --assignee "$GITHUB_ACTOR" \
            --draft \
          )
+1
-26
@@ -2,32 +2,7 @@

See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.

## [UNRELEASED]

No user facing changes.

## 4.32.4 - 20 Feb 2026

- Update default CodeQL bundle version to [2.24.2](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.2). [#3493](https://github.com/github/codeql-action/pull/3493)
- Added an experimental change which improves how certificates are generated for the authentication proxy that is used by the CodeQL Action in Default Setup when [private package registries are configured](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries). This is expected to generate more widely compatible certificates and should have no impact on analyses which are working correctly already. We expect to roll this change out to everyone in February. [#3473](https://github.com/github/codeql-action/pull/3473)
- When the CodeQL Action is run [with debugging enabled in Default Setup](https://docs.github.com/en/code-security/how-tos/scan-code-for-vulnerabilities/troubleshooting/troubleshooting-analysis-errors/logs-not-detailed-enough#creating-codeql-debugging-artifacts-for-codeql-default-setup) and [private package registries are configured](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries), the "Setup proxy for registries" step will output additional diagnostic information that can be used for troubleshooting. [#3486](https://github.com/github/codeql-action/pull/3486)
- Added a setting which allows the CodeQL Action to enable network debugging for Java programs. This will help GitHub staff support customers with troubleshooting issues in GitHub-managed CodeQL workflows, such as Default Setup. This setting can only be enabled by GitHub staff. [#3485](https://github.com/github/codeql-action/pull/3485)
- Added a setting which enables GitHub-managed workflows, such as Default Setup, to use a [nightly CodeQL CLI release](https://github.com/dsp-testing/codeql-cli-nightlies) instead of the latest, stable release that is used by default. This will help GitHub staff support customers whose analyses for a given repository or organization require early access to a change in an upcoming CodeQL CLI release. This setting can only be enabled by GitHub staff. [#3484](https://github.com/github/codeql-action/pull/3484)

## 4.32.3 - 13 Feb 2026

- Added experimental support for testing connections to [private package registries](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries). This feature is not currently enabled for any analysis. In the future, it may be enabled by default for Default Setup. [#3466](https://github.com/github/codeql-action/pull/3466)

## 4.32.2 - 05 Feb 2026

- Update default CodeQL bundle version to [2.24.1](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.1). [#3460](https://github.com/github/codeql-action/pull/3460)

## 4.32.1 - 02 Feb 2026

- A warning is now shown in Default Setup workflow logs if a [private package registry is configured](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries) using a GitHub Personal Access Token (PAT), but no username is configured. [#3422](https://github.com/github/codeql-action/pull/3422)
- Fixed a bug which caused the CodeQL Action to fail when repository properties cannot successfully be retrieved. [#3421](https://github.com/github/codeql-action/pull/3421)

## 4.32.0 - 26 Jan 2026
## 4.31.12 - 26 Jan 2026

- Update default CodeQL bundle version to [2.24.0](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.0). [#3425](https://github.com/github/codeql-action/pull/3425)
@@ -80,12 +80,6 @@ We typically release new minor versions of the CodeQL Action and Bundle when a n

See the full list of GHES release and deprecation dates at [GitHub Enterprise Server releases](https://docs.github.com/en/enterprise-server/admin/all-releases#releases-of-github-enterprise-server).

## Keeping the CodeQL Action up to date in advanced setups

If you are using an [advanced setup](https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/configuring-advanced-setup-for-code-scanning), we recommend referencing the CodeQL Action using a major version tag (e.g. `v4`) in your workflow file. This ensures your workflow automatically picks up the latest release within that major version, including bug fixes, new features, and updated CodeQL CLI versions.

If you pin to a specific commit SHA or patch version tag, ensure you keep it updated (e.g. via [Dependabot](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot)). Some CodeQL Action features are enabled by server-side flags that may be removed over time, which can cause old versions to lose functionality.

## Troubleshooting

Read about [troubleshooting code scanning](https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning).
+37
-35
@@ -1,14 +1,27 @@
import { fixupPluginRules } from "@eslint/compat";
// Automatically generated by running npx @eslint/migrate-config .eslintrc.json

import path from "node:path";
import { fileURLToPath } from "node:url";

import { fixupConfigRules, fixupPluginRules } from "@eslint/compat";
import { FlatCompat } from "@eslint/eslintrc";
import js from "@eslint/js";
import typescriptEslint from "@typescript-eslint/eslint-plugin";
import tsParser from "@typescript-eslint/parser";
import filenames from "eslint-plugin-filenames";
import github from "eslint-plugin-github";
import { importX, createNodeResolver } from "eslint-plugin-import-x";
import { createTypeScriptImportResolver } from "eslint-import-resolver-typescript";
import _import from "eslint-plugin-import";
import noAsyncForeach from "eslint-plugin-no-async-foreach";
import jsdoc from "eslint-plugin-jsdoc";
import tseslint from "typescript-eslint";
import globals from "globals";

const githubFlatConfigs = github.getFlatConfigs();
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
  baseDirectory: __dirname,
  recommendedConfig: js.configs.recommended,
  allConfig: js.configs.all,
});

export default [
  {

@@ -23,29 +36,29 @@ export default [
      ".github/**/*",
    ],
  },
  // eslint recommended config
  js.configs.recommended,
  // Type-checked rules from typescript-eslint
  ...tseslint.configs.recommendedTypeChecked,
  ...tseslint.configs.strict,
  // eslint-plugin-github recommended config
  githubFlatConfigs.recommended,
  // eslint-plugin-github typescript config
  ...githubFlatConfigs.typescript,
  // import-x TypeScript settings
  // This is needed for import-x rules to properly parse TypeScript files.
  {
    settings: importX.flatConfigs.typescript.settings,
  },
  ...fixupConfigRules(
    compat.extends(
      "eslint:recommended",
      "plugin:@typescript-eslint/recommended",
      "plugin:@typescript-eslint/recommended-requiring-type-checking",
      "plugin:github/recommended",
      "plugin:github/typescript",
      "plugin:import/typescript",
    ),
  ),
  {
    plugins: {
      "import-x": importX,
      "no-async-foreach": fixupPluginRules(noAsyncForeach),
      "@typescript-eslint": fixupPluginRules(typescriptEslint),
      filenames: fixupPluginRules(filenames),
      github: fixupPluginRules(github),
      import: fixupPluginRules(_import),
      "no-async-foreach": noAsyncForeach,
      "jsdoc": jsdoc,
    },

    languageOptions: {
      ecmaVersion: "latest",
      parser: tsParser,
      ecmaVersion: 5,
      sourceType: "module",

      globals: {

@@ -66,16 +79,10 @@ export default [
        typescript: {},
      },
      "import/ignore": ["sinon", "uuid", "@octokit/plugin-retry", "del", "get-folder-size"],
      "import-x/resolver-next": [
        createTypeScriptImportResolver(),
        createNodeResolver({
          extensions: [".ts", ".js", ".json"],
        }),
      ],
    },

    rules: {
      "github/filenames-match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
      "filenames/match-regex": ["error", "^[a-z0-9-]+(\\.test)?$"],
      "i18n-text/no-en": "off",

      "import/extensions": [

@@ -87,10 +94,7 @@ export default [
      "import/no-amd": "error",
      "import/no-commonjs": "error",
      // import/no-cycle does not seem to work with ESLint 9.
      // Use import-x/no-cycle from eslint-plugin-import-x instead.
      "import/no-cycle": "off",
      "import-x/no-cycle": "error",
      "import/no-cycle": "error",
      "import/no-dynamic-require": "error",

      "import/no-extraneous-dependencies": [

@@ -128,8 +132,6 @@ export default [
      "no-async-foreach/no-async-foreach": "error",
      "no-sequences": "error",
      "no-shadow": "off",
      // This is overly restrictive with unsetting `EnvVar`s
      "@typescript-eslint/no-dynamic-delete": "off",
      "@typescript-eslint/no-shadow": "error",
      "@typescript-eslint/prefer-optional-chain": "error",
      "one-var": ["error", "never"],
Generated
+15128
-52172
File diff suppressed because one or more lines are too long
Generated
+16457
-32031
File diff suppressed because one or more lines are too long
Generated
+14633
-31226
File diff suppressed because one or more lines are too long
+4
-4
@@ -1,6 +1,6 @@
{
  "bundleVersion": "codeql-bundle-v2.24.2",
  "cliVersion": "2.24.2",
  "priorBundleVersion": "codeql-bundle-v2.24.1",
  "priorCliVersion": "2.24.1"
  "bundleVersion": "codeql-bundle-v2.24.0",
  "cliVersion": "2.24.0",
  "priorBundleVersion": "codeql-bundle-v2.23.9",
  "priorCliVersion": "2.23.9"
}
Generated
+18395
-54563
File diff suppressed because one or more lines are too long
Generated
+17655
-34522
File diff suppressed because one or more lines are too long
Generated
+14595
-31136
File diff suppressed because one or more lines are too long
Generated
+14805
-31483
File diff suppressed because one or more lines are too long
Generated
+15114
-52155
File diff suppressed because one or more lines are too long
Generated
+37929
-55266
File diff suppressed because one or more lines are too long
Generated
+16878
-33510
File diff suppressed because one or more lines are too long
Generated
+15080
-52121
File diff suppressed because one or more lines are too long
Generated
+17964
-34692
File diff suppressed because one or more lines are too long
Generated
+1715
-2380
File diff suppressed because it is too large
+21
-19
@@ -1,6 +1,6 @@
{
  "name": "codeql",
  "version": "4.32.5",
  "version": "4.31.12",
  "private": true,
  "description": "CodeQL action",
  "scripts": {

@@ -24,33 +24,34 @@
  },
  "license": "MIT",
  "dependencies": {
    "@actions/artifact": "^5.0.3",
    "@actions/artifact": "^5.0.2",
    "@actions/artifact-legacy": "npm:@actions/artifact@^1.1.2",
    "@actions/cache": "^5.0.5",
    "@actions/core": "^2.0.3",
    "@actions/cache": "^5.0.3",
    "@actions/core": "^2.0.2",
    "@actions/exec": "^2.0.0",
    "@actions/github": "^8.0.1",
    "@actions/github": "^7.0.0",
    "@actions/glob": "^0.5.0",
    "@actions/http-client": "^3.0.0",
    "@actions/io": "^2.0.0",
    "@actions/tool-cache": "^3.0.1",
    "@octokit/plugin-retry": "^8.0.0",
    "@actions/tool-cache": "^3.0.0",
    "@octokit/plugin-retry": "^6.0.0",
    "@schemastore/package": "0.0.10",
    "archiver": "^7.0.1",
    "fast-deep-equal": "^3.1.3",
    "follow-redirects": "^1.15.11",
    "get-folder-size": "^5.0.0",
    "https-proxy-agent": "^7.0.6",
    "js-yaml": "^4.1.1",
    "jsonschema": "1.4.1",
    "long": "^5.3.2",
    "node-forge": "^1.3.3",
    "semver": "^7.7.4",
    "semver": "^7.7.3",
    "uuid": "^13.0.0"
  },
  "devDependencies": {
    "@ava/typescript": "6.0.0",
    "@eslint/compat": "^2.0.2",
    "@eslint/compat": "^2.0.1",
    "@eslint/eslintrc": "^3.3.3",
    "@eslint/js": "^9.39.2",
    "@microsoft/eslint-formatter-sarif": "^3.1.0",
    "@octokit/types": "^16.0.0",
    "@types/archiver": "^7.0.0",

@@ -60,20 +61,21 @@
    "@types/node-forge": "^1.3.14",
    "@types/semver": "^7.7.1",
    "@types/sinon": "^21.0.0",
    "@typescript-eslint/eslint-plugin": "^8.53.1",
    "@typescript-eslint/parser": "^8.48.0",
    "ava": "^6.4.1",
    "esbuild": "^0.27.3",
    "eslint": "^9.39.2",
    "esbuild": "^0.27.2",
    "eslint": "^8.57.1",
    "eslint-import-resolver-typescript": "^3.8.7",
    "eslint-plugin-github": "^6.0.0",
    "eslint-plugin-import-x": "^4.16.1",
    "eslint-plugin-jsdoc": "^62.5.0",
    "eslint-plugin-filenames": "^1.3.2",
    "eslint-plugin-github": "^5.1.8",
    "eslint-plugin-import": "2.29.1",
    "eslint-plugin-jsdoc": "^62.2.0",
    "eslint-plugin-no-async-foreach": "^0.1.1",
    "glob": "^11.1.0",
    "globals": "^16.5.0",
    "nock": "^14.0.11",
    "nock": "^14.0.10",
    "sinon": "^21.0.1",
    "typescript": "^5.9.3",
    "typescript-eslint": "^8.56.0"
    "typescript": "^5.9.3"
  },
  "overrides": {
    "@actions/tool-cache": {
@@ -1,15 +0,0 @@
name: "Bundle: From nightly"
description: "The nightly CodeQL bundle should be used when forced"
versions:
  - linked # overruled by the FF set below
steps:
  - id: init
    uses: ./../action/init
    env:
      CODEQL_ACTION_FORCE_NIGHTLY: true
    with:
      tools: ${{ steps.prepare-test.outputs.tools-url }}
      languages: javascript
  - name: Fail if the CodeQL version is not a nightly
    if: "!contains(steps.init.outputs.codeql-version, '+')"
    run: exit 1
@@ -4,7 +4,7 @@ versions:
  - toolcache
steps:
  - name: Install @actions/tool-cache
    run: npm install @actions/tool-cache@3
    run: npm install @actions/tool-cache
  - name: Check toolcache contains CodeQL
    continue-on-error: true
    uses: actions/github-script@v8
@@ -16,7 +16,7 @@ steps:
        const codeqlPath = path.join(process.env['RUNNER_TOOL_CACHE'], 'CodeQL');
        fs.rmdirSync(codeqlPath, { recursive: true });
  - name: Install @actions/tool-cache
    run: npm install @actions/tool-cache@3
    run: npm install @actions/tool-cache
  - name: Check toolcache does not contain CodeQL
    uses: actions/github-script@v8
    with:
@@ -0,0 +1,16 @@
name: "CCR"
description: "A standard analysis in CCR mode"
env:
  CODEQL_ACTION_ANALYSIS_KEY: "dynamic/copilot-pull-request-reviewer/codeql-action-test"
steps:
  - uses: ./../action/init
    id: init
    with:
      languages: javascript
      tools: ${{ steps.prepare-test.outputs.tools-url }}

  - uses: ./../action/analyze
    id: analysis
    with:
      upload-database: false
@@ -5,7 +5,6 @@ versions: ["nightly-latest"]
installGo: true
installDotNet: true
env:
  CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS: false
  CODEQL_ACTION_SUBLANGUAGE_FILE_COVERAGE: true
steps:
  - uses: ./../action/init
@@ -3,6 +3,19 @@ description: "Tests using a proxy specified by the https_proxy environment varia
versions: ["linked", "nightly-latest"]
container:
  image: ubuntu:22.04
container-init-steps:
  # These steps are required to initialise the `gh` cli in a container that doesn't
  # come pre-installed with it. The reason for that is that this is later
  # needed by the `prepare-test` workflow to find the latest release of CodeQL.
  name: Set up GitHub CLI
  run: |
    apt update
    apt install -y curl libreadline8 gnupg2 software-properties-common zstd
    curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
    apt-key add /usr/share/keyrings/githubcli-archive-keyring.gpg
    apt-add-repository https://cli.github.com/packages
    apt install -y gh
  env: {}
services:
  squid-proxy:
    image: ubuntu/squid:latest
@@ -1,9 +1,8 @@
name: "Analysis kinds"
description: "Tests basic functionality for different `analysis-kinds` inputs."
name: "Quality queries input"
description: "Tests that queries specified in the quality-queries input are used."
versions: ["linked", "nightly-latest"]
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality", "risk-assessment"]
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
env:
  CODEQL_ACTION_RISK_ASSESSMENT_ID: 1
  CHECK_SCRIPT: |
    const fs = require('fs');

@@ -38,24 +37,30 @@ steps:
      output: "${{ runner.temp }}/results"
      upload-database: false
      post-processed-sarif-path: "${{ runner.temp }}/post-processed"

  - name: Upload SARIF files
  - name: Upload security SARIF
    if: contains(matrix.analysis-kinds, 'code-scanning')
    uses: actions/upload-artifact@v6
    with:
      name: |
        analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
      path: "${{ runner.temp }}/results/*.sarif"
        quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
      path: "${{ runner.temp }}/results/javascript.sarif"
      retention-days: 7
  - name: Upload quality SARIF
    if: contains(matrix.analysis-kinds, 'code-quality')
    uses: actions/upload-artifact@v6
    with:
      name: |
        quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
      path: "${{ runner.temp }}/results/javascript.quality.sarif"
      retention-days: 7

  - name: Upload post-processed SARIF
    uses: actions/upload-artifact@v6
    with:
      name: |
        post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
        post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
      path: "${{ runner.temp }}/post-processed"
      retention-days: 7
      if-no-files-found: error

  - name: Check quality query does not appear in security SARIF
    if: contains(matrix.analysis-kinds, 'code-scanning')
    uses: actions/github-script@v8
@@ -4,7 +4,7 @@ description: "Tests using RuboCop to analyze a multi-language repository and the
versions: ["default"]
steps:
  - name: Set up Ruby
    uses: ruby/setup-ruby@09a7688d3b55cf0e976497ff046b70949eeaccfd # v1.288.0
    uses: ruby/setup-ruby@80740b3b13bf9857e28854481ca95a84e78a2bdf # v1.284.0
    with:
      ruby-version: 2.6
  - name: Install Code Scanning integration
@@ -5,6 +5,7 @@ import {
  fixCodeQualityCategory,
  getPullRequestBranches,
  isAnalyzingPullRequest,
  isCCR,
  isDefaultSetup,
  isDynamicWorkflow,
} from "./actions-util";

@@ -256,8 +257,16 @@ test("isDynamicWorkflow() returns true if event name is `dynamic`", (t) => {
  t.false(isDynamicWorkflow());
});

test("isCCR() returns true when expected", (t) => {
  process.env.GITHUB_EVENT_NAME = "dynamic";
  process.env[EnvVar.ANALYSIS_KEY] = "dynamic/copilot-pull-request-reviewer";
  t.assert(isCCR());
  t.false(isDefaultSetup());
});

test("isDefaultSetup() returns true when expected", (t) => {
  process.env.GITHUB_EVENT_NAME = "dynamic";
  process.env[EnvVar.ANALYSIS_KEY] = "dynamic/github-code-scanning";
  t.assert(isDefaultSetup());
  t.false(isCCR());
});
+10
-1
@@ -8,6 +8,7 @@ import * as io from "@actions/io";
import { JSONSchemaForNPMPackageJsonFiles } from "@schemastore/package";

import type { Config } from "./config-utils";
import { EnvVar } from "./environment";
import { Logger } from "./logging";
import {
  doesDirectoryExist,

@@ -254,7 +255,15 @@ export function isDynamicWorkflow(): boolean {

/** Determines whether we are running in default setup. */
export function isDefaultSetup(): boolean {
  return isDynamicWorkflow();
  return isDynamicWorkflow() && !isCCR();
}

/* The analysis key prefix used for CCR. */
const CCR_KEY_PREFIX = "dynamic/copilot-pull-request-reviewer";

/** Determines whether we are running in CCR. */
export function isCCR(): boolean {
  return process.env[EnvVar.ANALYSIS_KEY]?.startsWith(CCR_KEY_PREFIX) || false;
}

export function prettyPrintInvocation(cmd: string, args: string[]): string {
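For illustration, here is a minimal, self-contained sketch of the dispatch introduced above. It assumes, as the CCR PR check and the unit tests earlier in this diff indicate, that `EnvVar.ANALYSIS_KEY` resolves to the `CODEQL_ACTION_ANALYSIS_KEY` environment variable and that GitHub-managed workflows report the `dynamic` event name; this is a sketch, not the action's actual module wiring.

```typescript
// Sketch only: standalone version of the isCCR/isDefaultSetup dispatch.
const CCR_KEY_PREFIX = "dynamic/copilot-pull-request-reviewer";

function isDynamicWorkflow(): boolean {
  // Per the test above, dynamic workflows report the `dynamic` event name.
  return process.env.GITHUB_EVENT_NAME === "dynamic";
}

function isCCR(): boolean {
  // CCR runs are identified purely by their analysis key prefix.
  return (
    process.env.CODEQL_ACTION_ANALYSIS_KEY?.startsWith(CCR_KEY_PREFIX) ?? false
  );
}

function isDefaultSetup(): boolean {
  // After this change, any dynamic workflow that is not CCR is Default Setup.
  return isDynamicWorkflow() && !isCCR();
}
```

With `GITHUB_EVENT_NAME=dynamic` and an analysis key of `dynamic/copilot-pull-request-reviewer/codeql-action-test`, `isCCR()` returns true and `isDefaultSetup()` returns false, matching the unit tests above.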
@@ -1,23 +1,15 @@
import path from "path";

import test from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import {
  AnalysisKind,
  CodeScanning,
  compatibilityMatrix,
  RiskAssessment,
  getAnalysisConfig,
  getAnalysisKinds,
  parseAnalysisKinds,
  supportedAnalysisKinds,
} from "./analyses";
import { EnvVar } from "./environment";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import { AssessmentPayload } from "./upload-lib/types";
import { ConfigurationError } from "./util";

setupTests(test);

@@ -75,107 +67,3 @@ test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t)
  requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
  await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
});

// Test the compatibility matrix by looping through all analysis kinds.
const analysisKinds = Object.values(AnalysisKind);
for (let i = 0; i < analysisKinds.length; i++) {
  const analysisKind = analysisKinds[i];

  for (let j = i + 1; j < analysisKinds.length; j++) {
    const otherAnalysis = analysisKinds[j];

    if (analysisKind === otherAnalysis) continue;
    if (compatibilityMatrix[analysisKind].has(otherAnalysis)) {
      test(`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, async (t) => {
        const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
        requiredInputStub
          .withArgs("analysis-kinds")
          .returns([analysisKind, otherAnalysis].join(","));
        const result = await getAnalysisKinds(getRunnerLogger(true), true);
        t.is(result.length, 2);
      });
    } else {
      test(`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, async (t) => {
        const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
        requiredInputStub
          .withArgs("analysis-kinds")
          .returns([analysisKind, otherAnalysis].join(","));
        await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
          instanceOf: ConfigurationError,
          message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
        });
      });
    }
  }
}

test("Code Scanning configuration does not accept other SARIF extensions", (t) => {
  for (const analysisKind of supportedAnalysisKinds) {
    if (analysisKind === AnalysisKind.CodeScanning) continue;

    const analysis = getAnalysisConfig(analysisKind);
    const sarifPath = path.join("path", "to", `file${analysis.sarifExtension}`);

    // The Code Scanning configuration's `sarifPredicate` should not accept a path which
    // ends in a different configuration's `sarifExtension`.
    t.false(CodeScanning.sarifPredicate(sarifPath));
  }
});

test("Risk Assessment configuration transforms SARIF upload payload", (t) => {
  process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
  const payload = RiskAssessment.transformPayload({
    commit_oid: "abc",
    sarif: "sarif",
    ref: "ref",
    workflow_run_attempt: 1,
    workflow_run_id: 1,
    checkout_uri: "uri",
    tool_names: [],
  }) as AssessmentPayload;

  const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
  t.deepEqual(expected, payload);
});

test("Risk Assessment configuration throws for negative assessment IDs", (t) => {
  process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
  t.throws(
    () =>
      RiskAssessment.transformPayload({
        commit_oid: "abc",
        sarif: "sarif",
        ref: "ref",
        workflow_run_attempt: 1,
        workflow_run_id: 1,
        checkout_uri: "uri",
        tool_names: [],
      }),
    {
      instanceOf: Error,
      message: (msg) =>
        msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
    },
  );
});

test("Risk Assessment configuration throws for invalid IDs", (t) => {
  process.env[EnvVar.RISK_ASSESSMENT_ID] = "foo";
  t.throws(
    () =>
      RiskAssessment.transformPayload({
        commit_oid: "abc",
        sarif: "sarif",
        ref: "ref",
        workflow_run_attempt: 1,
        workflow_run_id: 1,
        checkout_uri: "uri",
        tool_names: [],
      }),
    {
      instanceOf: Error,
      message: (msg) =>
        msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be NaN: `),
    },
  );
});
+6
-80
@@ -3,30 +3,14 @@ import {
  getOptionalInput,
  getRequiredInput,
} from "./actions-util";
import { EnvVar } from "./environment";
import { Logger } from "./logging";
import {
  AssessmentPayload,
  BasePayload,
  UploadPayload,
} from "./upload-lib/types";
import { ConfigurationError, getRequiredEnvParam } from "./util";
import { ConfigurationError } from "./util";

export enum AnalysisKind {
  CodeScanning = "code-scanning",
  CodeQuality = "code-quality",
  RiskAssessment = "risk-assessment",
}

export type CompatibilityMatrix = Record<AnalysisKind, Set<AnalysisKind>>;

/** A mapping from analysis kinds to other analysis kinds which can be enabled concurrently. */
export const compatibilityMatrix: CompatibilityMatrix = {
  [AnalysisKind.CodeScanning]: new Set([AnalysisKind.CodeQuality]),
  [AnalysisKind.CodeQuality]: new Set([AnalysisKind.CodeScanning]),
  [AnalysisKind.RiskAssessment]: new Set(),
};

// Exported for testing. A set of all known analysis kinds.
export const supportedAnalysisKinds = new Set(Object.values(AnalysisKind));

@@ -83,7 +67,7 @@ export async function getAnalysisKinds(
    return cachedAnalysisKinds;
  }

  const analysisKinds = await parseAnalysisKinds(
  cachedAnalysisKinds = await parseAnalysisKinds(
    getRequiredInput("analysis-kinds"),
  );

@@ -101,27 +85,12 @@
  // if an input to `quality-queries` was specified. We should remove this once
  // `quality-queries` is no longer used.
  if (
    !analysisKinds.includes(AnalysisKind.CodeQuality) &&
    !cachedAnalysisKinds.includes(AnalysisKind.CodeQuality) &&
    qualityQueriesInput !== undefined
  ) {
    analysisKinds.push(AnalysisKind.CodeQuality);
    cachedAnalysisKinds.push(AnalysisKind.CodeQuality);
  }

  // Check that all enabled analysis kinds are compatible with each other.
  for (const analysisKind of analysisKinds) {
    for (const otherAnalysisKind of analysisKinds) {
      if (analysisKind === otherAnalysisKind) continue;

      if (!compatibilityMatrix[analysisKind].has(otherAnalysisKind)) {
        throw new ConfigurationError(
          `${analysisKind} and ${otherAnalysisKind} cannot be enabled at the same time`,
        );
      }
    }
  }

  // Cache the analysis kinds and return them.
  cachedAnalysisKinds = analysisKinds;
  return cachedAnalysisKinds;
}
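To make the pairwise compatibility check concrete, here is a hedged, standalone sketch of the same validation over a raw `analysis-kinds` string. Input parsing is deliberately simplified: the real code goes through `parseAnalysisKinds` and throws `ConfigurationError`, and the unknown-kind guard here is an addition for the sketch.

```typescript
// Sketch: validate a comma-separated analysis-kinds input against the matrix.
enum AnalysisKind {
  CodeScanning = "code-scanning",
  CodeQuality = "code-quality",
  RiskAssessment = "risk-assessment",
}

const compatibilityMatrix: Record<AnalysisKind, Set<AnalysisKind>> = {
  [AnalysisKind.CodeScanning]: new Set([AnalysisKind.CodeQuality]),
  [AnalysisKind.CodeQuality]: new Set([AnalysisKind.CodeScanning]),
  [AnalysisKind.RiskAssessment]: new Set(),
};

function validateAnalysisKinds(input: string): AnalysisKind[] {
  const kinds = input.split(",").map((k) => k.trim()) as AnalysisKind[];
  for (const kind of kinds) {
    if (!(kind in compatibilityMatrix)) {
      throw new Error(`unknown analysis kind: ${kind}`);
    }
  }
  // Every enabled kind must list every other enabled kind as compatible.
  for (const a of kinds) {
    for (const b of kinds) {
      if (a !== b && !compatibilityMatrix[a].has(b)) {
        throw new Error(`${a} and ${b} cannot be enabled at the same time`);
      }
    }
  }
  return kinds;
}
```

`validateAnalysisKinds("code-scanning,code-quality")` succeeds, while `validateAnalysisKinds("code-quality,risk-assessment")` throws, because the risk-assessment entry in the matrix is an empty set.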
@@ -132,7 +101,6 @@ export const codeQualityQueries: string[] = ["code-quality"];
enum SARIF_UPLOAD_ENDPOINT {
  CODE_SCANNING = "PUT /repos/:owner/:repo/code-scanning/analysis",
  CODE_QUALITY = "PUT /repos/:owner/:repo/code-quality/analysis",
  RISK_ASSESSMENT = "PUT /repos/:owner/:repo/code-scanning/risk-assessment",
}

// Represents configurations for different analysis kinds.

@@ -152,8 +120,6 @@ export interface AnalysisConfig {
  fixCategory: (logger: Logger, category?: string) => string | undefined;
  /** A prefix for environment variables used to track the uniqueness of SARIF uploads. */
  sentinelPrefix: string;
  /** Transforms the upload payload in an analysis-specific way. */
  transformPayload: (payload: UploadPayload) => BasePayload;
}

// Represents the Code Scanning analysis configuration.

@@ -164,11 +130,9 @@ export const CodeScanning: AnalysisConfig = {
  sarifExtension: ".sarif",
  sarifPredicate: (name) =>
    name.endsWith(CodeScanning.sarifExtension) &&
    !CodeQuality.sarifPredicate(name) &&
    !RiskAssessment.sarifPredicate(name),
    !CodeQuality.sarifPredicate(name),
  fixCategory: (_, category) => category,
  sentinelPrefix: "CODEQL_UPLOAD_SARIF_",
  transformPayload: (payload) => payload,
};

// Represents the Code Quality analysis configuration.

@@ -180,38 +144,6 @@ export const CodeQuality: AnalysisConfig = {
  sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
  fixCategory: fixCodeQualityCategory,
  sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_",
  transformPayload: (payload) => payload,
};

/**
 * Retrieves the CSRA assessment id from an environment variable and adds it to the payload.
 * @param payload The base payload.
 */
function addAssessmentId(payload: UploadPayload): AssessmentPayload {
  const rawAssessmentId = getRequiredEnvParam(EnvVar.RISK_ASSESSMENT_ID);
  const assessmentId = parseInt(rawAssessmentId, 10);
  if (Number.isNaN(assessmentId)) {
    throw new Error(
      `${EnvVar.RISK_ASSESSMENT_ID} must not be NaN: ${rawAssessmentId}`,
    );
  }
  if (assessmentId < 0) {
    throw new Error(
      `${EnvVar.RISK_ASSESSMENT_ID} must not be negative: ${rawAssessmentId}`,
    );
  }
  return { sarif: payload.sarif, assessment_id: assessmentId };
}

export const RiskAssessment: AnalysisConfig = {
  kind: AnalysisKind.RiskAssessment,
  name: "code scanning risk assessment",
  target: SARIF_UPLOAD_ENDPOINT.RISK_ASSESSMENT,
  sarifExtension: ".csra.sarif",
  sarifPredicate: (name) => name.endsWith(RiskAssessment.sarifExtension),
  fixCategory: (_, category) => category,
  sentinelPrefix: "CODEQL_UPLOAD_CSRA_SARIF_",
  transformPayload: addAssessmentId,
};

/**

@@ -228,8 +160,6 @@ export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
      return CodeScanning;
    case AnalysisKind.CodeQuality:
      return CodeQuality;
    case AnalysisKind.RiskAssessment:
      return RiskAssessment;
  }
}

@@ -237,8 +167,4 @@ export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
// we want to scan a folder containing SARIF files in an order that finds the more
// specific extensions first. This constant defines an array in the order of analysis
// configurations with more specific extensions to less specific extensions.
export const SarifScanOrder: AnalysisConfig[] = [
  RiskAssessment,
  CodeQuality,
  CodeScanning,
];
export const SarifScanOrder = [CodeQuality, CodeScanning];
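The ordering constraint behind `SarifScanOrder` is worth spelling out: `javascript.quality.sarif` also ends with the generic `.sarif` extension, so a scan that tried the Code Scanning predicate first would claim quality (and risk-assessment) files. A minimal sketch of extension-based dispatch, assuming only the extensions shown in the diff:

```typescript
// Sketch: classify a SARIF file by matching the most specific extension first
// (.csra.sarif, then .quality.sarif, then plain .sarif).
const scanOrder: Array<{ name: string; extension: string }> = [
  { name: "risk assessment", extension: ".csra.sarif" },
  { name: "code quality", extension: ".quality.sarif" },
  { name: "code scanning", extension: ".sarif" },
];

function classifySarif(fileName: string): string | undefined {
  return scanOrder.find((a) => fileName.endsWith(a.extension))?.name;
}

// classifySarif("javascript.quality.sarif") === "code quality", even though
// the file name also ends with the generic ".sarif" extension.
```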
@@ -30,10 +30,10 @@ import {
} from "./dependency-caching";
import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
import { EnvVar } from "./environment";
import { initFeatures } from "./feature-flags";
import { Features } from "./feature-flags";
import { KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay";
import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay-database-utils";
import { getRepositoryNwo } from "./repository";
import * as statusReport from "./status-report";
import {

@@ -293,7 +293,7 @@ async function run(startedAt: Date) {

  util.checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion);

  const features = initFeatures(
  const features = new Features(
    gitHubVersion,
    repositoryNwo,
    actionsUtil.getTemporaryDirectory(),

@@ -363,9 +363,7 @@ async function run(startedAt: Date) {

  uploadResults = await postProcessAndUploadSarif(
    logger,
    config.tempDir,
    features,
    async () => codeql,
    uploadKind,
    checkoutPath,
    outputDir,
+2
-2
@@ -4,7 +4,7 @@ import * as path from "path";
import test from "ava";
import * as sinon from "sinon";

import { CodeQuality, CodeScanning, RiskAssessment } from "./analyses";
import { CodeQuality, CodeScanning } from "./analyses";
import {
  runQueries,
  defaultSuites,

@@ -87,6 +87,7 @@ test("status report fields", async (t) => {
      );
      return "";
    },
    databasePrintBaseline: async () => "",
  });

  const config = createTestConfig({

@@ -155,6 +156,5 @@ test("addSarifExtension", (t) => {
      addSarifExtension(CodeQuality, language),
      `${language}.quality.sarif`,
    );
    t.is(addSarifExtension(RiskAssessment, language), `${language}.csra.sarif`);
  }
});
+9
-14
@@ -24,7 +24,7 @@ import { EnvVar } from "./environment";
import { FeatureEnablement, Feature } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay-database-utils";
import { DatabaseCreationTimings, EventReport } from "./status-report";
import { endTracingForCluster } from "./tracer-config";
import * as util from "./util";

@@ -495,18 +495,10 @@ export async function runQueries(
    endTimeInterpretResults.getTime() - startTimeInterpretResults.getTime();
  logger.endGroup();

  if (analysisSummary.trim()) {
    logger.info(analysisSummary);
  }
  if (qualityAnalysisSummary?.trim()) {
  logger.info(analysisSummary);
  if (qualityAnalysisSummary) {
    logger.info(qualityAnalysisSummary);
  }
  if (!config.enableFileCoverageInformation) {
    logger.info(
      "To speed up pull request analysis, file coverage information is only enabled when analyzing " +
        "the default branch and protected branches.",
    );
  }

  if (await features.getValue(Feature.QaTelemetryEnabled)) {
    // Note: QA adds the `code-quality` query suite to the `queries` input,

@@ -549,9 +541,12 @@
): Promise<{ summary: string; sarifFile: string }> {
  logger.info(`Interpreting ${analysis.name} results for ${language}`);

  // Apply the analysis configuration's `fixCategory` function to adjust the category if needed.
  // This is a no-op for Code Scanning.
  const category = analysis.fixCategory(logger, automationDetailsId);
  // If this is a Code Quality analysis, correct the category to one
  // accepted by the Code Quality backend.
  let category = automationDetailsId;
  if (analysis.kind === analyses.AnalysisKind.CodeQuality) {
    category = analysis.fixCategory(logger, automationDetailsId);
  }

  const sarifFile = path.join(
    sarifFolder,
@@ -36,9 +36,6 @@ test("getApiClient", async (t) => {
      baseUrl: "http://api.github.localhost",
      log: sinon.match.any,
      userAgent: `CodeQL-Action/${actionsUtil.getActionVersion()}`,
      retry: {
        doNotRetry: [400, 410, 422, 451],
      },
    }),
  );
});
+1
-8
@@ -51,12 +51,6 @@ function createApiClientWithDetails(
      warn: core.warning,
      error: core.error,
    },
    retry: {
      // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors
      // with authentication, so we remove 401, 403, and 404 from the default list to ensure that
      // these errors are retried.
      doNotRetry: [400, 410, 422, 451],
    },
    }),
  );
}

@@ -312,8 +306,7 @@ export function wrapApiConfigurationError(e: unknown) {
  }
  if (
    httpError.message.includes("Bad credentials") ||
    httpError.message.includes("Not Found") ||
    httpError.message.includes("Requires authentication")
    httpError.message.includes("Not Found")
  ) {
    return new ConfigurationError(
      "Please check that your token is valid and has the required permissions: contents: read, security-events: write",
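The `doNotRetry` override shown above is an option of `@octokit/plugin-retry`. As a hedged sketch of how such an override is wired up outside the action (the token source here is an assumption for illustration):

```typescript
import { Octokit } from "@octokit/core";
import { retry } from "@octokit/plugin-retry";

const RetryingOctokit = Octokit.plugin(retry);

const octokit = new RetryingOctokit({
  auth: process.env.GITHUB_TOKEN, // assumed token source for this sketch
  retry: {
    // Per the comment in the diff, the plugin's default list is 400, 401,
    // 403, 404, 410, 422, and 451; dropping 401/403/404 means transient
    // authentication errors are retried instead of failing immediately.
    doNotRetry: [400, 410, 422, 451],
  },
});
```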
+27
-112
@@ -4,122 +4,37 @@ import * as path from "path";

import test from "ava";

import {
  GITHUB_PAT_CLASSIC_PATTERN,
  isAuthToken,
  scanArtifactsForTokens,
  TokenType,
} from "./artifact-scanner";
import { scanArtifactsForTokens } from "./artifact-scanner";
import { getRunnerLogger } from "./logging";
import {
  checkExpectedLogMessages,
  getRecordingLogger,
  LoggedMessage,
  makeTestToken,
} from "./testing-utils";
import { getRecordingLogger, LoggedMessage } from "./testing-utils";

test("makeTestToken", (t) => {
  t.is(makeTestToken().length, 36);
  t.is(makeTestToken(255).length, 255);
test("scanArtifactsForTokens detects GitHub tokens in files", async (t) => {
  const logger = getRunnerLogger(true);
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));

  try {
    // Create a test file with a fake GitHub token
    const testFile = path.join(tempDir, "test.txt");
    fs.writeFileSync(
      testFile,
      "This is a test file with token ghp_1234567890123456789012345678901234AB",
    );

    const error = await t.throwsAsync(
      async () => await scanArtifactsForTokens([testFile], logger),
    );

    t.regex(
      error?.message || "",
      /Found 1 potential GitHub token.*Personal Access Token/,
    );
    t.regex(error?.message || "", /test\.txt/);
  } finally {
    // Clean up
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
});

test("isAuthToken", (t) => {
  // Undefined for strings that aren't tokens
  t.is(isAuthToken("some string"), undefined);
  t.is(isAuthToken("ghp_"), undefined);
  t.is(isAuthToken("ghp_123"), undefined);

  // Token types for strings that are tokens.
  t.is(isAuthToken(`ghp_${makeTestToken()}`), TokenType.PersonalAccessClassic);
  t.is(isAuthToken(`ghp_${makeTestToken()}`), TokenType.PersonalAccessClassic);
  t.is(
    isAuthToken(`ghs_${makeTestToken(255)}`),
    TokenType.AppInstallationAccess,
  );
  t.is(
    isAuthToken(`github_pat_${makeTestToken(22)}_${makeTestToken(59)}`),
    TokenType.PersonalAccessFineGrained,
  );

  // With a custom pattern set
  t.is(
    isAuthToken(`ghp_${makeTestToken()}`, [GITHUB_PAT_CLASSIC_PATTERN]),
    TokenType.PersonalAccessClassic,
  );
  t.is(
    isAuthToken(`github_pat_${makeTestToken(22)}_${makeTestToken(59)}`, [
      GITHUB_PAT_CLASSIC_PATTERN,
    ]),
    undefined,
  );
});

const testTokens = [
  {
    type: TokenType.PersonalAccessClassic,
    value: `ghp_${makeTestToken()}`,
    checkPattern: "Personal Access Token",
  },
  {
    type: TokenType.PersonalAccessFineGrained,
    value:
      "github_pat_1234567890ABCDEFGHIJKL_MNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890ABCDEFGHI",
    checkPattern: "Personal Access Token",
  },
  {
    type: TokenType.OAuth,
    value: `gho_${makeTestToken()}`,
  },
  {
    type: TokenType.UserToServer,
    value: `ghu_${makeTestToken()}`,
  },
  {
    type: TokenType.ServerToServer,
    value: `ghs_${makeTestToken()}`,
  },
  {
    type: TokenType.Refresh,
    value: `ghr_${makeTestToken()}`,
  },
  {
    type: TokenType.AppInstallationAccess,
    value: `ghs_${makeTestToken(255)}`,
  },
];

for (const { type, value, checkPattern } of testTokens) {
  test(`scanArtifactsForTokens detects GitHub ${type} tokens in files`, async (t) => {
    const logMessages = [];
    const logger = getRecordingLogger(logMessages, { logToConsole: false });
    const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));

    try {
      // Create a test file with a fake GitHub token
      const testFile = path.join(tempDir, "test.txt");
      fs.writeFileSync(testFile, `This is a test file with token ${value}`);

      const error = await t.throwsAsync(
        async () => await scanArtifactsForTokens([testFile], logger),
      );

      t.regex(
        error?.message || "",
        new RegExp(`Found 1 potential GitHub token.*${checkPattern || type}`),
      );
      t.regex(error?.message || "", /test\.txt/);

      checkExpectedLogMessages(t, logMessages, [
        "Starting best-effort check",
        `Found 1 ${type}`,
      ]);
    } finally {
      // Clean up
      fs.rmSync(tempDir, { recursive: true, force: true });
    }
  });
}

test("scanArtifactsForTokens handles files without tokens", async (t) => {
  const logger = getRunnerLogger(true);
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "scanner-test-"));
+13
-60
@@ -7,62 +7,33 @@ import * as exec from "@actions/exec";
import { Logger } from "./logging";
import { getErrorMessage } from "./util";

/**
* Enumerates known types of GitHub token formats.
*/
export enum TokenType {
PersonalAccessClassic = "Personal Access Token (Classic)",
PersonalAccessFineGrained = "Personal Access Token (Fine-grained)",
OAuth = "OAuth Access Token",
UserToServer = "User-to-Server Token",
ServerToServer = "Server-to-Server Token",
Refresh = "Refresh Token",
AppInstallationAccess = "App Installation Access Token",
}

/** A value of this type associates a token type with its pattern. */
export interface TokenPattern {
type: TokenType;
pattern: RegExp;
}

/** The pattern for PATs (Classic) */
export const GITHUB_PAT_CLASSIC_PATTERN: TokenPattern = {
type: TokenType.PersonalAccessClassic,
pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
};

/** The pattern for PATs (Fine-grained) */
export const GITHUB_PAT_FINE_GRAINED_PATTERN: TokenPattern = {
type: TokenType.PersonalAccessFineGrained,
pattern: /\bgithub_pat_[a-zA-Z0-9_]+\b/g,
};

/**
* GitHub token patterns to scan for.
* These patterns match various GitHub token formats.
*/
const GITHUB_TOKEN_PATTERNS: TokenPattern[] = [
GITHUB_PAT_CLASSIC_PATTERN,
GITHUB_PAT_FINE_GRAINED_PATTERN,
const GITHUB_TOKEN_PATTERNS = [
{
type: TokenType.OAuth,
name: "Personal Access Token",
pattern: /\bghp_[a-zA-Z0-9]{36}\b/g,
},
{
name: "OAuth Access Token",
pattern: /\bgho_[a-zA-Z0-9]{36}\b/g,
},
{
type: TokenType.UserToServer,
name: "User-to-Server Token",
pattern: /\bghu_[a-zA-Z0-9]{36}\b/g,
},
{
type: TokenType.ServerToServer,
name: "Server-to-Server Token",
pattern: /\bghs_[a-zA-Z0-9]{36}\b/g,
},
{
type: TokenType.Refresh,
name: "Refresh Token",
pattern: /\bghr_[a-zA-Z0-9]{36}\b/g,
},
{
type: TokenType.AppInstallationAccess,
name: "App Installation Access Token",
pattern: /\bghs_[a-zA-Z0-9]{255}\b/g,
},
];
@@ -77,24 +48,6 @@ interface ScanResult {
findings: TokenFinding[];
}

/**
* Checks whether `value` matches any token `patterns`.
* @param value The value to match against.
* @param patterns The patterns to check.
* @returns The type of the first matching pattern, or `undefined` if none match.
*/
export function isAuthToken(
value: string,
patterns: TokenPattern[] = GITHUB_TOKEN_PATTERNS,
) {
for (const { type, pattern } of patterns) {
if (value.match(pattern)) {
return type;
}
}
return undefined;
}

/**
* Scans a file for GitHub tokens.
*
@@ -112,13 +65,13 @@ function scanFileForTokens(
try {
const content = fs.readFileSync(filePath, "utf8");

for (const { type, pattern } of GITHUB_TOKEN_PATTERNS) {
for (const { name, pattern } of GITHUB_TOKEN_PATTERNS) {
const matches = content.match(pattern);
if (matches) {
for (let i = 0; i < matches.length; i++) {
findings.push({ tokenType: type, filePath: relativePath });
findings.push({ tokenType: name, filePath: relativePath });
}
logger.debug(`Found ${matches.length} ${type}(s) in ${relativePath}`);
logger.debug(`Found ${matches.length} ${name}(s) in ${relativePath}`);
}
}

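Note: the scanner changed above is driven by a table of regular expressions matched against file contents. A minimal standalone sketch of that approach, assuming only Node's fs module; the names TOKEN_PATTERNS, classifyToken, and countTokensInFile are illustrative stand-ins rather than the action's actual exports:

import * as fs from "fs";

// Illustrative subset of the pattern table shown in the diff above.
const TOKEN_PATTERNS = [
  { name: "Personal Access Token (Classic)", pattern: /\bghp_[a-zA-Z0-9]{36}\b/g },
  { name: "Server-to-Server Token", pattern: /\bghs_[a-zA-Z0-9]{36}\b/g },
];

// Returns the name of the first matching pattern, or undefined if none match.
function classifyToken(value: string): string | undefined {
  for (const { name, pattern } of TOKEN_PATTERNS) {
    if (value.match(pattern)) {
      return name;
    }
  }
  return undefined;
}

// Counts candidate tokens in a file, mirroring the shape of scanFileForTokens.
function countTokensInFile(filePath: string): number {
  const content = fs.readFileSync(filePath, "utf8");
  let findings = 0;
  for (const { pattern } of TOKEN_PATTERNS) {
    findings += (content.match(pattern) ?? []).length;
  }
  return findings;
}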
+2
-2
@@ -6,7 +6,7 @@ import { CodeQL, getCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import { DocUrl } from "./doc-url";
import { EnvVar } from "./environment";
import { Feature, featureConfig, initFeatures } from "./feature-flags";
import { Feature, featureConfig, Features } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { getRepositoryNwo } from "./repository";
@@ -117,7 +117,7 @@ export async function setupCppAutobuild(codeql: CodeQL, logger: Logger) {
const featureName = "C++ automatic installation of dependencies";
const gitHubVersion = await getGitHubVersion();
const repositoryNwo = getRepositoryNwo();
const features = initFeatures(
const features = new Features(
gitHubVersion,
repositoryNwo,
getTemporaryDirectory(),

+22
-22
@@ -28,7 +28,7 @@ import {
OverlayDatabaseMode,
writeBaseDatabaseOidsFile,
writeOverlayChangesFile,
} from "./overlay";
} from "./overlay-database-utils";
import * as setupCodeql from "./setup-codeql";
import { ZstdAvailability } from "./tar";
import { ToolsDownloadStatusReport } from "./tools-download";
@@ -160,7 +160,6 @@ export interface CodeQL {
databasePath: string,
outputFilePath: string,
dbName: string,
includeDiagnostics: boolean,
alsoIncludeRelativePaths: string[],
): Promise<void>;
/**
@@ -187,6 +186,10 @@ export interface CodeQL {
config: Config,
features: FeatureEnablement,
): Promise<string>;
/**
* Run 'codeql database print-baseline'.
*/
databasePrintBaseline(databasePath: string): Promise<string>;
/**
* Run 'codeql database export-diagnostics'
*
@@ -490,6 +493,10 @@ export function createStubCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
partialCodeql,
"databaseInterpretResults",
),
databasePrintBaseline: resolveFunction(
partialCodeql,
"databasePrintBaseline",
),
databaseExportDiagnostics: resolveFunction(
partialCodeql,
"databaseExportDiagnostics",
@@ -621,13 +628,6 @@ async function getCodeQLForCmd(
extraArgs.push("--overlay-base");
}

const baselineFilesOptions = config.enableFileCoverageInformation
? [
"--calculate-language-specific-baseline",
"--sublanguage-file-coverage",
]
: ["--no-calculate-baseline"];

await runCli(
cmd,
[
@@ -639,14 +639,12 @@ async function getCodeQLForCmd(
"--db-cluster",
config.dbLocation,
`--source-root=${sourceRoot}`,
...baselineFilesOptions,
"--calculate-language-specific-baseline",
"--extractor-include-aliases",
"--sublanguage-file-coverage",
...extraArgs,
...getExtraOptionsFromEnv(["database", "init"], {
// Some user configs specify `--no-calculate-baseline` as an additional
// argument to `codeql database init`. Therefore ignore the baseline file
// options here to avoid specifying the same argument twice and erroring.
ignoringOptions: ["--overwrite", ...baselineFilesOptions],
ignoringOptions: ["--overwrite"],
}),
],
{ stdin: externalRepositoryToken },
@@ -887,6 +885,15 @@ async function getCodeQLForCmd(
noStreamStdout: true,
});
},
async databasePrintBaseline(databasePath: string): Promise<string> {
const codeqlArgs = [
"database",
"print-baseline",
...getExtraOptionsFromEnv(["database", "print-baseline"]),
databasePath,
];
return await runCli(cmd, codeqlArgs);
},
async databaseCleanupCluster(
config: Config,
cleanupLevel: CleanupLevel,
@@ -913,22 +920,15 @@ async function getCodeQLForCmd(
databasePath: string,
outputFilePath: string,
databaseName: string,
includeDiagnostics: boolean,
alsoIncludeRelativePaths: string[],
): Promise<void> {
const includeDiagnosticsArgs = includeDiagnostics
? ["--include-diagnostics"]
: [];
const args = [
"database",
"bundle",
databasePath,
`--output=${outputFilePath}`,
`--name=${databaseName}`,
...includeDiagnosticsArgs,
...getExtraOptionsFromEnv(["database", "bundle"], {
ignoringOptions: includeDiagnosticsArgs,
}),
...getExtraOptionsFromEnv(["database", "bundle"]),
];
if (
await this.supportsFeature(ToolsFeature.BundleSupportsIncludeOption)

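Note: the new databasePrintBaseline wrapper above follows the action's usual CLI-wrapper shape: build the subcommand's argument vector, splice in user-supplied extra options, run the CLI, and return stdout. A minimal standalone sketch of that shape, using child_process in place of the action's internal runCli helper; codeqlPath and extraOptions are assumptions for illustration:

import { execFileSync } from "child_process";

// Simplified stand-in for the wrapper added in the diff above.
function databasePrintBaseline(
  codeqlPath: string,
  databasePath: string,
  extraOptions: string[] = [],
): string {
  const args = ["database", "print-baseline", ...extraOptions, databasePath];
  return execFileSync(codeqlPath, args, { encoding: "utf8" });
}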
+6
-139
@@ -7,7 +7,7 @@ import * as yaml from "js-yaml";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import { AnalysisKind, supportedAnalysisKinds } from "./analyses";
import { AnalysisKind } from "./analyses";
import * as api from "./api-client";
import { CachingKind } from "./caching-utils";
import { createStubCodeQL } from "./codeql";
@@ -18,8 +18,10 @@ import * as gitUtils from "./git-utils";
import { GitVersionInfo } from "./git-utils";
import { KnownLanguage, Language } from "./languages";
import { getRunnerLogger } from "./logging";
import { CODEQL_OVERLAY_MINIMUM_VERSION, OverlayDatabaseMode } from "./overlay";
import * as overlayStatus from "./overlay/status";
import {
CODEQL_OVERLAY_MINIMUM_VERSION,
OverlayDatabaseMode,
} from "./overlay-database-utils";
import { parseRepositoryNwo } from "./repository";
import {
setupTests,
@@ -87,7 +89,6 @@ function createTestInitConfigInputs(
},
features: createFeatures([]),
repositoryProperties: {},
enableFileCoverageInformation: true,
logger: getRunnerLogger(true),
} satisfies configUtils.InitConfigInputs,
overrides,
@@ -982,7 +983,6 @@ interface OverlayDatabaseModeTestSetup {
codeScanningConfig: configUtils.UserConfig;
diskUsage: DiskUsage | undefined;
memoryFlagValue: number;
shouldSkipOverlayAnalysisDueToCachedStatus: boolean;
}

const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
@@ -1004,7 +1004,6 @@ const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
numTotalBytes: 100_000_000_000,
},
memoryFlagValue: 6920,
shouldSkipOverlayAnalysisDueToCachedStatus: false,
};

const getOverlayDatabaseModeMacro = test.macro({
@@ -1015,7 +1014,6 @@ const getOverlayDatabaseModeMacro = test.macro({
expected: {
overlayDatabaseMode: OverlayDatabaseMode;
useOverlayDatabaseCaching: boolean;
skippedDueToCachedStatus?: boolean;
},
) => {
return await withTmpDir(async (tempDir) => {
@@ -1040,10 +1038,6 @@ const getOverlayDatabaseModeMacro = test.macro({

sinon.stub(util, "checkDiskUsage").resolves(setup.diskUsage);

sinon
.stub(overlayStatus, "shouldSkipOverlayAnalysis")
.resolves(setup.shouldSkipOverlayAnalysisDueToCachedStatus);

// Mock feature flags
const features = createFeatures(setup.features);

@@ -1086,10 +1080,7 @@ const getOverlayDatabaseModeMacro = test.macro({
logger,
);

t.deepEqual(result, {
skippedDueToCachedStatus: false,
...expected,
});
t.deepEqual(result, expected);
} finally {
// Restore the original environment
process.env = originalEnv;
@@ -1269,71 +1260,6 @@ test(
},
);

test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if runner disk space is below v2 limit and v2 resource checks enabled",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
Feature.OverlayAnalysisResourceChecksV2,
],
isDefaultBranch: true,
diskUsage: {
numAvailableBytes: 5_000_000_000,
numTotalBytes: 100_000_000_000,
},
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);

test(
getOverlayDatabaseModeMacro,
"Overlay-base database on default branch if runner disk space is between v2 and v1 limits and v2 resource checks enabled",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
Feature.OverlayAnalysisResourceChecksV2,
],
isDefaultBranch: true,
diskUsage: {
numAvailableBytes: 15_000_000_000,
numTotalBytes: 100_000_000_000,
},
},
{
overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
useOverlayDatabaseCaching: true,
},
);

test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if runner disk space is between v2 and v1 limits and v2 resource checks not enabled",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
],
isDefaultBranch: true,
diskUsage: {
numAvailableBytes: 15_000_000_000,
numTotalBytes: 100_000_000_000,
},
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);

test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if memory flag is too low",
@@ -1371,46 +1297,6 @@ test(
},
);

test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when cached status indicates previous failure",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisJavascript,
Feature.OverlayAnalysisStatusCheck,
],
isDefaultBranch: true,
shouldSkipOverlayAnalysisDueToCachedStatus: true,
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
skippedDueToCachedStatus: true,
},
);

test(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when cached status indicates previous failure",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisJavascript,
Feature.OverlayAnalysisStatusCheck,
],
isPullRequest: true,
shouldSkipOverlayAnalysisDueToCachedStatus: true,
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
skippedDueToCachedStatus: true,
},
);

test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when code-scanning feature enabled with disable-default-queries",
@@ -1942,22 +1828,3 @@ test("hasActionsWorkflows doesn't throw if workflows folder doesn't exist", asyn
t.notThrows(() => configUtils.hasActionsWorkflows(tmpDir));
});
});

test("getPrimaryAnalysisConfig - single analysis kind", (t) => {
// If only one analysis kind is configured, we expect to get the matching configuration.
for (const analysisKind of supportedAnalysisKinds) {
const singleKind = createTestConfig({ analysisKinds: [analysisKind] });
t.is(configUtils.getPrimaryAnalysisConfig(singleKind).kind, analysisKind);
}
});

test("getPrimaryAnalysisConfig - Code Scanning + Code Quality", (t) => {
// For CS+CQ, we expect to get the Code Scanning configuration.
const codeScanningAndCodeQuality = createTestConfig({
analysisKinds: [AnalysisKind.CodeScanning, AnalysisKind.CodeQuality],
});
t.is(
configUtils.getPrimaryAnalysisConfig(codeScanningAndCodeQuality).kind,
AnalysisKind.CodeScanning,
);
});

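Note: the overlay tests above share their setup through an ava macro, so each case supplies only a setup object and the expected result. A minimal sketch of that macro pattern, with a toy check standing in for getOverlayDatabaseMode:

import test from "ava";

// Shared test logic, parameterised per case.
const isPositiveMacro = test.macro({
  exec: (t, input: number, expected: boolean) => {
    t.is(input > 0, expected);
  },
  // Generates a readable title for each case.
  title: (_, input: number) => `isPositive(${input})`,
});

test(isPositiveMacro, 5, true);
test(isPositiveMacro, -3, false);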
+46
-135
@@ -7,13 +7,14 @@ import * as yaml from "js-yaml";
import {
getActionVersion,
isAnalyzingPullRequest,
isDynamicWorkflow,
isCCR,
} from "./actions-util";
import {
AnalysisConfig,
AnalysisKind,
CodeQuality,
codeQualityQueries,
getAnalysisConfig,
CodeScanning,
} from "./analyses";
import * as api from "./api-client";
import { CachingKind, getCachingKind } from "./caching-utils";
@@ -25,13 +26,8 @@ import {
parseUserConfig,
UserConfig,
} from "./config/db-config";
import {
addNoLanguageDiagnostic,
makeDiagnostic,
makeTelemetryDiagnostic,
} from "./diagnostics";
import { addDiagnostic, makeTelemetryDiagnostic } from "./diagnostics";
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
import { DocUrl } from "./doc-url";
import { EnvVar } from "./environment";
import * as errorMessages from "./error-messages";
import { Feature, FeatureEnablement } from "./feature-flags";
@@ -46,8 +42,10 @@ import {
} from "./git-utils";
import { KnownLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { CODEQL_OVERLAY_MINIMUM_VERSION, OverlayDatabaseMode } from "./overlay";
import { shouldSkipOverlayAnalysis } from "./overlay/status";
import {
CODEQL_OVERLAY_MINIMUM_VERSION,
OverlayDatabaseMode,
} from "./overlay-database-utils";
import { RepositoryNwo } from "./repository";
import { ToolsFeature } from "./tools-features";
import { downloadTrapCaches } from "./trap-caching";
@@ -63,7 +61,6 @@ import {
getErrorMessage,
isInTestMode,
joinAtMost,
DiskUsage,
} from "./util";

export * from "./config/db-config";
@@ -79,15 +76,6 @@ const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 20000;
const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES =
OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB * 1_000_000;

/**
* The v2 minimum available disk space (in MB) required to perform overlay
* analysis. This is a lower threshold than the v1 limit, allowing overlay
* analysis to run on runners with less available disk space.
*/
const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14000;
const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES =
OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1_000_000;

/**
* The minimum memory (in MB) that must be available for CodeQL to perform overlay
* analysis. If CodeQL will be given less memory than this threshold, then the
@@ -226,11 +214,6 @@ export interface Config {
* A partial mapping from repository properties that affect us to their values.
*/
repositoryProperties: RepositoryProperties;

/**
* Whether to enable file coverage information.
*/
enableFileCoverageInformation: boolean;
}

async function getSupportedLanguageMap(
@@ -450,7 +433,6 @@ export interface InitConfigInputs {
apiDetails: api.GitHubApiCombinedDetails;
features: FeatureEnablement;
repositoryProperties: RepositoryProperties;
enableFileCoverageInformation: boolean;
analysisKinds: AnalysisKind[];
logger: Logger;
}
@@ -480,7 +462,6 @@ export async function initActionState(
repositoryProperties,
analysisKinds,
logger,
enableFileCoverageInformation,
}: InitConfigInputs,
userConfig: UserConfig,
): Promise<Config> {
@@ -561,7 +542,6 @@ export async function initActionState(
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
repositoryProperties,
enableFileCoverageInformation,
};
}

@@ -685,26 +665,21 @@ async function isOverlayAnalysisFeatureEnabled(
* and the maximum memory CodeQL will be allowed to use.
*/
async function runnerSupportsOverlayAnalysis(
diskUsage: DiskUsage | undefined,
ramInput: string | undefined,
logger: Logger,
useV2ResourceChecks: boolean,
): Promise<boolean> {
const minimumDiskSpaceBytes = useV2ResourceChecks
? OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES
: OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES;
const diskUsage = await checkDiskUsage(logger);
if (
diskUsage === undefined ||
diskUsage.numAvailableBytes < minimumDiskSpaceBytes
diskUsage.numAvailableBytes < OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES
) {
const diskSpaceMb =
diskUsage === undefined
? 0
: Math.round(diskUsage.numAvailableBytes / 1_000_000);
const minimumDiskSpaceMb = Math.round(minimumDiskSpaceBytes / 1_000_000);
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
`due to insufficient disk space (${diskSpaceMb} MB, needed ${minimumDiskSpaceMb} MB).`,
`due to insufficient disk space (${diskSpaceMb} MB).`,
);
return false;
}
@@ -713,7 +688,7 @@ async function runnerSupportsOverlayAnalysis(
if (memoryFlagValue < OVERLAY_MINIMUM_MEMORY_MB) {
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
`due to insufficient memory for CodeQL analysis (${memoryFlagValue} MB, needed ${OVERLAY_MINIMUM_MEMORY_MB} MB).`,
`due to insufficient memory for CodeQL analysis (${memoryFlagValue} MB).`,
);
return false;
}
@@ -755,11 +730,9 @@ export async function getOverlayDatabaseMode(
): Promise<{
overlayDatabaseMode: OverlayDatabaseMode;
useOverlayDatabaseCaching: boolean;
skippedDueToCachedStatus: boolean;
}> {
let overlayDatabaseMode = OverlayDatabaseMode.None;
let useOverlayDatabaseCaching = false;
let skippedDueToCachedStatus = false;

const modeEnv = process.env.CODEQL_OVERLAY_DATABASE_MODE;
// Any unrecognized CODEQL_OVERLAY_DATABASE_MODE value will be ignored and
@@ -786,43 +759,11 @@ export async function getOverlayDatabaseMode(
Feature.OverlayAnalysisSkipResourceChecks,
codeql,
));
const useV2ResourceChecks = await features.getValue(
Feature.OverlayAnalysisResourceChecksV2,
);
const checkOverlayStatus = await features.getValue(
Feature.OverlayAnalysisStatusCheck,
);
const diskUsage =
performResourceChecks || checkOverlayStatus
? await checkDiskUsage(logger)
: undefined;
if (
performResourceChecks &&
!(await runnerSupportsOverlayAnalysis(
diskUsage,
ramInput,
logger,
useV2ResourceChecks,
))
!(await runnerSupportsOverlayAnalysis(ramInput, logger))
) {
overlayDatabaseMode = OverlayDatabaseMode.None;
} else if (checkOverlayStatus && diskUsage === undefined) {
logger.warning(
`Unable to determine disk usage, therefore setting overlay database mode to ${OverlayDatabaseMode.None}.`,
);
overlayDatabaseMode = OverlayDatabaseMode.None;
} else if (
checkOverlayStatus &&
diskUsage &&
(await shouldSkipOverlayAnalysis(codeql, languages, diskUsage, logger))
) {
logger.info(
`Setting overlay database mode to ${OverlayDatabaseMode.None} ` +
"because overlay analysis previously failed with this combination of languages, " +
"disk space, and CodeQL version.",
);
overlayDatabaseMode = OverlayDatabaseMode.None;
skippedDueToCachedStatus = true;
} else if (isAnalyzingPullRequest()) {
overlayDatabaseMode = OverlayDatabaseMode.Overlay;
useOverlayDatabaseCaching = true;
@@ -843,7 +784,6 @@ export async function getOverlayDatabaseMode(
const nonOverlayAnalysis = {
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
skippedDueToCachedStatus,
};

if (overlayDatabaseMode === OverlayDatabaseMode.None) {
@@ -908,7 +848,6 @@ export async function getOverlayDatabaseMode(
return {
overlayDatabaseMode,
useOverlayDatabaseCaching,
skippedDueToCachedStatus,
};
}

@@ -1015,13 +954,10 @@ export async function initConfig(
}
}

// If we are in a dynamic workflow or the corresponding FF is enabled, try to determine
// If we are in CCR or the corresponding FF is enabled, try to determine
// which files in the repository are marked as generated and add them to
// the `paths-ignore` configuration.
if (
(await features.getValue(Feature.IgnoreGeneratedFiles)) &&
isDynamicWorkflow()
) {
if ((await features.getValue(Feature.IgnoreGeneratedFiles)) && isCCR()) {
try {
const generatedFilesCheckStartedAt = performance.now();
const generatedFiles = await getGeneratedFiles(inputs.sourceRoot);
@@ -1055,21 +991,18 @@ export async function initConfig(
// and queries, which in turn depends on the user config and the augmentation
// properties. So we need to calculate the overlay database mode after the
// rest of the config has been populated.
const {
overlayDatabaseMode,
useOverlayDatabaseCaching,
skippedDueToCachedStatus: overlaySkippedDueToCachedStatus,
} = await getOverlayDatabaseMode(
inputs.codeql,
inputs.features,
config.languages,
inputs.sourceRoot,
config.buildMode,
inputs.ramInput,
config.computedConfig,
gitVersion,
logger,
);
const { overlayDatabaseMode, useOverlayDatabaseCaching } =
await getOverlayDatabaseMode(
inputs.codeql,
inputs.features,
config.languages,
inputs.sourceRoot,
config.buildMode,
inputs.ramInput,
config.computedConfig,
gitVersion,
logger,
);
logger.info(
`Using overlay database mode: ${overlayDatabaseMode} ` +
`${useOverlayDatabaseCaching ? "with" : "without"} caching.`,
@@ -1077,35 +1010,6 @@ export async function initConfig(
config.overlayDatabaseMode = overlayDatabaseMode;
config.useOverlayDatabaseCaching = useOverlayDatabaseCaching;

if (overlaySkippedDueToCachedStatus) {
addNoLanguageDiagnostic(
config,
makeDiagnostic(
"codeql-action/overlay-skipped-due-to-cached-status",
"Skipped improved incremental analysis because it failed previously with similar hardware resources",
{
attributes: {
languages: config.languages,
},
markdownMessage:
`Improved incremental analysis was skipped because it previously failed for this repository ` +
`with CodeQL version ${(await inputs.codeql.getVersion()).version} on a runner with similar hardware resources. ` +
"Improved incremental analysis may require a significant amount of disk space for some repositories. " +
"If you want to enable improved incremental analysis, increase the disk space available " +
"to the runner. If that doesn't help, contact GitHub Support for further assistance.\n\n" +
"Improved incremental analysis will be automatically retried when the next version of CodeQL is released. " +
`You can also manually trigger a retry by [removing](${DocUrl.DELETE_ACTIONS_CACHE_ENTRIES}) \`codeql-overlay-status-*\` entries from the Actions cache.`,
severity: "note",
visibility: {
cliSummaryTable: true,
statusPage: true,
telemetry: true,
},
},
),
);
}

if (
overlayDatabaseMode === OverlayDatabaseMode.Overlay ||
(await shouldPerformDiffInformedAnalysis(
@@ -1474,27 +1378,28 @@ export function isCodeQualityEnabled(config: Config): boolean {
}

/**
* Returns the primary analysis kind that the Action is initialised with. If there is only
* one analysis kind, then that is returned.
* Returns the primary analysis kind that the Action is initialised with. This is
* always `AnalysisKind.CodeScanning` unless `AnalysisKind.CodeScanning` is not enabled.
*
* The special case is Code Scanning + Code Quality, which can be enabled at the same time.
* In that case, this function returns Code Scanning.
* @returns Returns `AnalysisKind.CodeScanning` if `AnalysisKind.CodeScanning` is enabled;
* otherwise `AnalysisKind.CodeQuality`.
*/
function getPrimaryAnalysisKind(config: Config): AnalysisKind {
if (config.analysisKinds.length === 1) {
return config.analysisKinds[0];
}

return isCodeScanningEnabled(config)
? AnalysisKind.CodeScanning
: AnalysisKind.CodeQuality;
}

/**
* Returns the primary analysis configuration that the Action is initialised with.
* Returns the primary analysis configuration that the Action is initialised with. This is
* always `CodeScanning` unless `CodeScanning` is not enabled.
*
* @returns Returns `CodeScanning` if `AnalysisKind.CodeScanning` is enabled; otherwise `CodeQuality`.
*/
export function getPrimaryAnalysisConfig(config: Config): AnalysisConfig {
return getAnalysisConfig(getPrimaryAnalysisKind(config));
return getPrimaryAnalysisKind(config) === AnalysisKind.CodeScanning
? CodeScanning
: CodeQuality;
}

/** Logs the Git version as a telemetry diagnostic. */
@@ -1503,8 +1408,11 @@ async function logGitVersionTelemetry(
gitVersion: GitVersionInfo,
): Promise<void> {
if (config.languages.length > 0) {
addNoLanguageDiagnostic(
addDiagnostic(
config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
makeTelemetryDiagnostic(
"codeql-action/git-version-telemetry",
"Git version telemetry",
@@ -1530,8 +1438,11 @@ async function logGeneratedFilesTelemetry(
return;
}

addNoLanguageDiagnostic(
addDiagnostic(
config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
makeTelemetryDiagnostic(
"codeql-action/generated-files-telemetry",
"Generated files telemetry",

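Note: getOverlayDatabaseMode, as changed above, resolves the mode through an ordered series of checks: an explicit CODEQL_OVERLAY_DATABASE_MODE override, resource checks against the disk and memory thresholds, the cached overlay status, and finally the branch context (pull request vs. default branch). A condensed sketch of that decision order, with the v1 disk threshold taken from the constants above; the option names here are simplified stand-ins:

enum OverlayDatabaseMode {
  Overlay = "overlay",
  OverlayBase = "overlay-base",
  None = "none",
}

// 20000 MB, per OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB above.
const MIN_DISK_BYTES = 20000 * 1_000_000;

// Simplified stand-in for the resolution logic in getOverlayDatabaseMode.
function resolveOverlayMode(opts: {
  envOverride?: OverlayDatabaseMode;
  availableDiskBytes: number;
  skipDueToCachedStatus: boolean;
  isPullRequest: boolean;
  isDefaultBranch: boolean;
}): OverlayDatabaseMode {
  if (opts.envOverride !== undefined) {
    return opts.envOverride; // explicit override wins
  }
  if (opts.availableDiskBytes < MIN_DISK_BYTES) {
    return OverlayDatabaseMode.None; // insufficient resources
  }
  if (opts.skipDueToCachedStatus) {
    return OverlayDatabaseMode.None; // previously failed with similar resources
  }
  if (opts.isPullRequest) {
    return OverlayDatabaseMode.Overlay; // analyze the PR as an overlay
  }
  if (opts.isDefaultBranch) {
    return OverlayDatabaseMode.OverlayBase; // build the overlay-base database
  }
  return OverlayDatabaseMode.None;
}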
@@ -8,7 +8,7 @@ import { Config } from "./config-utils";
import { Feature, FeatureEnablement } from "./feature-flags";
import * as gitUtils from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay-database-utils";
import { RepositoryNwo } from "./repository";
import * as util from "./util";
import { bundleDb, CleanupLevel, parseGitHubUrl } from "./util";
@@ -101,9 +101,7 @@ export async function cleanupAndUploadDatabases(
// Although we are uploading arbitrary file contents to the API, it's worth
// noting that it's the API's job to validate that the contents is acceptable.
// This API method is available to anyone with write access to the repo.
const bundledDb = await bundleDb(config, language, codeql, language, {
includeDiagnostics: false,
});
const bundledDb = await bundleDb(config, language, codeql, language);
bundledDbSize = fs.statSync(bundledDb).size;
const bundledDbReadStream = fs.createReadStream(bundledDb);
const commitOid = await gitUtils.getCommitOid(

@@ -429,7 +429,6 @@ async function createDatabaseBundleCli(
language,
codeql,
`${config.debugDatabaseName}-${language}`,
{ includeDiagnostics: true },
);
return databaseBundlePath;
}

+4
-4
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.24.2",
"cliVersion": "2.24.2",
"priorBundleVersion": "codeql-bundle-v2.24.1",
"priorCliVersion": "2.24.1"
"bundleVersion": "codeql-bundle-v2.24.0",
"cliVersion": "2.24.0",
"priorBundleVersion": "codeql-bundle-v2.23.9",
"priorCliVersion": "2.23.9"
}

+4
-33
@@ -66,12 +66,6 @@ interface UnwrittenDiagnostic {
/** A list of diagnostics which have not yet been written to disk. */
let unwrittenDiagnostics: UnwrittenDiagnostic[] = [];

/**
* A list of diagnostics which have not yet been written to disk,
* and where the language does not matter.
*/
let unwrittenDefaultLanguageDiagnostics: DiagnosticMessage[] = [];

/**
* Constructs a new diagnostic message with the specified id and name, as well as optional additional data.
*
@@ -123,24 +117,6 @@ export function addDiagnostic(
}
}

/** Adds a diagnostic that is not specific to any language. */
export function addNoLanguageDiagnostic(
config: Config | undefined,
diagnostic: DiagnosticMessage,
) {
if (config !== undefined) {
addDiagnostic(
config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
diagnostic,
);
} else {
unwrittenDefaultLanguageDiagnostics.push(diagnostic);
}
}

/**
* Writes the given diagnostic to the database.
*
@@ -198,21 +174,16 @@ export function logUnwrittenDiagnostics() {
/** Writes all unwritten diagnostics to disk. */
export function flushDiagnostics(config: Config) {
const logger = getActionsLogger();

const diagnosticsCount =
unwrittenDiagnostics.length + unwrittenDefaultLanguageDiagnostics.length;
logger.debug(`Writing ${diagnosticsCount} diagnostic(s) to database.`);
logger.debug(
`Writing ${unwrittenDiagnostics.length} diagnostic(s) to database.`,
);

for (const unwritten of unwrittenDiagnostics) {
writeDiagnostic(config, unwritten.language, unwritten.diagnostic);
}
for (const unwritten of unwrittenDefaultLanguageDiagnostics) {
addNoLanguageDiagnostic(config, unwritten);
}

// Reset the unwritten diagnostics arrays.
// Reset the unwritten diagnostics array.
unwrittenDiagnostics = [];
unwrittenDefaultLanguageDiagnostics = [];
}

/**

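Note: the diagnostics change above collapses two pending-diagnostics buffers into one; the underlying pattern is queue-then-flush, where diagnostics raised before the config exists are buffered and written once flushDiagnostics runs. A minimal sketch of that pattern with simplified types; writeDiagnostic stands in for the real disk write:

interface DiagnosticMessage {
  id: string;
  name: string;
}

interface UnwrittenDiagnostic {
  language: string;
  diagnostic: DiagnosticMessage;
}

let unwritten: UnwrittenDiagnostic[] = [];

function addDiagnostic(language: string, diagnostic: DiagnosticMessage, ready: boolean) {
  if (ready) {
    writeDiagnostic(language, diagnostic); // write immediately once configured
  } else {
    unwritten.push({ language, diagnostic }); // otherwise queue for later
  }
}

function flushDiagnostics() {
  for (const { language, diagnostic } of unwritten) {
    writeDiagnostic(language, diagnostic);
  }
  unwritten = []; // reset the queue
}

function writeDiagnostic(language: string, diagnostic: DiagnosticMessage) {
  console.log(`writing ${diagnostic.id} for ${language}`);
}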
@@ -8,7 +8,7 @@ import {
shouldPerformDiffInformedAnalysis,
exportedForTesting,
} from "./diff-informed-analysis-utils";
import { Feature, initFeatures } from "./feature-flags";
import { Feature, Features } from "./feature-flags";
import { getRunnerLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import {
@@ -63,7 +63,7 @@ const testShouldPerformDiffInformedAnalysis = test.macro({
delete process.env.CODEQL_ACTION_DIFF_INFORMED_QUERIES;
}

const features = initFeatures(
const features = new Features(
testCase.gitHubVersion,
parseRepositoryNwo("github/example"),
tmpDir,

+2
-3
@@ -5,11 +5,10 @@
export enum DocUrl {
ASSIGNING_PERMISSIONS_TO_JOBS = "https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs",
AUTOMATIC_BUILD_FAILED = "https://docs.github.com/en/code-security/code-scanning/troubleshooting-code-scanning/automatic-build-failed",
CODEQL_BUILD_MODES = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#codeql-build-modes",
DEFINE_ENV_VARIABLES = "https://docs.github.com/en/actions/learn-github-actions/variables#defining-environment-variables-for-a-single-workflow",
DELETE_ACTIONS_CACHE_ENTRIES = "https://docs.github.com/en/actions/how-tos/manage-workflow-runs/manage-caches#deleting-cache-entries",
SCANNING_ON_PUSH = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push",
SPECIFY_BUILD_STEPS_MANUALLY = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#about-specifying-build-steps-manually",
TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS = "https://docs.github.com/en/enterprise-cloud@latest/code-security/code-scanning/integrating-with-code-scanning/sarif-support-for-code-scanning#providing-data-to-track-code-scanning-alerts-across-runs",
CODEQL_BUILD_MODES = "https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages#codeql-build-modes",
SYSTEM_REQUIREMENTS = "https://codeql.github.com/docs/codeql-overview/system-requirements/",
TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS = "https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts",
}

@@ -141,7 +141,4 @@ export enum EnvVar {
* `getAnalysisKey`, but can also be set manually for testing and non-standard applications.
*/
ANALYSIS_KEY = "CODEQL_ACTION_ANALYSIS_KEY",

/** Used by Code Scanning Risk Assessment to communicate the assessment ID to the CodeQL Action. */
RISK_ASSESSMENT_ID = "CODEQL_ACTION_RISK_ASSESSMENT_ID",
}

+79
-19
@@ -1,31 +1,32 @@
import * as fs from "fs";
import * as path from "path";

import test from "ava";
import test, { ExecutionContext } from "ava";

import * as defaults from "./defaults.json";
import {
Feature,
featureConfig,
FeatureEnablement,
Features,
FEATURE_FLAGS_FILE_NAME,
FeatureConfig,
FeatureWithoutCLI,
} from "./feature-flags";
import { getRunnerLogger } from "./logging";
import { parseRepositoryNwo } from "./repository";
import {
setUpFeatureFlagTests,
getFeatureIncludingCodeQlIfRequired,
assertAllFeaturesUndefinedInApi,
assertAllFeaturesHaveDefaultValues,
} from "./feature-flags/testing-util";
import {
checkExpectedLogMessages,
getRecordingLogger,
initializeFeatures,
LoggedMessage,
mockCodeQLVersion,
mockFeatureFlagApiEndpoint,
setupActionsVars,
setupTests,
stubFeatureFlagApiEndpoint,
} from "./testing-utils";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import { GitHubVariant, initializeEnvironment, withTmpDir } from "./util";

setupTests(test);
@@ -34,7 +35,9 @@ test.beforeEach(() => {
initializeEnvironment("1.2.3");
});

test(`All features use default values if running against GHES`, async (t) => {
const testRepositoryNwo = parseRepositoryNwo("github/example");

test(`All features are disabled if running against GHES`, async (t) => {
await withTmpDir(async (tmpDir) => {
const loggedMessages = [];
const features = setUpFeatureFlagTests(
@@ -43,10 +46,21 @@ test(`All features use default values if running against GHES`, async (t) => {
{ type: GitHubVariant.GHES, version: "3.0.0" },
);

await assertAllFeaturesHaveDefaultValues(t, features);
checkExpectedLogMessages(t, loggedMessages, [
"Not running against github.com. Using default values for all features.",
]);
for (const feature of Object.values(Feature)) {
t.deepEqual(
await getFeatureIncludingCodeQlIfRequired(features, feature),
featureConfig[feature].defaultValue,
);
}

t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not running against github.com. Disabling all toggleable features.",
) !== undefined,
);
});
});

@@ -528,9 +542,55 @@ test("non-legacy feature flags should not start with codeql_action_", async (t)
}
});

test("initFeatures returns a `Features` instance by default", async (t) => {
await withTmpDir(async (tmpDir) => {
const features = setUpFeatureFlagTests(tmpDir);
t.is("Features", features.constructor.name);
});
});
function assertAllFeaturesUndefinedInApi(
t: ExecutionContext<unknown>,
loggedMessages: LoggedMessage[],
) {
for (const feature of Object.keys(featureConfig)) {
t.assert(
loggedMessages.find(
(v) =>
v.type === "debug" &&
(v.message as string).includes(feature) &&
(v.message as string).includes("undefined in API response"),
) !== undefined,
);
}
}

function setUpFeatureFlagTests(
tmpDir: string,
logger = getRunnerLogger(true),
gitHubVersion = { type: GitHubVariant.DOTCOM } as util.GitHubVersion,
): FeatureEnablement {
setupActionsVars(tmpDir, tmpDir);

return new Features(gitHubVersion, testRepositoryNwo, tmpDir, logger);
}

/**
* Returns an argument to pass to `getValue` that if required includes a CodeQL object meeting the
* minimum version or tool feature requirements specified by the feature.
*/
function getFeatureIncludingCodeQlIfRequired(
features: FeatureEnablement,
feature: Feature,
) {
const config = featureConfig[
feature
] satisfies FeatureConfig as FeatureConfig;
if (
config.minimumVersion === undefined &&
config.toolsFeature === undefined
) {
return features.getValue(feature as FeatureWithoutCLI);
}

return features.getValue(
feature,
mockCodeQLVersion(
"9.9.9",
Object.fromEntries(Object.values(ToolsFeature).map((v) => [v, true])),
),
);
}

+57
-169
@@ -7,7 +7,7 @@ import { getApiClient } from "./api-client";
|
||||
import type { CodeQL } from "./codeql";
|
||||
import * as defaults from "./defaults.json";
|
||||
import { Logger } from "./logging";
|
||||
import { CODEQL_OVERLAY_MINIMUM_VERSION } from "./overlay";
|
||||
import { CODEQL_OVERLAY_MINIMUM_VERSION } from "./overlay-database-utils";
|
||||
import { RepositoryNwo } from "./repository";
|
||||
import { ToolsFeature } from "./tools-features";
|
||||
import * as util from "./util";
|
||||
@@ -45,10 +45,7 @@ export enum Feature {
|
||||
DisableJavaBuildlessEnabled = "disable_java_buildless_enabled",
|
||||
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
|
||||
ExportDiagnosticsEnabled = "export_diagnostics_enabled",
|
||||
ForceNightly = "force_nightly",
|
||||
IgnoreGeneratedFiles = "ignore_generated_files",
|
||||
ImprovedProxyCertificates = "improved_proxy_certificates",
|
||||
JavaNetworkDebugging = "java_network_debugging",
|
||||
OverlayAnalysis = "overlay_analysis",
|
||||
OverlayAnalysisActions = "overlay_analysis_actions",
|
||||
OverlayAnalysisCodeScanningActions = "overlay_analysis_code_scanning_actions",
|
||||
@@ -63,23 +60,18 @@ export enum Feature {
|
||||
OverlayAnalysisCodeScanningSwift = "overlay_analysis_code_scanning_swift",
|
||||
OverlayAnalysisCpp = "overlay_analysis_cpp",
|
||||
OverlayAnalysisCsharp = "overlay_analysis_csharp",
|
||||
OverlayAnalysisStatusCheck = "overlay_analysis_status_check",
|
||||
OverlayAnalysisStatusSave = "overlay_analysis_status_save",
|
||||
OverlayAnalysisGo = "overlay_analysis_go",
|
||||
OverlayAnalysisJava = "overlay_analysis_java",
|
||||
OverlayAnalysisJavascript = "overlay_analysis_javascript",
|
||||
OverlayAnalysisPython = "overlay_analysis_python",
|
||||
OverlayAnalysisResourceChecksV2 = "overlay_analysis_resource_checks_v2",
|
||||
OverlayAnalysisRuby = "overlay_analysis_ruby",
|
||||
OverlayAnalysisRust = "overlay_analysis_rust",
|
||||
OverlayAnalysisSkipResourceChecks = "overlay_analysis_skip_resource_checks",
|
||||
OverlayAnalysisSwift = "overlay_analysis_swift",
|
||||
PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib",
|
||||
QaTelemetryEnabled = "qa_telemetry_enabled",
|
||||
/** Note that this currently only disables baseline file coverage information. */
|
||||
SkipFileCoverageOnPrs = "skip_file_coverage_on_prs",
|
||||
UploadOverlayDbToApi = "upload_overlay_db_to_api",
|
||||
UseRepositoryProperties = "use_repository_properties_v2",
|
||||
UseRepositoryProperties = "use_repository_properties",
|
||||
ValidateDbConfig = "validate_db_config",
|
||||
}
|
||||
|
||||
@@ -167,26 +159,11 @@ export const featureConfig = {
|
||||
legacyApi: true,
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.ForceNightly]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_FORCE_NIGHTLY",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.IgnoreGeneratedFiles]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_IGNORE_GENERATED_FILES",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.ImprovedProxyCertificates]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_IMPROVED_PROXY_CERTIFICATES",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.JavaNetworkDebugging]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_JAVA_NETWORK_DEBUGGING",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.OverlayAnalysis]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS",
|
||||
@@ -257,16 +234,6 @@ export const featureConfig = {
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.OverlayAnalysisStatusCheck]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_CHECK",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.OverlayAnalysisStatusSave]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.OverlayAnalysisGo]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO",
|
||||
@@ -287,11 +254,6 @@ export const featureConfig = {
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.OverlayAnalysisResourceChecksV2]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RESOURCE_CHECKS_V2",
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.OverlayAnalysisRuby]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY",
|
||||
@@ -324,15 +286,6 @@ export const featureConfig = {
|
||||
legacyApi: true,
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.SkipFileCoverageOnPrs]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS",
|
||||
// For testing, this is not behind a CLI version check yet. However
|
||||
// before rolling this out externally, we should set a minimum version here
|
||||
// since current versions of the CodeQL CLI will log if baseline information
|
||||
// cannot be found when interpreting results.
|
||||
minimumVersion: undefined,
|
||||
},
|
||||
[Feature.UploadOverlayDbToApi]: {
|
||||
defaultValue: false,
|
||||
envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API",
|
||||
@@ -384,60 +337,51 @@ type GitHubFeatureFlagsApiResponse = Partial<Record<Feature, boolean>>;
|
||||
export const FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
||||
|
||||
/**
|
||||
* Determines the enablement status of a number of features locally without
|
||||
* consulting the GitHub API.
|
||||
* Determines the enablement status of a number of features.
|
||||
* If feature enablement is not able to be determined locally, a request to the
|
||||
* GitHub API is made to determine the enablement status.
|
||||
*/
|
||||
class OfflineFeatures implements FeatureEnablement {
|
||||
constructor(protected readonly logger: Logger) {}
|
||||
export class Features implements FeatureEnablement {
|
||||
private gitHubFeatureFlags: GitHubFeatureFlags;
|
||||
|
||||
constructor(
|
||||
gitHubVersion: util.GitHubVersion,
|
||||
repositoryNwo: RepositoryNwo,
|
||||
tempDir: string,
|
||||
private readonly logger: Logger,
|
||||
) {
|
||||
this.gitHubFeatureFlags = new GitHubFeatureFlags(
|
||||
gitHubVersion,
|
||||
repositoryNwo,
|
||||
path.join(tempDir, FEATURE_FLAGS_FILE_NAME),
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
async getDefaultCliVersion(
|
||||
_variant: util.GitHubVariant,
|
||||
variant: util.GitHubVariant,
|
||||
): Promise<CodeQLDefaultVersionInfo> {
|
||||
return {
|
||||
cliVersion: defaults.cliVersion,
|
||||
tagName: defaults.bundleVersion,
|
||||
};
|
||||
return await this.gitHubFeatureFlags.getDefaultCliVersion(variant);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the `FeatureConfig` for `feature`.
|
||||
*/
|
||||
getFeatureConfig(feature: Feature): FeatureConfig {
|
||||
// Narrow the type to FeatureConfig to avoid type errors. To avoid unsafe use of `as`, we
|
||||
// check that the required properties exist using `satisfies`.
|
||||
return featureConfig[feature] satisfies FeatureConfig as FeatureConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether `feature` is enabled without consulting the GitHub API.
|
||||
*
|
||||
* @param feature The feature to check.
|
||||
* @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the
|
||||
* feature, the version of the CodeQL CLI will be checked against the minimum version.
|
||||
* If the version is less than the minimum version, the feature will be considered
|
||||
* disabled. If not provided, and a `minimumVersion` is specified for the feature, then
|
||||
* disabled. If not provided, and a `minimumVersion` is specified for the feature, the
|
||||
* this function will throw.
|
||||
* @returns true if the feature is enabled, false otherwise.
|
||||
*
|
||||
* @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided.
|
||||
*/
|
||||
async getValue(feature: Feature, codeql?: CodeQL): Promise<boolean> {
|
||||
const offlineValue = await this.getOfflineValue(feature, codeql);
|
||||
if (offlineValue !== undefined) {
|
||||
return offlineValue;
|
||||
}
|
||||
|
||||
return this.getDefaultValue(feature);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines whether `feature` is enabled using the CLI and environment variables.
|
||||
*/
|
||||
protected async getOfflineValue(
|
||||
feature: Feature,
|
||||
codeql?: CodeQL,
|
||||
): Promise<boolean | undefined> {
|
||||
const config = this.getFeatureConfig(feature);
|
||||
// Narrow the type to FeatureConfig to avoid type errors. To avoid unsafe use of `as`, we
|
||||
// check that the required properties exist using `satisfies`.
|
||||
const config = featureConfig[
|
||||
feature
|
||||
] satisfies FeatureConfig as FeatureConfig;
|
||||
|
||||
if (!codeql && config.minimumVersion) {
|
||||
throw new Error(
|
||||
@@ -503,68 +447,6 @@ class OfflineFeatures implements FeatureEnablement {
|
||||
return true;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/** Gets the default value of `feature`. */
|
||||
protected async getDefaultValue(feature: Feature): Promise<boolean> {
|
||||
const config = this.getFeatureConfig(feature);
|
||||
const defaultValue = config.defaultValue;
|
||||
this.logger.debug(
|
||||
`Feature ${feature} is ${
|
||||
defaultValue ? "enabled" : "disabled"
|
||||
} due to its default value.`,
|
||||
);
|
||||
return defaultValue;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines the enablement status of a number of features.
|
||||
* If feature enablement is not able to be determined locally, a request to the
|
||||
* GitHub API is made to determine the enablement status.
|
||||
*/
|
||||
class Features extends OfflineFeatures {
|
||||
private gitHubFeatureFlags: GitHubFeatureFlags;
|
||||
|
||||
constructor(repositoryNwo: RepositoryNwo, tempDir: string, logger: Logger) {
|
||||
super(logger);
|
||||
|
||||
this.gitHubFeatureFlags = new GitHubFeatureFlags(
|
||||
repositoryNwo,
|
||||
path.join(tempDir, FEATURE_FLAGS_FILE_NAME),
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
async getDefaultCliVersion(
|
||||
variant: util.GitHubVariant,
|
||||
): Promise<CodeQLDefaultVersionInfo> {
|
||||
if (supportsFeatureFlags(variant)) {
|
||||
return await this.gitHubFeatureFlags.getDefaultCliVersionFromFlags();
|
||||
}
|
||||
return super.getDefaultCliVersion(variant);
|
||||
}

  /**
   *
   * @param feature The feature to check.
   * @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the
   *   feature, the version of the CodeQL CLI will be checked against the minimum version.
   *   If the version is less than the minimum version, the feature will be considered
   *   disabled. If not provided, and a `minimumVersion` is specified for the feature, then
   *   this function will throw.
   * @returns true if the feature is enabled, false otherwise.
   *
   * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided.
   */
  async getValue(feature: Feature, codeql?: CodeQL): Promise<boolean> {
    // Check whether the feature is enabled locally.
    const offlineValue = await this.getOfflineValue(feature, codeql);
    if (offlineValue !== undefined) {
      return offlineValue;
    }

    // Ask the GitHub API if the feature is enabled.
    const apiValue = await this.gitHubFeatureFlags.getValue(feature);
    if (apiValue !== undefined) {
@@ -576,8 +458,13 @@ class Features extends OfflineFeatures {
      return apiValue;
    }

    // Return the default value.
    return this.getDefaultValue(feature);
    const defaultValue = config.defaultValue;
    this.logger.debug(
      `Feature ${feature} is ${
        defaultValue ? "enabled" : "disabled"
      } due to its default value.`,
    );
    return defaultValue;
  }
}
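
The resolution order in `getValue` above is: local signals first (environment variable override or CLI version gate), then the GitHub API response, then the compiled-in default. A condensed sketch of that cascade, with hypothetical helper signatures standing in for the real lookups:

// Hypothetical stand-ins for the real lookup functions.
declare function lookupOffline(feature: string): boolean | undefined;
declare function lookupApi(feature: string): Promise<boolean | undefined>;
declare function lookupDefault(feature: string): boolean;

async function resolveFeature(feature: string): Promise<boolean> {
  // 1. An env var override or a failed minimum-version check decides immediately.
  const offline = lookupOffline(feature);
  if (offline !== undefined) {
    return offline;
  }
  // 2. Otherwise defer to the remote feature flag, when the API knows about it.
  const api = await lookupApi(feature);
  if (api !== undefined) {
    return api;
  }
  // 3. Fall back to the default value.
  return lookupDefault(feature);
}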

@@ -589,6 +476,7 @@ class GitHubFeatureFlags {
  private hasAccessedRemoteFeatureFlags: boolean;

  constructor(
    private readonly gitHubVersion: util.GitHubVersion,
    private readonly repositoryNwo: RepositoryNwo,
    private readonly featureFlagsFile: string,
    private readonly logger: Logger,
@@ -619,6 +507,18 @@ class GitHubFeatureFlags {
    return version;
  }

  async getDefaultCliVersion(
    variant: util.GitHubVariant,
  ): Promise<CodeQLDefaultVersionInfo> {
    if (supportsFeatureFlags(variant)) {
      return await this.getDefaultCliVersionFromFlags();
    }
    return {
      cliVersion: defaults.cliVersion,
      tagName: defaults.bundleVersion,
    };
  }

  async getDefaultCliVersionFromFlags(): Promise<CodeQLDefaultVersionInfo> {
    const response = await this.getAllFeatures();

@@ -744,6 +644,14 @@ class GitHubFeatureFlags {
  }

  private async loadApiResponse(): Promise<GitHubFeatureFlagsApiResponse> {
    // Do nothing when not running against github.com
    if (!supportsFeatureFlags(this.gitHubVersion.type)) {
      this.logger.debug(
        "Not running against github.com. Disabling all toggleable features.",
      );
      this.hasAccessedRemoteFeatureFlags = false;
      return {};
    }
    try {
      const featuresToRequest = Object.entries(featureConfig)
        .filter(
@@ -813,23 +721,3 @@ function supportsFeatureFlags(githubVariant: util.GitHubVariant): boolean {
    githubVariant === util.GitHubVariant.GHEC_DR
  );
}

/**
 * Initialises an instance of a `FeatureEnablement` implementation. The implementation used
 * is determined by the environment we are running in.
 */
export function initFeatures(
  gitHubVersion: util.GitHubVersion,
  repositoryNwo: RepositoryNwo,
  tempDir: string,
  logger: Logger,
): FeatureEnablement {
  if (!supportsFeatureFlags(gitHubVersion.type)) {
    logger.debug(
      "Not running against github.com. Using default values for all features.",
    );
    return new OfflineFeatures(logger);
  } else {
    return new Features(repositoryNwo, tempDir, logger);
  }
}
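
Call sites only depend on the `FeatureEnablement` interface, so the offline (GHES) and dotcom paths are interchangeable. A hedged usage sketch; the identifiers match the code above, but the surrounding setup and `someFeature` are illustrative:

// On GHES this returns an OfflineFeatures instance that never calls the API;
// on github.com it returns a Features instance backed by GitHubFeatureFlags.
const features: FeatureEnablement = initFeatures(
  gitHubVersion,
  repositoryNwo,
  tempDir,
  logger,
);

// Features without a minimumVersion can be queried without a CodeQL object.
if (await features.getValue(someFeature)) {
  // ... take the feature-gated code path ...
}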

@@ -1,43 +0,0 @@
import test from "ava";
import * as sinon from "sinon";

import * as apiClient from "../api-client";
import {
  checkExpectedLogMessages,
  getRecordingLogger,
  LoggedMessage,
  setupTests,
} from "../testing-utils";
import { GitHubVariant, initializeEnvironment, withTmpDir } from "../util";

import {
  assertAllFeaturesHaveDefaultValues,
  setUpFeatureFlagTests,
} from "./testing-util";

setupTests(test);

test.beforeEach(() => {
  initializeEnvironment("1.2.3");
});

test("OfflineFeatures makes no API requests", async (t) => {
  await withTmpDir(async (tmpDir) => {
    const loggedMessages: LoggedMessage[] = [];
    const logger = getRecordingLogger(loggedMessages);
    const features = setUpFeatureFlagTests(tmpDir, logger, {
      type: GitHubVariant.GHES,
      version: "3.0.0",
    });
    t.is("OfflineFeatures", features.constructor.name);

    sinon
      .stub(apiClient, "getApiClient")
      .throws(new Error("Should not have called getApiClient"));

    await assertAllFeaturesHaveDefaultValues(t, features);
    checkExpectedLogMessages(t, loggedMessages, [
      "Not running against github.com. Using default values for all features.",
    ]);
  });
});
@@ -78,17 +78,11 @@ export async function loadPropertiesFromApi(
    }
  }

  if (Object.keys(properties).length === 0) {
    logger.debug("No known repository properties were found.");
  } else {
    logger.debug(
      "Loaded the following values for the repository properties:",
    );
    for (const [property, value] of Object.entries(properties).sort(
      ([nameA], [nameB]) => nameA.localeCompare(nameB),
    )) {
      logger.debug(`  ${property}: ${value}`);
    }
  logger.debug("Loaded the following values for the repository properties:");
  for (const [property, value] of Object.entries(properties).sort(
    ([nameA], [nameB]) => nameA.localeCompare(nameB),
  )) {
    logger.debug(`  ${property}: ${value}`);
  }

  return properties;

@@ -1,87 +0,0 @@
import { type ExecutionContext } from "ava";

import {
  Feature,
  featureConfig,
  FeatureConfig,
  FeatureEnablement,
  FeatureWithoutCLI,
  initFeatures,
} from "../feature-flags";
import { getRunnerLogger } from "../logging";
import { parseRepositoryNwo } from "../repository";
import {
  LoggedMessage,
  mockCodeQLVersion,
  setupActionsVars,
} from "../testing-utils";
import { ToolsFeature } from "../tools-features";
import { GitHubVariant } from "../util";
import * as util from "../util";

const testRepositoryNwo = parseRepositoryNwo("github/example");

export async function assertAllFeaturesHaveDefaultValues(
  t: ExecutionContext<unknown>,
  features: FeatureEnablement,
) {
  for (const feature of Object.values(Feature)) {
    t.deepEqual(
      await getFeatureIncludingCodeQlIfRequired(features, feature),
      featureConfig[feature].defaultValue,
    );
  }
}

export function assertAllFeaturesUndefinedInApi(
  t: ExecutionContext<unknown>,
  loggedMessages: LoggedMessage[],
) {
  for (const feature of Object.keys(featureConfig)) {
    t.assert(
      loggedMessages.find(
        (v) =>
          v.type === "debug" &&
          (v.message as string).includes(feature) &&
          (v.message as string).includes("undefined in API response"),
      ) !== undefined,
    );
  }
}

export function setUpFeatureFlagTests(
  tmpDir: string,
  logger = getRunnerLogger(true),
  gitHubVersion = { type: GitHubVariant.DOTCOM } as util.GitHubVersion,
): FeatureEnablement {
  setupActionsVars(tmpDir, tmpDir);

  return initFeatures(gitHubVersion, testRepositoryNwo, tmpDir, logger);
}

/**
 * Returns an argument to pass to `getValue` that if required includes a CodeQL object meeting the
 * minimum version or tool feature requirements specified by the feature.
 */
export function getFeatureIncludingCodeQlIfRequired(
  features: FeatureEnablement,
  feature: Feature,
) {
  const config = featureConfig[
    feature
  ] satisfies FeatureConfig as FeatureConfig;
  if (
    config.minimumVersion === undefined &&
    config.toolsFeature === undefined
  ) {
    return features.getValue(feature as FeatureWithoutCLI);
  }

  return features.getValue(
    feature,
    mockCodeQLVersion(
      "9.9.9",
      Object.fromEntries(Object.values(ToolsFeature).map((v) => [v, true])),
    ),
  );
}
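
`getFeatureIncludingCodeQlIfRequired` builds a CodeQL mock that reports every `ToolsFeature` as enabled by turning the enum's values into an object of `true` flags. The same `Object.fromEntries` trick in isolation, with an illustrative enum standing in for the real one in tools-features.ts:

enum ToolsFeature {
  // Illustrative members only.
  FeatureA = "featureA",
  FeatureB = "featureB",
}

// ["featureA", "featureB"] -> { featureA: true, featureB: true }
const allEnabled = Object.fromEntries(
  Object.values(ToolsFeature).map((v) => [v, true]),
);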
@@ -5,12 +5,9 @@ import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import * as codeql from "./codeql";
import * as configUtils from "./config-utils";
import { EnvVar } from "./environment";
import { Feature } from "./feature-flags";
import * as initActionPostHelper from "./init-action-post-helper";
import { getRunnerLogger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import * as overlayStatus from "./overlay/status";
import { parseRepositoryNwo } from "./repository";
import {
  createFeatures,
@@ -22,11 +19,9 @@ import * as uploadLib from "./upload-lib";
import * as util from "./util";
import * as workflow from "./workflow";

const NUM_BYTES_PER_GIB = 1024 * 1024 * 1024;

setupTests(test);

test("init-post action with debug mode off", async (t) => {
test("post: init action with debug mode off", async (t) => {
  return await util.withTmpDir(async (tmpDir) => {
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["RUNNER_TEMP"] = tmpDir;
@@ -60,7 +55,7 @@ test("init-post action with debug mode off", async (t) => {
  });
});

test("init-post action with debug mode on", async (t) => {
test("post: init action with debug mode on", async (t) => {
  return await util.withTmpDir(async (tmpDir) => {
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["RUNNER_TEMP"] = tmpDir;
@@ -313,179 +308,6 @@ test("not uploading failed SARIF when `code-scanning` is not an enabled analysis
  );
});

test("saves overlay status when overlay-base analysis did not complete successfully", async (t) => {
  return await util.withTmpDir(async (tmpDir) => {
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["RUNNER_TEMP"] = tmpDir;
    // Ensure analyze did not complete successfully.
    delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];

    const diskUsage: util.DiskUsage = {
      numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
      numTotalBytes: 200 * NUM_BYTES_PER_GIB,
    };
    sinon.stub(util, "checkDiskUsage").resolves(diskUsage);

    const saveOverlayStatusStub = sinon
      .stub(overlayStatus, "saveOverlayStatus")
      .resolves(true);

    const stubCodeQL = codeql.createStubCodeQL({});

    await initActionPostHelper.run(
      sinon.spy(),
      sinon.spy(),
      stubCodeQL,
      createTestConfig({
        debugMode: false,
        languages: ["javascript"],
        overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
      }),
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([Feature.OverlayAnalysisStatusSave]),
      getRunnerLogger(true),
    );

    t.true(
      saveOverlayStatusStub.calledOnce,
      "saveOverlayStatus should be called exactly once",
    );
    t.deepEqual(
      saveOverlayStatusStub.firstCall.args[0],
      stubCodeQL,
      "first arg should be the CodeQL instance",
    );
    t.deepEqual(
      saveOverlayStatusStub.firstCall.args[1],
      ["javascript"],
      "second arg should be the languages",
    );
    t.deepEqual(
      saveOverlayStatusStub.firstCall.args[2],
      diskUsage,
      "third arg should be the disk usage",
    );
    t.deepEqual(
      saveOverlayStatusStub.firstCall.args[3],
      {
        attemptedToBuildOverlayBaseDatabase: true,
        builtOverlayBaseDatabase: false,
      },
      "fourth arg should be the overlay status recording an unsuccessful build attempt",
    );
  });
});

test("does not save overlay status when OverlayAnalysisStatusSave feature flag is disabled", async (t) => {
  return await util.withTmpDir(async (tmpDir) => {
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["RUNNER_TEMP"] = tmpDir;
    // Ensure analyze did not complete successfully.
    delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];

    sinon.stub(util, "checkDiskUsage").resolves({
      numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
      numTotalBytes: 200 * NUM_BYTES_PER_GIB,
    });

    const saveOverlayStatusStub = sinon
      .stub(overlayStatus, "saveOverlayStatus")
      .resolves(true);

    await initActionPostHelper.run(
      sinon.spy(),
      sinon.spy(),
      codeql.createStubCodeQL({}),
      createTestConfig({
        debugMode: false,
        languages: ["javascript"],
        overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
      }),
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([]),
      getRunnerLogger(true),
    );

    t.true(
      saveOverlayStatusStub.notCalled,
      "saveOverlayStatus should not be called when OverlayAnalysisStatusSave feature flag is disabled",
    );
  });
});

test("does not save overlay status when build successful", async (t) => {
  return await util.withTmpDir(async (tmpDir) => {
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["RUNNER_TEMP"] = tmpDir;
    // Mark analyze as having completed successfully.
    process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] = "true";

    sinon.stub(util, "checkDiskUsage").resolves({
      numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
      numTotalBytes: 200 * NUM_BYTES_PER_GIB,
    });

    const saveOverlayStatusStub = sinon
      .stub(overlayStatus, "saveOverlayStatus")
      .resolves(true);

    await initActionPostHelper.run(
      sinon.spy(),
      sinon.spy(),
      codeql.createStubCodeQL({}),
      createTestConfig({
        debugMode: false,
        languages: ["javascript"],
        overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
      }),
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([Feature.OverlayAnalysisStatusSave]),
      getRunnerLogger(true),
    );

    t.true(
      saveOverlayStatusStub.notCalled,
      "saveOverlayStatus should not be called when build completed successfully",
    );
  });
});

test("does not save overlay status when overlay not enabled", async (t) => {
  return await util.withTmpDir(async (tmpDir) => {
    process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
    process.env["RUNNER_TEMP"] = tmpDir;
    delete process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY];

    sinon.stub(util, "checkDiskUsage").resolves({
      numAvailableBytes: 100 * NUM_BYTES_PER_GIB,
      numTotalBytes: 200 * NUM_BYTES_PER_GIB,
    });

    const saveOverlayStatusStub = sinon
      .stub(overlayStatus, "saveOverlayStatus")
      .resolves(true);

    await initActionPostHelper.run(
      sinon.spy(),
      sinon.spy(),
      codeql.createStubCodeQL({}),
      createTestConfig({
        debugMode: false,
        languages: ["javascript"],
        overlayDatabaseMode: OverlayDatabaseMode.None,
      }),
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([]),
      getRunnerLogger(true),
    );

    t.true(
      saveOverlayStatusStub.notCalled,
      "saveOverlayStatus should not be called when overlay is not enabled",
    );
  });
});

function createTestWorkflow(
  steps: workflow.WorkflowJobStep[],
): workflow.Workflow {
@@ -602,8 +424,6 @@ async function testFailedSarifUpload(
  }
  t.true(
    uploadFiles.calledOnceWith(
      sinon.match.string,
      codeqlObject,
      sinon.match.string,
      sinon.match.string,
      category,

@@ -1,5 +1,6 @@
import * as fs from "fs";

import * as core from "@actions/core";
import * as github from "@actions/github";

import * as actionsUtil from "./actions-util";
@@ -11,13 +12,10 @@ import * as dependencyCaching from "./dependency-caching";
import { EnvVar } from "./environment";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayStatus, saveOverlayStatus } from "./overlay/status";
import { RepositoryNwo, getRepositoryNwo } from "./repository";
import { JobStatus } from "./status-report";
import * as uploadLib from "./upload-lib";
import {
  checkDiskUsage,
  delay,
  getErrorMessage,
  getRequiredEnvParam,
@@ -107,8 +105,6 @@ async function maybeUploadFailedSarif(

  logger.info(`Uploading failed SARIF file ${sarifFile}`);
  const uploadResult = await uploadLib.uploadFiles(
    config.tempDir,
    codeql,
    sarifFile,
    checkoutPath,
    category,
@@ -133,31 +129,48 @@ export async function tryUploadSarifIfRunFailed(
  features: FeatureEnablement,
  logger: Logger,
): Promise<UploadFailedSarifResult> {
  // Only upload the failed SARIF to Code scanning if Code scanning is enabled.
  if (!isCodeScanningEnabled(config)) {
    return {
      upload_failed_run_skipped_because: "Code Scanning is not enabled.",
    };
  }
  if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true") {
  if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] !== "true") {
    // If analyze didn't complete successfully and the job status hasn't
    // already been set to Failure/ConfigurationError previously, this
    // means that something along the way failed in a step that is not
    // owned by the Action, for example a manual build step. We
    // consider this a configuration error.
    core.exportVariable(
      EnvVar.JOB_STATUS,
      process.env[EnvVar.JOB_STATUS] ?? JobStatus.ConfigErrorStatus,
    );

    // If the only enabled analysis kind is `code-quality`, then we shouldn't
    // upload the failed SARIF to Code Scanning.
    if (!isCodeScanningEnabled(config)) {
      return {
        upload_failed_run_skipped_because: "Code Scanning is not enabled.",
      };
    }

    try {
      return await maybeUploadFailedSarif(
        config,
        repositoryNwo,
        features,
        logger,
      );
    } catch (e) {
      logger.debug(
        `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`,
      );
      return createFailedUploadFailedSarifResult(e);
    }
  } else {
    core.exportVariable(
      EnvVar.JOB_STATUS,
      process.env[EnvVar.JOB_STATUS] ?? JobStatus.SuccessStatus,
    );
    return {
      upload_failed_run_skipped_because:
        "Analyze Action completed successfully",
    };
  }
  try {
    return await maybeUploadFailedSarif(
      config,
      repositoryNwo,
      features,
      logger,
    );
  } catch (e) {
    logger.debug(
      `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`,
    );
    return createFailedUploadFailedSarifResult(e);
  }
}

export async function run(
@@ -174,8 +187,6 @@ export async function run(
  features: FeatureEnablement,
  logger: Logger,
) {
  await recordOverlayStatus(codeql, config, features, logger);

  const uploadFailedSarifResult = await tryUploadSarifIfRunFailed(
    config,
    repositoryNwo,
@@ -253,68 +264,6 @@ export async function run(
  return uploadFailedSarifResult;
}

/**
 * If overlay base database creation was attempted but the analysis did not complete
 * successfully, save the failure status to the Actions cache so that subsequent runs
 * can skip overlay analysis until something changes (e.g. a new CodeQL version).
 */
async function recordOverlayStatus(
  codeql: CodeQL,
  config: Config,
  features: FeatureEnablement,
  logger: Logger,
) {
  if (
    config.overlayDatabaseMode !== OverlayDatabaseMode.OverlayBase ||
    process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true" ||
    !(await features.getValue(Feature.OverlayAnalysisStatusSave))
  ) {
    return;
  }

  const overlayStatus: OverlayStatus = {
    attemptedToBuildOverlayBaseDatabase: true,
    builtOverlayBaseDatabase: false,
  };

  const diskUsage = await checkDiskUsage(logger);
  if (diskUsage === undefined) {
    logger.warning(
      "Unable to save overlay status to the Actions cache because the available disk space could not be determined.",
    );
    return;
  }

  const saved = await saveOverlayStatus(
    codeql,
    config.languages,
    diskUsage,
    overlayStatus,
    logger,
  );

  const blurb =
    "This job attempted to run with improved incremental analysis but it did not complete successfully. " +
    "This may have been due to disk space constraints: using improved incremental analysis can " +
    "require a significant amount of disk space for some repositories.";

  if (saved) {
    logger.error(
      `${blurb} ` +
        "This failure has been recorded in the Actions cache, so the next CodeQL analysis will run " +
        "without improved incremental analysis. If you want to enable improved incremental analysis, " +
        "increase the disk space available to the runner. " +
        "If that doesn't help, contact GitHub Support for further assistance.",
    );
  } else {
    logger.error(
      `${blurb} ` +
        "The attempt to save this failure status to the Actions cache failed. The Action will attempt to " +
        "run with improved incremental analysis again.",
    );
  }
}

async function removeUploadedSarif(
  uploadFailedSarifResult: UploadFailedSarifResult,
  logger: Logger,
@@ -386,3 +335,20 @@ async function removeUploadedSarif(
    );
  }
}

/**
 * Returns the final job status sent in the `init-post` Action, based on the
 * current value of the JOB_STATUS environment variable. If the variable is
 * unset, or if its value is not one of the JobStatus enum values, returns
 * Unknown. Otherwise it returns the status set in the environment variable.
 */
export function getFinalJobStatus(): JobStatus {
  const jobStatusFromEnvironment = process.env[EnvVar.JOB_STATUS];
  if (
    !jobStatusFromEnvironment ||
    !Object.values(JobStatus).includes(jobStatusFromEnvironment as JobStatus)
  ) {
    return JobStatus.UnknownStatus;
  }
  return jobStatusFromEnvironment as JobStatus;
}
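
Two details of the status handling above are worth pinning down. First, `process.env[EnvVar.JOB_STATUS] ?? JobStatus.ConfigErrorStatus` uses nullish coalescing, so a status exported by an earlier step is never overwritten (and, unlike `||`, `??` would even preserve an empty string). Second, the new `getFinalJobStatus` collapses validation into a single guard. A condensed sketch of both, using an illustrative stand-in for the real JobStatus enum:

// Illustrative stand-in for the real JobStatus enum in status-report.ts.
enum JobStatus {
  SuccessStatus = "success",
  FailureStatus = "failure",
  ConfigErrorStatus = "configuration_error",
  UnknownStatus = "unknown",
}

// An earlier step's exported status takes precedence over the fallback.
function effectiveStatus(fromEnv: string | undefined, fallback: JobStatus): string {
  return fromEnv ?? fallback;
}

// Anything unset or outside the enum maps to UnknownStatus.
function finalJobStatus(raw: string | undefined): JobStatus {
  if (!raw || !Object.values(JobStatus).includes(raw as JobStatus)) {
    return JobStatus.UnknownStatus;
  }
  return raw as JobStatus;
}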

+6 -74
@@ -14,14 +14,13 @@ import {
import { getGitHubVersion } from "./api-client";
import { CachingKind } from "./caching-utils";
import { getCodeQL } from "./codeql";
import { type Config, getConfig } from "./config-utils";
import { Config, getConfig } from "./config-utils";
import * as debugArtifacts from "./debug-artifacts";
import {
  DependencyCachingUsageReport,
  getDependencyCacheUsage,
} from "./dependency-caching";
import { EnvVar } from "./environment";
import { initFeatures } from "./feature-flags";
import { Features } from "./feature-flags";
import * as gitUtils from "./git-utils";
import * as initActionPostHelper from "./init-action-post-helper";
import { getActionsLogger } from "./logging";
@@ -34,7 +33,6 @@ import {
  getActionsStatus,
  ActionName,
  getJobStatusDisplayName,
  JobStatus,
} from "./status-report";
import { checkDiskUsage, checkGitHubVersionInRange, wrapError } from "./util";

@@ -62,7 +60,7 @@ async function run(startedAt: Date) {
  checkGitHubVersionInRange(gitHubVersion, logger);

  const repositoryNwo = getRepositoryNwo();
  const features = initFeatures(
  const features = new Features(
    gitHubVersion,
    repositoryNwo,
    getTemporaryDirectory(),
@@ -87,7 +85,7 @@ async function run(startedAt: Date) {
    logger,
  );

  // If we are analyzing the default branch and some kind of caching is enabled,
  // If we are analysing the default branch and some kind of caching is enabled,
  // then try to determine our overall cache usage for dependency caches. We only
  // do this under these circumstances to avoid slowing down analyses for PRs
  // and where caching may not be enabled.
@@ -117,7 +115,7 @@ async function run(startedAt: Date) {
    }
    return;
  }
  const jobStatus = getFinalJobStatus(config);
  const jobStatus = initActionPostHelper.getFinalJobStatus();
  logger.info(`CodeQL job status was ${getJobStatusDisplayName(jobStatus)}.`);

  const statusReportBase = await createStatusReportBase(
@@ -132,7 +130,7 @@ async function run(startedAt: Date) {
  const statusReport: InitPostStatusReport = {
    ...statusReportBase,
    ...uploadFailedSarifResult,
    job_status: jobStatus,
    job_status: initActionPostHelper.getFinalJobStatus(),
    dependency_caching_usage: dependencyCachingUsage,
  };
  logger.info("Sending status report for init-post step.");
@@ -141,72 +139,6 @@ async function run(startedAt: Date) {
  }
}

/**
 * Determine the final job status to be reported in the status report.
 *
 * If the job status has already been set by another step, we use that.
 * Otherwise, we determine the job status based on whether the analyze step
 * completed successfully and whether we have a valid CodeQL config.
 */
function getFinalJobStatus(config: Config | undefined): JobStatus {
  const existingJobStatus = getJobStatusFromEnvironment();
  if (existingJobStatus !== undefined) {
    return existingJobStatus;
  }

  let jobStatus: JobStatus;

  if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true") {
    core.exportVariable(EnvVar.JOB_STATUS, JobStatus.SuccessStatus);
    jobStatus = JobStatus.SuccessStatus;
  } else if (config !== undefined) {
    // - We have computed a CodeQL config
    // - Analyze didn't complete successfully
    // - The job status hasn't already been set to Failure/ConfigurationError
    //
    // This means that something along the way failed in a step that is not
    // owned by the Action, for example a manual build step. We consider this a
    // configuration error.
    jobStatus = JobStatus.ConfigErrorStatus;
  } else {
    // If we didn't manage to compute a CodeQL config, it is unclear at this
    // point why the analyze Action didn't complete.
    // - One possibility is that the workflow run was cancelled. We could
    //   consider determining workflow cancellation using the GitHub API, but
    //   for now we treat all these cases as unknown.
    // - Another possibility is that we're running a workflow that only runs
    //   `init`, for instance a workflow that was created before `setup-codeql`
    //   was available and uses `init` just to set up the CodeQL tools.
    jobStatus = JobStatus.UnknownStatus;
  }

  // This shouldn't be necessary, but in the odd case that we run more than one
  // `init` post step, ensure the job status is consistent between them.
  core.exportVariable(EnvVar.JOB_STATUS, jobStatus);
  return jobStatus;
}

/**
 * Get the job status from the environment variable, if it has been set.
 *
 * If the job status is invalid, return `UnknownStatus`.
 */
function getJobStatusFromEnvironment(): JobStatus | undefined {
  const jobStatusFromEnvironment = process.env[EnvVar.JOB_STATUS];

  if (jobStatusFromEnvironment !== undefined) {
    // Validate the job status from the environment. If it is invalid, return unknown.
    if (
      Object.values(JobStatus).includes(jobStatusFromEnvironment as JobStatus)
    ) {
      return jobStatusFromEnvironment as JobStatus;
    }
    return JobStatus.UnknownStatus;
  }

  return undefined;
}

async function runWrapper() {
  const startedAt = new Date();
  const logger = getActionsLogger();

+22 -100
@@ -2,7 +2,6 @@ import * as fs from "fs";
import * as path from "path";

import * as core from "@actions/core";
import * as github from "@actions/github";
import * as io from "@actions/io";
import * as semver from "semver";
import { v4 as uuidV4 } from "uuid";
@@ -31,35 +30,30 @@ import {
} from "./dependency-caching";
import {
  addDiagnostic,
  addNoLanguageDiagnostic,
  flushDiagnostics,
  logUnwrittenDiagnostics,
  makeDiagnostic,
  makeTelemetryDiagnostic,
} from "./diagnostics";
import { EnvVar } from "./environment";
import { Feature, FeatureEnablement, initFeatures } from "./feature-flags";
import {
  loadPropertiesFromApi,
  RepositoryProperties,
} from "./feature-flags/properties";
import { Feature, Features } from "./feature-flags";
import { loadPropertiesFromApi } from "./feature-flags/properties";
import {
  checkInstallPython311,
  checkPacksForOverlayCompatibility,
  cleanupDatabaseClusterDirectory,
  getFileCoverageInformationEnabled,
  initCodeQL,
  initConfig,
  runDatabaseInitCluster,
} from "./init";
import { JavaEnvVars, KnownLanguage } from "./languages";
import { KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import {
  downloadOverlayBaseDatabaseFromCache,
  OverlayBaseDatabaseDownloadStats,
  OverlayDatabaseMode,
} from "./overlay";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
} from "./overlay-database-utils";
import { getRepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
  ActionName,
@@ -93,9 +87,6 @@ import {
  checkActionVersion,
  getErrorMessage,
  BuildMode,
  GitHubVersion,
  Result,
  getOptionalEnvVar,
} from "./util";
import { checkWorkflow } from "./workflow";

@@ -211,7 +202,7 @@ async function run(startedAt: Date) {
  let config: configUtils.Config | undefined;
  let configFile: string | undefined;
  let codeql: CodeQL;
  let features: FeatureEnablement;
  let features: Features;
  let sourceRoot: string;
  let toolsDownloadStatusReport: ToolsDownloadStatusReport | undefined;
  let toolsFeatureFlagsValid: boolean | undefined;
@@ -238,7 +229,7 @@ async function run(startedAt: Date) {

  const repositoryNwo = getRepositoryNwo();

  features = initFeatures(
  features = new Features(
    gitHubVersion,
    repositoryNwo,
    getTemporaryDirectory(),
@@ -246,12 +237,12 @@ async function run(startedAt: Date) {
  );

  // Fetch the values of known repository properties that affect us.
  const repositoryPropertiesResult = await loadRepositoryProperties(
    repositoryNwo,
    gitHubVersion,
    features,
    logger,
  const enableRepoProps = await features.getValue(
    Feature.UseRepositoryProperties,
  );
  const repositoryProperties = enableRepoProps
    ? await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo)
    : {};

  // Create a unique identifier for this run.
  const jobRunUuid = uuidV4();
@@ -343,7 +334,6 @@ async function run(startedAt: Date) {
  }

  analysisKinds = await getAnalysisKinds(logger);
  const debugMode = getOptionalInput("debug") === "true" || core.isDebug();
  config = await initConfig(features, {
    analysisKinds,
    languagesInput: getOptionalInput("languages"),
@@ -360,7 +350,7 @@ async function run(startedAt: Date) {
    // - The `init` Action is passed `debug: true`.
    // - Actions step debugging is enabled (e.g. by [enabling debug logging for a rerun](https://docs.github.com/en/actions/managing-workflow-runs/re-running-workflows-and-jobs#re-running-all-the-jobs-in-a-workflow),
    //   or by setting the `ACTIONS_STEP_DEBUG` secret to `true`).
    debugMode,
    debugMode: getOptionalInput("debug") === "true" || core.isDebug(),
    debugArtifactName:
      getOptionalInput("debug-artifact-name") || DEFAULT_DEBUG_ARTIFACT_NAME,
    debugDatabaseName:
@@ -373,28 +363,10 @@ async function run(startedAt: Date) {
    githubVersion: gitHubVersion,
    apiDetails,
    features,
    repositoryProperties: repositoryPropertiesResult.orElse({}),
    enableFileCoverageInformation: await getFileCoverageInformationEnabled(
      debugMode,
      repositoryNwo,
      features,
    ),
    repositoryProperties,
    logger,
  });

  if (repositoryPropertiesResult.isFailure()) {
    addNoLanguageDiagnostic(
      config,
      makeTelemetryDiagnostic(
        "codeql-action/repository-properties-load-failure",
        "Failed to load repository properties",
        {
          error: getErrorMessage(repositoryPropertiesResult.value),
        },
      ),
    );
  }

  await checkInstallPython311(config.languages, codeql);
} catch (unwrappedError) {
  const error = wrapError(unwrappedError);
@@ -457,8 +429,11 @@ async function run(startedAt: Date) {

  // Log CodeQL download telemetry, if appropriate
  if (toolsDownloadStatusReport) {
    addNoLanguageDiagnostic(
    addDiagnostic(
      config,
      // Arbitrarily choose the first language. We could also choose all languages, but that
      // increases the risk of misinterpreting the data.
      config.languages[0],
      makeTelemetryDiagnostic(
        "codeql-action/bundle-download-telemetry",
        "CodeQL bundle download telemetry",
@@ -754,19 +729,6 @@ async function run(startedAt: Date) {
    }
  }

  // Enable Java network debugging if the FF is enabled.
  if (await features.getValue(Feature.JavaNetworkDebugging)) {
    // Get the existing value of `JAVA_TOOL_OPTIONS`, if any.
    const existingJavaToolOptions =
      getOptionalEnvVar(JavaEnvVars.JAVA_TOOL_OPTIONS) || "";

    // Add the network debugging options.
    core.exportVariable(
      JavaEnvVars.JAVA_TOOL_OPTIONS,
      `${existingJavaToolOptions} -Djavax.net.debug=all`,
    );
  }

  // Write diagnostics to the database that we previously stored in memory because the database
  // did not exist until now.
  flushDiagnostics(config);
@@ -813,49 +775,6 @@ async function run(startedAt: Date) {
  );
}

/**
 * Loads [repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) if applicable.
 */
async function loadRepositoryProperties(
  repositoryNwo: RepositoryNwo,
  gitHubVersion: GitHubVersion,
  features: FeatureEnablement,
  logger: Logger,
): Promise<Result<RepositoryProperties, unknown>> {
  // See if we can skip loading repository properties early. In particular,
  // repositories owned by users cannot have repository properties, so we can
  // skip the API call entirely in that case.
  const repositoryOwnerType = github.context.payload.repository?.owner.type;
  logger.debug(
    `Repository owner type is '${repositoryOwnerType ?? "unknown"}'.`,
  );
  if (repositoryOwnerType === "User") {
    logger.debug(
      "Skipping loading repository properties because the repository is owned by a user and " +
        "therefore cannot have repository properties.",
    );
    return Result.success({});
  }

  if (!(await features.getValue(Feature.UseRepositoryProperties))) {
    logger.debug(
      "Skipping loading repository properties because the UseRepositoryProperties feature flag is disabled.",
    );
    return Result.success({});
  }

  try {
    return Result.success(
      await loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo),
    );
  } catch (error) {
    logger.warning(
      `Failed to load repository properties: ${getErrorMessage(error)}`,
    );
    return Result.failure(error);
  }
}

function getTrapCachingEnabled(): boolean {
  // If the workflow specified something always respect that
  const trapCaching = getOptionalInput("trap-caching");
@@ -872,8 +791,11 @@ async function recordZstdAvailability(
  config: configUtils.Config,
  zstdAvailability: ZstdAvailability,
) {
  addNoLanguageDiagnostic(
  addDiagnostic(
    config,
    // Arbitrarily choose the first language. We could also choose all languages, but that
    // increases the risk of misinterpreting the data.
    config.languages[0],
    makeTelemetryDiagnostic(
      "codeql-action/zstd-availability",
      "Zstandard availability",

@@ -2,20 +2,14 @@ import * as fs from "fs";
import path from "path";

import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import { createStubCodeQL } from "./codeql";
import { Feature } from "./feature-flags";
import {
  checkPacksForOverlayCompatibility,
  cleanupDatabaseClusterDirectory,
  getFileCoverageInformationEnabled,
} from "./init";
import { KnownLanguage } from "./languages";
import { parseRepositoryNwo } from "./repository";
import {
  createFeatures,
  LoggedMessage,
  createTestConfig,
  getRecordingLogger,
@@ -448,61 +442,3 @@ test(
    expectedResult: true,
  },
);

test("file coverage information enabled when debugMode is true", async (t) => {
  t.true(
    await getFileCoverageInformationEnabled(
      true, // debugMode
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([Feature.SkipFileCoverageOnPrs]),
    ),
  );
});

test("file coverage information enabled when not analyzing a pull request", async (t) => {
  sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false);

  t.true(
    await getFileCoverageInformationEnabled(
      false, // debugMode
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([Feature.SkipFileCoverageOnPrs]),
    ),
  );
});

test("file coverage information enabled when owner is not 'github'", async (t) => {
  sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);

  t.true(
    await getFileCoverageInformationEnabled(
      false, // debugMode
      parseRepositoryNwo("other-org/some-repo"),
      createFeatures([Feature.SkipFileCoverageOnPrs]),
    ),
  );
});

test("file coverage information enabled when feature flag is not enabled", async (t) => {
  sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);

  t.true(
    await getFileCoverageInformationEnabled(
      false, // debugMode
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([]),
    ),
  );
});

test("file coverage information disabled when all conditions for skipping are met", async (t) => {
  sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true);

  t.false(
    await getFileCoverageInformationEnabled(
      false, // debugMode
      parseRepositoryNwo("github/codeql-action"),
      createFeatures([Feature.SkipFileCoverageOnPrs]),
    ),
  );
});

+2 -29
@@ -5,22 +5,13 @@ import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";
import * as yaml from "js-yaml";

import {
  getOptionalInput,
  isAnalyzingPullRequest,
  isSelfHostedRunner,
} from "./actions-util";
import { getOptionalInput, isSelfHostedRunner } from "./actions-util";
import { GitHubApiDetails } from "./api-client";
import { CodeQL, setupCodeQL } from "./codeql";
import * as configUtils from "./config-utils";
import {
  CodeQLDefaultVersionInfo,
  Feature,
  FeatureEnablement,
} from "./feature-flags";
import { CodeQLDefaultVersionInfo, FeatureEnablement } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { RepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import { ZstdAvailability } from "./tar";
import { ToolsDownloadStatusReport } from "./tools-download";
@@ -297,21 +288,3 @@ export function cleanupDatabaseClusterDirectory(
    }
  }
}

export async function getFileCoverageInformationEnabled(
  debugMode: boolean,
  repositoryNwo: RepositoryNwo,
  features: FeatureEnablement,
): Promise<boolean> {
  return (
    // Always enable file coverage information in debug mode
    debugMode ||
    // We're most interested in speeding up PRs, and we want to keep
    // submitting file coverage information for the default branch since
    // it is used to populate the status page.
    !isAnalyzingPullRequest() ||
    // For now, restrict this feature to the GitHub org
    repositoryNwo.owner !== "github" ||
    !(await features.getValue(Feature.SkipFileCoverageOnPrs))
  );
}

@@ -19,11 +19,3 @@ export enum KnownLanguage {
  rust = "rust",
  swift = "swift",
}

/** Java-specific environment variable names that we may care about. */
export enum JavaEnvVars {
  JAVA_HOME = "JAVA_HOME",
  JAVA_TOOL_OPTIONS = "JAVA_TOOL_OPTIONS",
  JDK_JAVA_OPTIONS = "JDK_JAVA_OPTIONS",
  _JAVA_OPTIONS = "_JAVA_OPTIONS",
}
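
The removed `getFileCoverageInformationEnabled` encodes "skip file coverage" as the conjunction of four conditions, so coverage stays on unless all of them hold at once: not in debug mode, analyzing a PR, repository owned by `github`, and the skip flag enabled. An equivalent, de-Morganed reading of that return expression, which matches the deleted tests above:

// coverageEnabled === !skip, where skip requires every condition at once.
function coverageEnabled(
  debugMode: boolean,
  analyzingPullRequest: boolean,
  owner: string,
  skipFlagEnabled: boolean,
): boolean {
  const skip =
    !debugMode && analyzingPullRequest && owner === "github" && skipFlagEnabled;
  return !skip;
}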

@@ -5,20 +5,12 @@ import * as actionsCache from "@actions/cache";
import test from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "../actions-util";
import * as apiClient from "../api-client";
import { ResolveDatabaseOutput } from "../codeql";
import * as gitUtils from "../git-utils";
import { KnownLanguage } from "../languages";
import { getRunnerLogger } from "../logging";
import {
  createTestConfig,
  mockCodeQLVersion,
  setupTests,
} from "../testing-utils";
import * as utils from "../util";
import { withTmpDir } from "../util";

import * as actionsUtil from "./actions-util";
import * as apiClient from "./api-client";
import { ResolveDatabaseOutput } from "./codeql";
import * as gitUtils from "./git-utils";
import { KnownLanguage } from "./languages";
import { getRunnerLogger } from "./logging";
import {
  downloadOverlayBaseDatabaseFromCache,
  getCacheRestoreKeyPrefix,
@@ -26,7 +18,14 @@ import {
  OverlayDatabaseMode,
  writeBaseDatabaseOidsFile,
  writeOverlayChangesFile,
} from ".";
} from "./overlay-database-utils";
import {
  createTestConfig,
  mockCodeQLVersion,
  setupTests,
} from "./testing-utils";
import * as utils from "./util";
import { withTmpDir } from "./util";

setupTests(test);

@@ -8,13 +8,13 @@ import {
  getTemporaryDirectory,
  getWorkflowRunAttempt,
  getWorkflowRunID,
} from "../actions-util";
import { getAutomationID } from "../api-client";
import { createCacheKeyHash } from "../caching-utils";
import { type CodeQL } from "../codeql";
import { type Config } from "../config-utils";
import { getCommitOid, getFileOidsUnderPath } from "../git-utils";
import { Logger, withGroupAsync } from "../logging";
} from "./actions-util";
import { getAutomationID } from "./api-client";
import { createCacheKeyHash } from "./caching-utils";
import { type CodeQL } from "./codeql";
import { type Config } from "./config-utils";
import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import {
  CleanupLevel,
  getBaseDatabaseOidsFilePath,
@@ -23,7 +23,7 @@ import {
  isInTestMode,
  tryGetFolderBytes,
  waitForResultWithTimeLimit,
} from "../util";
} from "./util";

export enum OverlayDatabaseMode {
  Overlay = "overlay",
@@ -1,172 +0,0 @@
import * as fs from "fs";
import * as path from "path";

import * as actionsCache from "@actions/cache";
import test from "ava";
import * as sinon from "sinon";

import {
  getRecordingLogger,
  LoggedMessage,
  mockCodeQLVersion,
  setupTests,
} from "../testing-utils";
import { DiskUsage, withTmpDir } from "../util";

import { getCacheKey, shouldSkipOverlayAnalysis } from "./status";

setupTests(test);

function makeDiskUsage(totalGiB: number): DiskUsage {
  return {
    numTotalBytes: totalGiB * 1024 * 1024 * 1024,
    numAvailableBytes: 0,
  };
}

test("getCacheKey incorporates language, CodeQL version, and disk space", async (t) => {
  const codeql = mockCodeQLVersion("2.20.0");
  t.is(
    await getCacheKey(codeql, ["javascript"], makeDiskUsage(50)),
    "codeql-overlay-status-javascript-2.20.0-runner-50GB",
  );
  t.is(
    await getCacheKey(codeql, ["python"], makeDiskUsage(50)),
    "codeql-overlay-status-python-2.20.0-runner-50GB",
  );
  t.is(
    await getCacheKey(
      mockCodeQLVersion("2.21.0"),
      ["javascript"],
      makeDiskUsage(50),
    ),
    "codeql-overlay-status-javascript-2.21.0-runner-50GB",
  );
  t.is(
    await getCacheKey(codeql, ["javascript"], makeDiskUsage(100)),
    "codeql-overlay-status-javascript-2.20.0-runner-100GB",
  );
});

test("getCacheKey sorts and joins multiple languages", async (t) => {
  const codeql = mockCodeQLVersion("2.20.0");
  t.is(
    await getCacheKey(codeql, ["python", "javascript"], makeDiskUsage(50)),
    "codeql-overlay-status-javascript+python-2.20.0-runner-50GB",
  );
  t.is(
    await getCacheKey(codeql, ["javascript", "python"], makeDiskUsage(50)),
    "codeql-overlay-status-javascript+python-2.20.0-runner-50GB",
  );
});

test("getCacheKey rounds disk space down to nearest 10 GiB", async (t) => {
  const codeql = mockCodeQLVersion("2.20.0");
  t.is(
    await getCacheKey(codeql, ["javascript"], makeDiskUsage(14)),
    "codeql-overlay-status-javascript-2.20.0-runner-10GB",
  );
  t.is(
    await getCacheKey(codeql, ["javascript"], makeDiskUsage(19)),
    "codeql-overlay-status-javascript-2.20.0-runner-10GB",
  );
});

test("shouldSkipOverlayAnalysis returns false when no cached status exists", async (t) => {
  await withTmpDir(async (tmpDir) => {
    process.env["RUNNER_TEMP"] = tmpDir;
    const codeql = mockCodeQLVersion("2.20.0");
    const messages: LoggedMessage[] = [];
    const logger = getRecordingLogger(messages);

    sinon.stub(actionsCache, "restoreCache").resolves(undefined);

    const result = await shouldSkipOverlayAnalysis(
      codeql,
      ["javascript"],
      makeDiskUsage(50),
      logger,
    );

    t.false(result);
    t.true(
      messages.some(
        (m) =>
          m.type === "debug" &&
          typeof m.message === "string" &&
          m.message.includes("No overlay status found in Actions cache."),
      ),
    );
  });
});

test("shouldSkipOverlayAnalysis returns true when cached status indicates failed build", async (t) => {
  await withTmpDir(async (tmpDir) => {
    process.env["RUNNER_TEMP"] = tmpDir;
    const codeql = mockCodeQLVersion("2.20.0");
    const messages: LoggedMessage[] = [];
    const logger = getRecordingLogger(messages);

    const status = {
      attemptedToBuildOverlayBaseDatabase: true,
      builtOverlayBaseDatabase: false,
    };

    // Stub restoreCache to write the status file and return a key
    sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => {
      const statusFile = paths[0];
      await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
      await fs.promises.writeFile(statusFile, JSON.stringify(status));
      return "found-key";
    });

    const result = await shouldSkipOverlayAnalysis(
      codeql,
      ["javascript"],
      makeDiskUsage(50),
      logger,
    );

    t.true(result);
  });
});

test("shouldSkipOverlayAnalysis returns false when cached status indicates successful build", async (t) => {
  await withTmpDir(async (tmpDir) => {
    process.env["RUNNER_TEMP"] = tmpDir;
    const codeql = mockCodeQLVersion("2.20.0");
    const messages: LoggedMessage[] = [];
    const logger = getRecordingLogger(messages);

    const status = {
      attemptedToBuildOverlayBaseDatabase: true,
      builtOverlayBaseDatabase: true,
    };

    sinon.stub(actionsCache, "restoreCache").callsFake(async (paths) => {
      const statusFile = paths[0];
      await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
      await fs.promises.writeFile(statusFile, JSON.stringify(status));
      return "found-key";
    });

    const result = await shouldSkipOverlayAnalysis(
      codeql,
      ["javascript"],
      makeDiskUsage(50),
      logger,
    );

    t.false(result);
    t.true(
      messages.some(
        (m) =>
          m.type === "debug" &&
          typeof m.message === "string" &&
          m.message.includes(
            "Cached overlay status does not indicate a previous unsuccessful attempt",
          ),
      ),
    );
  });
});
|
||||
@@ -1,190 +0,0 @@
|
||||
/*
|
||||
* We perform enablement checks for overlay analysis to avoid using it on runners that are too small
|
||||
* to support it. However these checks cannot avoid every potential issue without being overly
|
||||
* conservative. Therefore, if our enablement checks enable overlay analysis for a runner that is
|
||||
* too small, we want to remember that, so that we will not try to use overlay analysis until
|
||||
* something changes (e.g. a larger runner is provisioned, or a new CodeQL version is released).
|
||||
*
|
||||
* We use the Actions cache as a lightweight way of providing this functionality.
|
||||
*/
|
||||
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import * as actionsCache from "@actions/cache";
|
||||
|
||||
import { getTemporaryDirectory } from "../actions-util";
|
||||
import { type CodeQL } from "../codeql";
|
||||
import { Logger } from "../logging";
|
||||
import {
|
||||
DiskUsage,
|
||||
getErrorMessage,
|
||||
waitForResultWithTimeLimit,
|
||||
} from "../util";
|
||||
|
||||
/** The maximum time to wait for a cache operation to complete. */
|
||||
const MAX_CACHE_OPERATION_MS = 30_000;
|
||||
|
||||
/** File name for the serialized overlay status. */
|
||||
const STATUS_FILE_NAME = "overlay-status.json";
|
||||
|
||||
/** Path to the local overlay status file. */
|
||||
function getStatusFilePath(languages: string[]): string {
|
||||
return path.join(
|
||||
getTemporaryDirectory(),
|
||||
"overlay-status",
|
||||
[...languages].sort().join("+"),
|
||||
STATUS_FILE_NAME,
|
||||
);
|
||||
}
|
||||
|
||||
/** Status of an overlay analysis for a group of languages. */
|
||||
export interface OverlayStatus {
|
||||
/** Whether the job attempted to build an overlay base database. */
|
||||
attemptedToBuildOverlayBaseDatabase: boolean;
|
||||
/** Whether the job successfully built an overlay base database. */
|
||||
builtOverlayBaseDatabase: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether overlay analysis should be skipped, based on the cached status for the given languages and disk usage.
|
||||
*/
|
||||
export async function shouldSkipOverlayAnalysis(
|
||||
codeql: CodeQL,
|
||||
languages: string[],
|
||||
diskUsage: DiskUsage,
|
||||
logger: Logger,
|
||||
): Promise<boolean> {
|
||||
const status = await getOverlayStatus(codeql, languages, diskUsage, logger);
|
||||
if (status === undefined) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
status.attemptedToBuildOverlayBaseDatabase &&
|
||||
!status.builtOverlayBaseDatabase
|
||||
) {
|
||||
logger.debug(
|
||||
"Cached overlay status indicates that building an overlay base database was unsuccessful.",
|
||||
);
|
||||
return true;
|
||||
}
|
||||
logger.debug(
|
||||
"Cached overlay status does not indicate a previous unsuccessful attempt to build an overlay base database.",
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve overlay status from the Actions cache, if available.
|
||||
*
|
||||
* @returns `undefined` if no status was found in the cache (e.g. first run with
|
||||
* this cache key) or if the cache operation fails.
|
||||
*/
|
||||
export async function getOverlayStatus(
|
||||
codeql: CodeQL,
|
||||
languages: string[],
|
||||
diskUsage: DiskUsage,
|
||||
logger: Logger,
|
||||
): Promise<OverlayStatus | undefined> {
|
||||
const cacheKey = await getCacheKey(codeql, languages, diskUsage);
|
||||
const statusFile = getStatusFilePath(languages);
|
||||
|
||||
try {
|
||||
await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
|
||||
const foundKey = await waitForResultWithTimeLimit(
|
||||
MAX_CACHE_OPERATION_MS,
|
||||
actionsCache.restoreCache([statusFile], cacheKey),
|
||||
() => {
|
||||
logger.warning("Timed out restoring overlay status from cache.");
|
||||
},
|
||||
);
|
||||
if (foundKey === undefined) {
|
||||
logger.debug("No overlay status found in Actions cache.");
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(statusFile)) {
|
||||
logger.debug(
|
||||
"Overlay status cache entry found but status file is missing.",
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const contents = await fs.promises.readFile(statusFile, "utf-8");
|
||||
const parsed: unknown = JSON.parse(contents);
|
||||
if (
|
||||
typeof parsed !== "object" ||
|
||||
parsed === null ||
|
||||
typeof parsed["attemptedToBuildOverlayBaseDatabase"] !== "boolean" ||
|
||||
typeof parsed["builtOverlayBaseDatabase"] !== "boolean"
|
||||
) {
|
||||
logger.debug(
|
||||
"Ignoring overlay status cache entry with unexpected format.",
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
return parsed as OverlayStatus;
|
||||
} catch (error) {
|
||||
logger.warning(
|
||||
`Failed to restore overlay status from cache: ${getErrorMessage(error)}`,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
}

/**
 * Save overlay status to the Actions cache.
 *
 * @returns `true` if the status was saved successfully, `false` otherwise.
 */
export async function saveOverlayStatus(
  codeql: CodeQL,
  languages: string[],
  diskUsage: DiskUsage,
  status: OverlayStatus,
  logger: Logger,
): Promise<boolean> {
  const cacheKey = await getCacheKey(codeql, languages, diskUsage);
  const statusFile = getStatusFilePath(languages);

  try {
    await fs.promises.mkdir(path.dirname(statusFile), { recursive: true });
    await fs.promises.writeFile(statusFile, JSON.stringify(status));
    const cacheId = await waitForResultWithTimeLimit(
      MAX_CACHE_OPERATION_MS,
      actionsCache.saveCache([statusFile], cacheKey),
      () => {
        logger.warning("Timed out saving overlay status to cache.");
      },
    );
    if (cacheId === undefined) {
      return false;
    }
    logger.debug(`Saved overlay status to Actions cache with key ${cacheKey}`);
    return true;
  } catch (error) {
    logger.warning(
      `Failed to save overlay status to cache: ${getErrorMessage(error)}`,
    );
    return false;
  }
}
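
Both cache operations are raced against MAX_CACHE_OPERATION_MS via waitForResultWithTimeLimit, whose definition is not part of this diff. A plausible sketch of its shape, assuming it resolves to undefined and fires the callback when the time limit wins the race:

async function waitForResultWithTimeLimit<T>(
  timeoutMs: number,
  promise: Promise<T>,
  onTimeout: () => void,
): Promise<T | undefined> {
  let timer: NodeJS.Timeout | undefined;
  const timeout = new Promise<undefined>((resolve) => {
    timer = setTimeout(() => {
      onTimeout();
      resolve(undefined);
    }, timeoutMs);
  });
  try {
    // Note: the underlying cache operation keeps running after a timeout;
    // callers merely stop waiting for it.
    return await Promise.race([promise, timeout]);
  } finally {
    if (timer !== undefined) {
      clearTimeout(timer);
    }
  }
}
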

export async function getCacheKey(
  codeql: CodeQL,
  languages: string[],
  diskUsage: DiskUsage,
): Promise<string> {
  // Total disk space, rounded down to the nearest multiple of 10 GB. This is included in the
  // cache key so that if a customer upgrades their runner, we will try again to use overlay
  // analysis, even if the CodeQL version has not changed. We round down to a multiple of 10 GB
  // to work around small differences in disk space.
  //
  // Limitation: this can still flip from "too small" to "large enough" and back again if the disk
  // space fluctuates above and below a multiple of 10 GB.
  const diskSpaceToNearest10Gb = `${10 * Math.floor(diskUsage.numTotalBytes / (10 * 1024 * 1024 * 1024))}GB`;

  // Include the CodeQL version in the cache key so we will try again to use overlay analysis when
  // new queries and libraries that may be more efficient are released.
  return `codeql-overlay-status-${[...languages].sort().join("+")}-${(await codeql.getVersion()).version}-runner-${diskSpaceToNearest10Gb}`;
}
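
A worked example of the resulting key shape, with all inputs hypothetical (an 84 GB runner and CLI version 2.23.0; the real values come from checkDiskUsage and codeql.getVersion()):

const numTotalBytes = 84 * 1024 * 1024 * 1024; // hypothetical runner disk size
const diskSpaceToNearest10Gb = `${10 * Math.floor(numTotalBytes / (10 * 1024 * 1024 * 1024))}GB`; // "80GB"
const languages = ["javascript", "java"];
const version = "2.23.0"; // assumed CLI version for the example
console.log(
  `codeql-overlay-status-${[...languages].sort().join("+")}-${version}-runner-${diskSpaceToNearest10Gb}`,
);
// => "codeql-overlay-status-java+javascript-2.23.0-runner-80GB"
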

@@ -10,7 +10,7 @@ import {
import { getGitHubVersion } from "./api-client";
import { CodeQL } from "./codeql";
import { EnvVar } from "./environment";
import { initFeatures } from "./feature-flags";
import { Features } from "./feature-flags";
import { initCodeQL } from "./init";
import { getActionsLogger, Logger } from "./logging";
import { getRepositoryNwo } from "./repository";
@@ -114,7 +114,7 @@ async function run(startedAt: Date): Promise<void> {

  const repositoryNwo = getRepositoryNwo();

  const features = initFeatures(
  const features = new Features(
    gitHubVersion,
    repositoryNwo,
    getTemporaryDirectory(),
@@ -1,22 +1,18 @@
import * as path from "path";

import * as github from "@actions/github";
import * as toolcache from "@actions/tool-cache";
import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import * as actionsUtil from "./actions-util";
import * as api from "./api-client";
import { Feature, FeatureEnablement } from "./feature-flags";
import { getRunnerLogger } from "./logging";
import * as setupCodeql from "./setup-codeql";
import * as tar from "./tar";
import {
  LINKED_CLI_VERSION,
  LoggedMessage,
  SAMPLE_DEFAULT_CLI_VERSION,
  SAMPLE_DOTCOM_API_DETAILS,
  checkExpectedLogMessages,
  createFeatures,
  getRecordingLogger,
  initializeFeatures,
@@ -272,127 +268,13 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to dow
  });
});

test("getCodeQLSource correctly returns nightly CLI version when tools == nightly", async (t) => {
  const loggedMessages: LoggedMessage[] = [];
  const logger = getRecordingLogger(loggedMessages);
  const features = createFeatures([]);

  const expectedDate = "30260213";
  const expectedTag = `codeql-bundle-${expectedDate}`;

  // Ensure that we consistently select "zstd" for the test.
  sinon.stub(process, "platform").value("linux");
  sinon.stub(tar, "isZstdAvailable").resolves({
    available: true,
    foundZstdBinary: true,
  });

  const client = github.getOctokit("123");
  const listReleases = sinon.stub(client.rest.repos, "listReleases");
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  listReleases.resolves({
    data: [{ tag_name: expectedTag }],
  } as any);
  sinon.stub(api, "getApiClient").value(() => client);

  await withTmpDir(async (tmpDir) => {
    setupActionsVars(tmpDir, tmpDir);
    const source = await setupCodeql.getCodeQLSource(
      "nightly",
      SAMPLE_DEFAULT_CLI_VERSION,
      SAMPLE_DOTCOM_API_DETAILS,
      GitHubVariant.DOTCOM,
      false,
      features,
      logger,
    );

    // Check that the `CodeQLToolsSource` object matches our expectations.
    const expectedVersion = `0.0.0-${expectedDate}`;
    const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`;
    t.deepEqual(source, {
      bundleVersion: expectedDate,
      cliVersion: undefined,
      codeqlURL: expectedURL,
      compressionMethod: "zstd",
      sourceType: "download",
      toolsVersion: expectedVersion,
    } satisfies setupCodeql.CodeQLToolsSource);

    // Afterwards, ensure that we see the expected messages in the log.
    checkExpectedLogMessages(t, loggedMessages, [
      "Using the latest CodeQL CLI nightly, as requested by 'tools: nightly'.",
      `Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`,
      `Attempting to obtain CodeQL tools. CLI version: unknown, bundle tag name: ${expectedTag}`,
      `Using CodeQL CLI sourced from ${expectedURL}`,
    ]);
  });
});

test("getCodeQLSource correctly returns nightly CLI version when forced by FF", async (t) => {
  const loggedMessages: LoggedMessage[] = [];
  const logger = getRecordingLogger(loggedMessages);
  const features = createFeatures([Feature.ForceNightly]);

  const expectedDate = "30260213";
  const expectedTag = `codeql-bundle-${expectedDate}`;

  // Ensure that we consistently select "zstd" for the test.
  sinon.stub(process, "platform").value("linux");
  sinon.stub(tar, "isZstdAvailable").resolves({
    available: true,
    foundZstdBinary: true,
  });

  const client = github.getOctokit("123");
  const listReleases = sinon.stub(client.rest.repos, "listReleases");
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  listReleases.resolves({
    data: [{ tag_name: expectedTag }],
  } as any);
  sinon.stub(api, "getApiClient").value(() => client);

  await withTmpDir(async (tmpDir) => {
    setupActionsVars(tmpDir, tmpDir);
    process.env["GITHUB_EVENT_NAME"] = "dynamic";

    const source = await setupCodeql.getCodeQLSource(
      undefined,
      SAMPLE_DEFAULT_CLI_VERSION,
      SAMPLE_DOTCOM_API_DETAILS,
      GitHubVariant.DOTCOM,
      false,
      features,
      logger,
    );

    // Check that the `CodeQLToolsSource` object matches our expectations.
    const expectedVersion = `0.0.0-${expectedDate}`;
    const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`;
    t.deepEqual(source, {
      bundleVersion: expectedDate,
      cliVersion: undefined,
      codeqlURL: expectedURL,
      compressionMethod: "zstd",
      sourceType: "download",
      toolsVersion: expectedVersion,
    } satisfies setupCodeql.CodeQLToolsSource);

    // Afterwards, ensure that we see the expected messages in the log.
    checkExpectedLogMessages(t, loggedMessages, [
      `Using the latest CodeQL CLI nightly, as forced by the ${Feature.ForceNightly} feature flag.`,
      `Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`,
      `Attempting to obtain CodeQL tools. CLI version: unknown, bundle tag name: ${expectedTag}`,
      `Using CodeQL CLI sourced from ${expectedURL}`,
    ]);
  });
});

test("getCodeQLSource correctly returns latest version from toolcache when tools == toolcache", async (t) => {
  const loggedMessages: LoggedMessage[] = [];
  const logger = getRecordingLogger(loggedMessages);
  const features = createFeatures([Feature.AllowToolcacheInput]);

  process.env["GITHUB_EVENT_NAME"] = "dynamic";

  const latestToolcacheVersion = "3.2.1";
  const latestVersionPath = "/path/to/latest";
  const testVersions = ["2.3.1", latestToolcacheVersion, "1.2.3"];
@@ -406,8 +288,6 @@ test("getCodeQLSource correctly returns latest version from toolcache when tools

  await withTmpDir(async (tmpDir) => {
    setupActionsVars(tmpDir, tmpDir);
    process.env["GITHUB_EVENT_NAME"] = "dynamic";

    const source = await setupCodeql.getCodeQLSource(
      "toolcache",
      SAMPLE_DEFAULT_CLI_VERSION,
@@ -463,17 +343,16 @@ const toolcacheInputFallbackMacro = test.macro({
    const logger = getRecordingLogger(loggedMessages);
    const features = createFeatures(featureList);

    for (const [k, v] of Object.entries(environment)) {
      process.env[k] = v;
    }

    const findAllVersionsStub = sinon
      .stub(toolcache, "findAllVersions")
      .returns(testVersions);

    await withTmpDir(async (tmpDir) => {
      setupActionsVars(tmpDir, tmpDir);

      for (const [k, v] of Object.entries(environment)) {
        process.env[k] = v;
      }

      const source = await setupCodeql.getCodeQLSource(
        "toolcache",
        SAMPLE_DEFAULT_CLI_VERSION,
@@ -10,7 +10,6 @@ import { v4 as uuidV4 } from "uuid";
import { isDynamicWorkflow, isRunningLocalAction } from "./actions-util";
import * as api from "./api-client";
import * as defaults from "./defaults.json";
import { addNoLanguageDiagnostic, makeDiagnostic } from "./diagnostics";
import {
  CODEQL_VERSION_ZSTD_BUNDLE,
  CodeQLDefaultVersionInfo,
@@ -56,9 +55,7 @@ function getCodeQLBundleExtension(
  }
}

export function getCodeQLBundleName(
  compressionMethod: tar.CompressionMethod,
): string {
function getCodeQLBundleName(compressionMethod: tar.CompressionMethod): string {
  const extension = getCodeQLBundleExtension(compressionMethod);

  let platform: string;
@@ -199,7 +196,7 @@ export function convertToSemVer(version: string, logger: Logger): string {
  return s;
}

export type CodeQLToolsSource =
type CodeQLToolsSource =
  | {
      codeqlTarPath: string;
      compressionMethod: tar.CompressionMethod;
@@ -264,20 +261,6 @@ async function findOverridingToolsInCache(
  return undefined;
}

/**
 * Determines where the CodeQL CLI we want to use comes from. This can be from a local file,
 * the Actions toolcache, or a download.
 *
 * @param toolsInput The argument provided for the `tools` input, if any.
 * @param defaultCliVersion The default CLI version that's linked to the CodeQL Action.
 * @param apiDetails Information about the GitHub API.
 * @param variant The GitHub variant we are running on.
 * @param tarSupportsZstd Whether zstd is supported by `tar`.
 * @param features Information about enabled features.
 * @param logger The logger to use.
 *
 * @returns Information about where the CodeQL CLI we want to use comes from.
 */
export async function getCodeQLSource(
  toolsInput: string | undefined,
  defaultCliVersion: CodeQLDefaultVersionInfo,
@@ -287,9 +270,6 @@ export async function getCodeQLSource(
  features: FeatureEnablement,
  logger: Logger,
): Promise<CodeQLToolsSource> {
  // If there is an explicit `tools` input that is not one of the reserved values and doesn't
  // appear to point to a URL, then we assume it is a local path and use the CLI from there.
  // TODO: This appears to misclassify filenames that happen to start with `http` as URLs.
  if (
    toolsInput &&
    !isReservedToolsValue(toolsInput) &&
@@ -322,47 +302,13 @@
   */
  let url: string | undefined;

  // We allow forcing the nightly CLI via the FF for `dynamic` events (or in test mode) where the
  // `tools` input cannot be adjusted to explicitly request it.
  const canForceNightlyWithFF = isDynamicWorkflow() || util.isInTestMode();
  const forceNightlyValueFF = await features.getValue(Feature.ForceNightly);
  const forceNightly = forceNightlyValueFF && canForceNightlyWithFF;

  // For advanced workflows, a value from `CODEQL_NIGHTLY_TOOLS_INPUTS` can be specified explicitly
  // for the `tools` input in the workflow file.
  const nightlyRequestedByToolsInput =
  if (
    toolsInput !== undefined &&
    CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput);

  if (forceNightly || nightlyRequestedByToolsInput) {
    if (forceNightly) {
      logger.info(
        `Using the latest CodeQL CLI nightly, as forced by the ${Feature.ForceNightly} feature flag.`,
      );
      addNoLanguageDiagnostic(
        undefined,
        makeDiagnostic(
          "codeql-action/forced-nightly-cli",
          "A nightly release of CodeQL was used",
          {
            markdownMessage:
              "GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\n" +
              "Nightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\n" +
              "If use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.",
            visibility: {
              cliSummaryTable: true,
              statusPage: true,
              telemetry: true,
            },
            severity: "note",
          },
        ),
      );
    } else {
      logger.info(
        `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`,
      );
    }
    CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)
  ) {
    logger.info(
      `Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`,
    );
    toolsInput = await getNightlyToolsUrl(logger);
  }
@@ -2,37 +2,132 @@ import { ChildProcess, spawn } from "child_process";
import * as path from "path";

import * as core from "@actions/core";
import * as toolcache from "@actions/tool-cache";
import { pki } from "node-forge";

import * as actionsUtil from "./actions-util";
import { getGitHubVersion } from "./api-client";
import { Feature, FeatureEnablement, initFeatures } from "./feature-flags";
import { getApiDetails, getAuthorizationHeaderFor } from "./api-client";
import { Config } from "./config-utils";
import { KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import { getRepositoryNwo } from "./repository";
import {
  credentialToStr,
  Credential,
  getCredentials,
  getProxyBinaryPath,
  getSafeErrorMessage,
  getDownloadUrl,
  parseLanguage,
  ProxyInfo,
  sendFailedStatusReport,
  sendSuccessStatusReport,
  Registry,
  ProxyConfig,
  UPDATEJOB_PROXY,
} from "./start-proxy";
import { generateCertificateAuthority } from "./start-proxy/ca";
import { checkProxyEnvironment } from "./start-proxy/environment";
import { checkConnections } from "./start-proxy/reachability";
import { ActionName, sendUnhandledErrorStatusReport } from "./status-report";
import {
  ActionName,
  createStatusReportBase,
  getActionsStatus,
  sendStatusReport,
  sendUnhandledErrorStatusReport,
  StatusReportBase,
} from "./status-report";
import * as util from "./util";

const KEY_SIZE = 2048;
const KEY_EXPIRY_YEARS = 2;

type CertificateAuthority = {
  cert: string;
  key: string;
};

type BasicAuthCredentials = {
  username: string;
  password: string;
};

type ProxyConfig = {
  all_credentials: Credential[];
  ca: CertificateAuthority;
  proxy_auth?: BasicAuthCredentials;
};

const CERT_SUBJECT = [
  {
    name: "commonName",
    value: "Dependabot Internal CA",
  },
  {
    name: "organizationName",
    value: "GitHub inc.",
  },
  {
    shortName: "OU",
    value: "Dependabot",
  },
  {
    name: "countryName",
    value: "US",
  },
  {
    shortName: "ST",
    value: "California",
  },
  {
    name: "localityName",
    value: "San Francisco",
  },
];

function generateCertificateAuthority(): CertificateAuthority {
  const keys = pki.rsa.generateKeyPair(KEY_SIZE);
  const cert = pki.createCertificate();
  cert.publicKey = keys.publicKey;
  cert.serialNumber = "01";
  cert.validity.notBefore = new Date();
  cert.validity.notAfter = new Date();
  cert.validity.notAfter.setFullYear(
    cert.validity.notBefore.getFullYear() + KEY_EXPIRY_YEARS,
  );

  cert.setSubject(CERT_SUBJECT);
  cert.setIssuer(CERT_SUBJECT);
  cert.setExtensions([{ name: "basicConstraints", cA: true }]);
  cert.sign(keys.privateKey);

  const pem = pki.certificateToPem(cert);
  const key = pki.privateKeyToPem(keys.privateKey);
  return { cert: pem, key };
}

interface StartProxyStatus extends StatusReportBase {
  // A comma-separated list of registry types which are configured for CodeQL.
  // This only includes registry types we support, not all that are configured.
  registry_types: string;
}

async function sendSuccessStatusReport(
  startedAt: Date,
  config: Partial<Config>,
  registry_types: string[],
  logger: Logger,
) {
  const statusReportBase = await createStatusReportBase(
    ActionName.StartProxy,
    "success",
    startedAt,
    config,
    await util.checkDiskUsage(logger),
    logger,
  );
  if (statusReportBase !== undefined) {
    const statusReport: StartProxyStatus = {
      ...statusReportBase,
      registry_types: registry_types.join(","),
    };
    await sendStatusReport(statusReport);
  }
}

async function run(startedAt: Date) {
  // To capture errors appropriately, keep as much code within the try-catch as
  // possible, and only use safe functions outside.

  const logger = getActionsLogger();
  let features: FeatureEnablement | undefined;
  let language: KnownLanguage | undefined;

  try {
@@ -44,21 +139,9 @@ async function run(startedAt: Date) {
    const proxyLogFilePath = path.resolve(tempDir, "proxy.log");
    core.saveState("proxy-log-file", proxyLogFilePath);

    // Initialise FFs.
    const repositoryNwo = getRepositoryNwo();
    const gitHubVersion = await getGitHubVersion();
    features = initFeatures(
      gitHubVersion,
      repositoryNwo,
      actionsUtil.getTemporaryDirectory(),
      logger,
    );

    // Get the language input.
    // Get the configuration options
    const languageInput = actionsUtil.getOptionalInput("language");
    language = languageInput ? parseLanguage(languageInput) : undefined;

    // Get the registry configurations from one of the inputs.
    const credentials = getCredentials(
      logger,
      actionsUtil.getOptionalInput("registry_secrets"),
@@ -77,22 +160,7 @@ async function run(startedAt: Date) {
        .join("\n")}`,
    );

    // Check the environment for any configurations which may affect the proxy.
    // This is a best effort process to give us insights into potential factors
    // which may affect the operation of our proxy.
    if (core.isDebug() || util.isInTestMode()) {
      try {
        await checkProxyEnvironment(logger, language);
      } catch (err) {
        logger.debug(
          `Unable to inspect runner environment: ${util.getErrorMessage(err)}`,
        );
      }
    }

    const ca = generateCertificateAuthority(
      await features.getValue(Feature.ImprovedProxyCertificates),
    );
    const ca = generateCertificateAuthority();

    const proxyConfig: ProxyConfig = {
      all_credentials: credentials,
@@ -101,15 +169,7 @@ async function run(startedAt: Date) {

    // Start the Proxy
    const proxyBin = await getProxyBinaryPath(logger);
    const proxyInfo = await startProxy(
      proxyBin,
      proxyConfig,
      proxyLogFilePath,
      logger,
    );

    // Check that the private registries are reachable.
    await checkConnections(logger, proxyInfo);
    await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);

    // Report success if we have reached this point.
    await sendSuccessStatusReport(
@@ -121,7 +181,25 @@ async function run(startedAt: Date) {
      logger,
    );
  } catch (unwrappedError) {
    await sendFailedStatusReport(logger, startedAt, language, unwrappedError);
    const error = util.wrapError(unwrappedError);
    core.setFailed(`start-proxy action failed: ${error.message}`);

    // We skip sending the error message and stack trace here to avoid the possibility
    // of leaking any sensitive information into the telemetry.
    const errorStatusReportBase = await createStatusReportBase(
      ActionName.StartProxy,
      getActionsStatus(error),
      startedAt,
      {
        languages: language && [language],
      },
      await util.checkDiskUsage(logger),
      logger,
      "Error from start-proxy Action omitted",
    );
    if (errorStatusReportBase !== undefined) {
      await sendStatusReport(errorStatusReportBase);
    }
  }
}

@@ -136,7 +214,7 @@ async function runWrapper() {
    await sendUnhandledErrorStatusReport(
      ActionName.StartProxy,
      startedAt,
      getSafeErrorMessage(util.wrapError(error)),
      new Error("Error from start-proxy Action omitted"),
      logger,
    );
  }
@@ -147,7 +225,7 @@ async function startProxy(
  config: ProxyConfig,
  logFilePath: string,
  logger: Logger,
): Promise<ProxyInfo> {
) {
  const host = "127.0.0.1";
  let port = 49152;
  let subprocess: ChildProcess | undefined = undefined;
@@ -190,15 +268,51 @@ async function startProxy(
  core.setOutput("proxy_port", port.toString());
  core.setOutput("proxy_ca_certificate", config.ca.cert);

  const registry_urls: Registry[] = config.all_credentials
  const registry_urls = config.all_credentials
    .filter((credential) => credential.url !== undefined)
    .map((credential) => ({
      type: credential.type,
      url: credential.url,
    }));
  core.setOutput("proxy_urls", JSON.stringify(registry_urls));
}

  return { host, port, cert: config.ca.cert, registries: registry_urls };
async function getProxyBinaryPath(logger: Logger): Promise<string> {
  const proxyFileName =
    process.platform === "win32" ? `${UPDATEJOB_PROXY}.exe` : UPDATEJOB_PROXY;
  const proxyInfo = await getDownloadUrl(logger);

  let proxyBin = toolcache.find(proxyFileName, proxyInfo.version);
  if (!proxyBin) {
    const apiDetails = getApiDetails();
    const authorization = getAuthorizationHeaderFor(
      logger,
      apiDetails,
      proxyInfo.url,
    );
    const temp = await toolcache.downloadTool(
      proxyInfo.url,
      undefined,
      authorization,
      {
        accept: "application/octet-stream",
      },
    );
    const extracted = await toolcache.extractTar(temp);
    proxyBin = await toolcache.cacheDir(
      extracted,
      proxyFileName,
      proxyInfo.version,
    );
  }
  proxyBin = path.join(proxyBin, proxyFileName);
  return proxyBin;
}

function credentialToStr(c: Credential): string {
  return `Type: ${c.type}; Host: ${c.host}; Url: ${c.url} Username: ${
    c.username
  }; Password: ${c.password !== undefined}; Token: ${c.token !== undefined}`;
}

void runWrapper();
@@ -1,110 +1,16 @@
import * as filepath from "path";

import * as core from "@actions/core";
import * as toolcache from "@actions/tool-cache";
import test, { ExecutionContext } from "ava";
import test from "ava";
import sinon from "sinon";

import * as apiClient from "./api-client";
import * as defaults from "./defaults.json";
import { KnownLanguage } from "./languages";
import { getRunnerLogger, Logger } from "./logging";
import { getRunnerLogger } from "./logging";
import * as startProxyExports from "./start-proxy";
import { parseLanguage } from "./start-proxy";
import * as statusReport from "./status-report";
import {
  checkExpectedLogMessages,
  getRecordingLogger,
  makeTestToken,
  setupTests,
  withRecordingLoggerAsync,
} from "./testing-utils";
import { ConfigurationError } from "./util";
import { setupTests } from "./testing-utils";

setupTests(test);

const sendFailedStatusReportTest = test.macro({
  exec: async (
    t: ExecutionContext<unknown>,
    err: Error,
    expectedMessage: string,
    expectedStatus: statusReport.ActionStatus = "failure",
  ) => {
    const now = new Date();

    // Override core.setFailed to avoid it setting the program's exit code
    sinon.stub(core, "setFailed").returns();

    const createStatusReportBase = sinon.stub(
      statusReport,
      "createStatusReportBase",
    );
    createStatusReportBase.resolves(undefined);

    await withRecordingLoggerAsync(async (logger) => {
      await startProxyExports.sendFailedStatusReport(
        logger,
        now,
        undefined,
        err,
      );

      // Check that the stub has been called exactly once, with the expected arguments,
      // but not with the message from the error.
      sinon.assert.calledOnceWithExactly(
        createStatusReportBase,
        statusReport.ActionName.StartProxy,
        expectedStatus,
        now,
        sinon.match.any,
        sinon.match.any,
        sinon.match.any,
        expectedMessage,
      );
      t.false(
        createStatusReportBase.calledWith(
          statusReport.ActionName.StartProxy,
          expectedStatus,
          now,
          sinon.match.any,
          sinon.match.any,
          sinon.match.any,
          sinon.match((msg: string) => msg.includes(err.message)),
        ),
        "createStatusReportBase was called with the error message",
      );
    });
  },

  title: (providedTitle = "") => `sendFailedStatusReport - ${providedTitle}`,
});

test(
  "reports generic error message for non-StartProxyError error",
  sendFailedStatusReportTest,
  new Error("Something went wrong today"),
  "Error from start-proxy Action omitted (Error).",
);

test(
  "reports generic error message for non-StartProxyError error with safe error message",
  sendFailedStatusReportTest,
  new Error(
    startProxyExports.getStartProxyErrorMessage(
      startProxyExports.StartProxyErrorType.DownloadFailed,
    ),
  ),
  "Error from start-proxy Action omitted (Error).",
);

test(
  "reports generic error message for ConfigurationError error",
  sendFailedStatusReportTest,
  new ConfigurationError("Something went wrong today"),
  "Error from start-proxy Action omitted (ConfigurationError).",
  "user-error",
);

const toEncodedJSON = (data: any) =>
  Buffer.from(JSON.stringify(data)).toString("base64");

@@ -175,27 +81,6 @@ test("getCredentials throws error when credential is not an object", async (t) =
  }
});

test("getCredentials throws error when credential is missing type", async (t) => {
  const testCredentials = [[{ token: "abc", url: "https://localhost" }]].map(
    toEncodedJSON,
  );

  for (const testCredential of testCredentials) {
    t.throws(
      () =>
        startProxyExports.getCredentials(
          getRunnerLogger(true),
          undefined,
          testCredential,
          undefined,
        ),
      {
        message: "Invalid credentials - must have a type",
      },
    );
  }
});

test("getCredentials throws error when credential missing host and url", async (t) => {
  const testCredentials = [
    [{ type: "npm_registry", token: "abc" }],
@@ -289,37 +174,6 @@ test("getCredentials throws an error when non-printable characters are used", as
  }
});

test("getCredentials logs a warning when a PAT is used without a username", async (t) => {
  const loggedMessages = [];
  const logger = getRecordingLogger(loggedMessages);
  const likelyWrongCredentials = toEncodedJSON([
    {
      type: "git_server",
      host: "https://github.com/",
      password: `ghp_${makeTestToken()}`,
    },
  ]);

  const results = startProxyExports.getCredentials(
    logger,
    undefined,
    likelyWrongCredentials,
    undefined,
  );

  // The configuration should be accepted, despite the likely problem.
  t.assert(results);
  t.is(results.length, 1);
  t.is(results[0].type, "git_server");
  t.is(results[0].host, "https://github.com/");
  t.assert(results[0].password?.startsWith("ghp_"));

  // A warning should have been logged.
  checkExpectedLogMessages(t, loggedMessages, [
    "using a GitHub Personal Access Token (PAT), but no username was provided",
  ]);
});

test("parseLanguage", async (t) => {
  // Exact matches
  t.deepEqual(parseLanguage("csharp"), KnownLanguage.csharp);
@@ -411,211 +265,3 @@ test("getDownloadUrl returns matching release asset", async (t) => {
  t.is(info.version, defaults.cliVersion);
  t.is(info.url, "url-we-want");
});

test("credentialToStr - hides passwords", (t) => {
  const secret = "password123";
  const credential = {
    type: "maven_credential",
    password: secret,
    url: "https://localhost",
  };

  const str = startProxyExports.credentialToStr(credential);

  t.false(str.includes(secret));
  t.is(
    "Type: maven_credential; Host: undefined; Url: https://localhost Username: undefined; Password: true; Token: false",
    str,
  );
});

test("credentialToStr - hides tokens", (t) => {
  const secret = "password123";
  const credential = {
    type: "maven_credential",
    token: secret,
    url: "https://localhost",
  };

  const str = startProxyExports.credentialToStr(credential);

  t.false(str.includes(secret));
  t.is(
    "Type: maven_credential; Host: undefined; Url: https://localhost Username: undefined; Password: false; Token: true",
    str,
  );
});

test("getSafeErrorMessage - returns actual message for `StartProxyError`", (t) => {
  const error = new startProxyExports.StartProxyError(
    startProxyExports.StartProxyErrorType.DownloadFailed,
  );
  t.is(
    startProxyExports.getSafeErrorMessage(error),
    startProxyExports.getStartProxyErrorMessage(error.errorType),
  );
});

test("getSafeErrorMessage - does not return message for arbitrary errors", (t) => {
  const error = new Error(
    startProxyExports.getStartProxyErrorMessage(
      startProxyExports.StartProxyErrorType.DownloadFailed,
    ),
  );

  const message = startProxyExports.getSafeErrorMessage(error);

  t.not(message, error.message);
  t.assert(message.startsWith("Error from start-proxy Action omitted"));
  t.assert(message.includes(error.name));
});

const wrapFailureTest = test.macro({
  exec: async (
    t: ExecutionContext<unknown>,
    setup: () => void,
    fn: (logger: Logger) => Promise<void>,
  ) => {
    await withRecordingLoggerAsync(async (logger) => {
      setup();

      await t.throwsAsync(fn(logger), {
        instanceOf: startProxyExports.StartProxyError,
      });
    });
  },
  title: (providedTitle) => `${providedTitle} - wraps errors on failure`,
});

test("downloadProxy - returns file path on success", async (t) => {
  await withRecordingLoggerAsync(async (logger) => {
    const testPath = "/some/path";
    sinon.stub(toolcache, "downloadTool").resolves(testPath);

    const result = await startProxyExports.downloadProxy(
      logger,
      "url",
      undefined,
    );
    t.is(result, testPath);
  });
});

test(
  "downloadProxy",
  wrapFailureTest,
  () => {
    sinon.stub(toolcache, "downloadTool").throws();
  },
  async (logger) => {
    await startProxyExports.downloadProxy(logger, "url", undefined);
  },
);

test("extractProxy - returns file path on success", async (t) => {
  await withRecordingLoggerAsync(async (logger) => {
    const testPath = "/some/path";
    sinon.stub(toolcache, "extractTar").resolves(testPath);

    const result = await startProxyExports.extractProxy(logger, "/other/path");
    t.is(result, testPath);
  });
});

test(
  "extractProxy",
  wrapFailureTest,
  () => {
    sinon.stub(toolcache, "extractTar").throws();
  },
  async (logger) => {
    await startProxyExports.extractProxy(logger, "path");
  },
);

test("cacheProxy - returns file path on success", async (t) => {
  await withRecordingLoggerAsync(async (logger) => {
    const testPath = "/some/path";
    sinon.stub(toolcache, "cacheDir").resolves(testPath);

    const result = await startProxyExports.cacheProxy(
      logger,
      "/other/path",
      "proxy",
      "1.0",
    );
    t.is(result, testPath);
  });
});

test(
  "cacheProxy",
  wrapFailureTest,
  () => {
    sinon.stub(toolcache, "cacheDir").throws();
  },
  async (logger) => {
    await startProxyExports.cacheProxy(logger, "/other/path", "proxy", "1.0");
  },
);

test("getProxyBinaryPath - returns path from tool cache if available", async (t) => {
  mockGetReleaseByTag();

  await withRecordingLoggerAsync(async (logger) => {
    const toolcachePath = "/path/to/proxy/dir";
    sinon.stub(toolcache, "find").returns(toolcachePath);

    const path = await startProxyExports.getProxyBinaryPath(logger);

    t.assert(path);
    t.is(
      path,
      filepath.join(toolcachePath, startProxyExports.getProxyFilename()),
    );
  });
});

test("getProxyBinaryPath - downloads proxy if not in cache", async (t) => {
  const downloadUrl = "url-we-want";
  mockGetReleaseByTag([
    { name: startProxyExports.getProxyPackage(), url: downloadUrl },
  ]);

  await withRecordingLoggerAsync(async (logger) => {
    const toolcachePath = "/path/to/proxy/dir";
    const find = sinon.stub(toolcache, "find").returns("");
    const getApiDetails = sinon.stub(apiClient, "getApiDetails").returns({
      auth: "",
      url: "",
      apiURL: "",
    });
    const getAuthorizationHeaderFor = sinon
      .stub(apiClient, "getAuthorizationHeaderFor")
      .returns(undefined);
    const archivePath = "/path/to/archive";
    const downloadTool = sinon
      .stub(toolcache, "downloadTool")
      .resolves(archivePath);
    const extractedPath = "/path/to/extracted";
    const extractTar = sinon
      .stub(toolcache, "extractTar")
      .resolves(extractedPath);
    const cacheDir = sinon.stub(toolcache, "cacheDir").resolves(toolcachePath);

    const path = await startProxyExports.getProxyBinaryPath(logger);

    t.assert(find.calledOnce);
    t.assert(getApiDetails.calledOnce);
    t.assert(getAuthorizationHeaderFor.calledOnce);
    t.assert(downloadTool.calledOnceWith(downloadUrl));
    t.assert(extractTar.calledOnceWith(archivePath));
    t.assert(cacheDir.calledOnceWith(extractedPath));

    t.assert(path);
    t.is(
      path,
      filepath.join(toolcachePath, startProxyExports.getProxyFilename()),
    );
  });
});
@@ -1,174 +1,25 @@
import * as path from "path";

import * as core from "@actions/core";
import * as toolcache from "@actions/tool-cache";

import {
  getApiClient,
  getApiDetails,
  getAuthorizationHeaderFor,
} from "./api-client";
import * as artifactScanner from "./artifact-scanner";
import { Config } from "./config-utils";
import { getApiClient } from "./api-client";
import * as defaults from "./defaults.json";
import { KnownLanguage } from "./languages";
import { Logger } from "./logging";
import {
  Address,
  RawCredential,
  Registry,
  Credential,
} from "./start-proxy/types";
import {
  ActionName,
  createStatusReportBase,
  getActionsStatus,
  sendStatusReport,
  StatusReportBase,
} from "./status-report";
import * as util from "./util";
import { ConfigurationError, getErrorMessage, isDefined } from "./util";

export * from "./start-proxy/types";

/**
 * Enumerates specific error types for which we have corresponding error messages that
 * are safe to include in status reports.
 */
export enum StartProxyErrorType {
  DownloadFailed,
  ExtractionFailed,
  CacheFailed,
}

/**
 * @returns The error message corresponding to the error type.
 */
export function getStartProxyErrorMessage(
  errorType: StartProxyErrorType,
): string {
  switch (errorType) {
    case StartProxyErrorType.DownloadFailed:
      return "Failed to download proxy archive.";
    case StartProxyErrorType.ExtractionFailed:
      return "Failed to extract proxy archive.";
    case StartProxyErrorType.CacheFailed:
      return "Failed to add proxy to toolcache";
  }
}

/**
 * We want to avoid accidentally leaking secrets that may be contained in exception
 * messages in the `start-proxy` action. Consequently, we don't report the messages
 * of arbitrary exceptions. This type of error ensures that the message is one from
 * `StartProxyErrorType` and therefore safe to include in a status report.
 */
export class StartProxyError extends Error {
  public readonly errorType: StartProxyErrorType;

  constructor(errorType: StartProxyErrorType) {
    super();
    this.errorType = errorType;
  }
}

interface StartProxyStatus extends StatusReportBase {
  // A comma-separated list of registry types which are configured for CodeQL.
  // This only includes registry types we support, not all that are configured.
  registry_types: string;
}

/**
 * Sends a status report for the `start-proxy` action indicating a successful outcome.
 *
 * @param startedAt When the action was started.
 * @param config The configuration used.
 * @param registry_types The types of registries that are configured.
 * @param logger The logger to use.
 */
export async function sendSuccessStatusReport(
  startedAt: Date,
  config: Partial<Config>,
  registry_types: string[],
  logger: Logger,
) {
  const statusReportBase = await createStatusReportBase(
    ActionName.StartProxy,
    "success",
    startedAt,
    config,
    await util.checkDiskUsage(logger),
    logger,
  );
  if (statusReportBase !== undefined) {
    const statusReport: StartProxyStatus = {
      ...statusReportBase,
      registry_types: registry_types.join(","),
    };
    await sendStatusReport(statusReport);
  }
}

/**
 * Returns an error message for `error` that can safely be reported in a status report,
 * i.e. that does not contain sensitive information.
 *
 * @param error The error for which to get an error message.
 */
export function getSafeErrorMessage(error: Error): string {
  // If the error is a `StartProxyError`, resolve the error type to the corresponding
  // error message.
  if (error instanceof StartProxyError) {
    return getStartProxyErrorMessage(error.errorType);
  }

  // Otherwise, omit the actual error message.
  return `Error from start-proxy Action omitted (${error.constructor.name}).`;
}
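
The effect, illustrated with a runnable local re-implementation (simplified here purely for demonstration; the authoritative definitions are the ones above):

class StartProxyErrorSketch extends Error {}

function safeMessage(error: Error): string {
  if (error instanceof StartProxyErrorSketch) {
    return "Failed to download proxy archive."; // a known-safe, fixed message
  }
  return `Error from start-proxy Action omitted (${error.constructor.name}).`;
}

// A raw exception that might embed a secret never reaches telemetry verbatim:
console.log(safeMessage(new Error("connect failed: token=ghp_...")));
// => "Error from start-proxy Action omitted (Error)."
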

/**
 * Sends a status report for the `start-proxy` action indicating a failure.
 *
 * @param logger The logger to use.
 * @param startedAt When the action was started.
 * @param language The language provided as input, if any.
 * @param unwrappedError The exception that was thrown.
 */
export async function sendFailedStatusReport(
  logger: Logger,
  startedAt: Date,
  language: KnownLanguage | undefined,
  unwrappedError: unknown,
) {
  const error = util.wrapError(unwrappedError);
  core.setFailed(`start-proxy action failed: ${error.message}`);

  // To avoid the possibility of leaking sensitive information into the telemetry,
  // we don't include arbitrary error messages. Instead, `getSafeErrorMessage` will
  // return a generic message that includes the type of the error, unless it can
  // determine that the message is safe to include.
  const statusReportMessage = getSafeErrorMessage(error);
  const errorStatusReportBase = await createStatusReportBase(
    ActionName.StartProxy,
    getActionsStatus(error),
    startedAt,
    {
      languages: language && [language],
    },
    await util.checkDiskUsage(logger),
    logger,
    statusReportMessage,
  );
  if (errorStatusReportBase !== undefined) {
    await sendStatusReport(errorStatusReportBase);
  }
}

export const UPDATEJOB_PROXY = "update-job-proxy";
export const UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901";
const UPDATEJOB_PROXY_URL_PREFIX =
  "https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.22.0/";

export type Credential = {
  type: string;
  host?: string;
  url?: string;
  username?: string;
  password?: string;
  token?: string;
};

/*
 * Language aliases supported by the start-proxy Action.
 *
@@ -211,13 +62,6 @@ export function parseLanguage(language: string): KnownLanguage | undefined {
  return undefined;
}

function isPAT(value: string) {
  return artifactScanner.isAuthToken(value, [
    artifactScanner.GITHUB_PAT_CLASSIC_PATTERN,
    artifactScanner.GITHUB_PAT_FINE_GRAINED_PATTERN,
  ]);
}

const LANGUAGE_TO_REGISTRY_TYPE: Partial<Record<KnownLanguage, string[]>> = {
  java: ["maven_repository"],
  csharp: ["nuget_feed"],
@@ -228,31 +72,6 @@ const LANGUAGE_TO_REGISTRY_TYPE: Partial<Record<KnownLanguage, string[]>> = {
  go: ["goproxy_server", "git_source"],
} as const;

/**
 * Extracts an `Address` value from the given `Registry` value by determining whether it has
 * a `url` value, or no `url` value but a `host` value.
 *
 * @throws A `ConfigurationError` if the `Registry` value contains neither a `url` nor a `host` field.
 */
function getRegistryAddress(registry: Partial<Registry>): Address {
  if (isDefined(registry.url)) {
    return {
      url: registry.url,
      host: registry.host,
    };
  } else if (isDefined(registry.host)) {
    return {
      url: undefined,
      host: registry.host,
    };
  } else {
    // The proxy needs one of these to work. If both are defined, the url takes precedence.
    throw new ConfigurationError(
      "Invalid credentials - must specify host or url",
    );
  }
}
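
A runnable sketch of that precedence rule, re-implemented locally with simplified types just for illustration:

type AddressSketch = { url?: string; host?: string };

function registryAddress(r: AddressSketch): AddressSketch {
  if (r.url !== undefined) {
    return { url: r.url, host: r.host };
  } else if (r.host !== undefined) {
    return { url: undefined, host: r.host };
  }
  throw new Error("Invalid credentials - must specify host or url");
}

console.log(registryAddress({ url: "https://registry.example", host: "registry.example" }));
// => { url: "https://registry.example", host: "registry.example" } (url wins)
console.log(registryAddress({ host: "registry.example" }));
// => { url: undefined, host: "registry.example" }
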

// getCredentials returns registry credentials from action inputs.
// It prefers `registries_credentials` over `registry_secrets`.
// If neither is set, it returns an empty array.
@@ -279,9 +98,9 @@ export function getCredentials(
  }

  // Parse and validate the credentials
  let parsed: RawCredential[];
  let parsed: Credential[];
  try {
    parsed = JSON.parse(credentialsStr) as RawCredential[];
    parsed = JSON.parse(credentialsStr) as Credential[];
  } catch {
    // Don't log the error since it might contain sensitive information.
    logger.error("Failed to parse the credentials data.");
@@ -301,11 +120,6 @@
      throw new ConfigurationError("Invalid credentials - must be an object");
    }

    // The configuration must have a type.
    if (!isDefined(e.type)) {
      throw new ConfigurationError("Invalid credentials - must have a type");
    }

    // Mask credentials to reduce chance of accidental leakage in logs.
    if (isDefined(e.password)) {
      core.setSecret(e.password);
@@ -314,7 +128,12 @@
      core.setSecret(e.token);
    }

    const address = getRegistryAddress(e);
    if (!isDefined(e.url) && !isDefined(e.host)) {
      // The proxy needs one of these to work. If both are defined, the url takes precedence.
      throw new ConfigurationError(
        "Invalid credentials - must specify host or url",
      );
    }

    // Filter credentials based on language if specified. `type` is the registry type,
    // e.g. "maven_repository" for Java/Kotlin, "nuget_feed" for C#.
@@ -342,25 +161,13 @@
      );
    }

    // If the password or token looks like a GitHub PAT, warn if no username is configured.
    if (
      !isDefined(e.username) &&
      ((isDefined(e.password) && isPAT(e.password)) ||
        (isDefined(e.token) && isPAT(e.token)))
    ) {
      logger.warning(
        `A ${e.type} private registry is configured for ${e.host || e.url} using a GitHub Personal Access Token (PAT), but no username was provided. ` +
          `This may not work correctly. When configuring a private registry using a PAT, select "Username and password" and enter the username of the user ` +
          `who generated the PAT.`,
      );
    }

    out.push({
      type: e.type,
      host: e.host,
      url: e.url,
      username: e.username,
      password: e.password,
      token: e.token,
      ...address,
    });
  }
  return out;
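
The `isPAT` check above delegates to patterns in artifact-scanner.ts that are not part of this diff. A hypothetical illustration of the heuristic (the regex below is an assumption made for this example, not the scanner's actual pattern):

// Assumed, simplified pattern for a classic PAT; the real patterns live in
// artifact-scanner.ts.
const CLASSIC_PAT = /^ghp_[A-Za-z0-9]{36}$/;

const looksLikePat = (value: string) => CLASSIC_PAT.test(value);

console.log(looksLikePat(`ghp_${"a".repeat(36)}`)); // true: would trigger the username warning
console.log(looksLikePat("s3cret-but-not-a-pat")); // false: no warning
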

@@ -449,125 +256,3 @@ export async function getDownloadUrl(
    version: UPDATEJOB_PROXY_VERSION,
  };
}

/**
 * Pretty-prints a `Credential` value to a string, but hides the actual password or token values.
 *
 * @param c The credential to convert to a string.
 */
export function credentialToStr(c: Credential): string {
  return `Type: ${c.type}; Host: ${c.host}; Url: ${c.url} Username: ${
    c.username
  }; Password: ${c.password !== undefined}; Token: ${c.token !== undefined}`;
}

/**
 * Attempts to download a file from `url` into the toolcache.
 *
 * @param logger The logger to use.
 * @param url The URL to download the proxy binary from.
 * @param authorization The authorization information to use.
 * @returns If successful, the path to the downloaded file.
 */
export async function downloadProxy(
  logger: Logger,
  url: string,
  authorization: string | undefined,
) {
  try {
    // Download the proxy archive from `url`. We let `downloadTool` choose where
    // to store it. The path to the downloaded file will be returned if successful.
    // Note: the `await` is needed so that a rejected download is caught by this
    // try-catch rather than propagating to the caller unwrapped.
    return await toolcache.downloadTool(url, /* dest: */ undefined, authorization, {
      accept: "application/octet-stream",
    });
  } catch (error) {
    logger.error(
      `Failed to download proxy archive from ${url}: ${getErrorMessage(error)}`,
    );
    throw new StartProxyError(StartProxyErrorType.DownloadFailed);
  }
}

/**
 * Attempts to extract the proxy binary from the `archive`.
 *
 * @param logger The logger to use.
 * @param archive The archive to extract.
 * @returns The path to the extracted file(s).
 */
export async function extractProxy(logger: Logger, archive: string) {
  try {
    return await toolcache.extractTar(archive);
  } catch (error) {
    logger.error(
      `Failed to extract proxy archive from ${archive}: ${getErrorMessage(error)}`,
    );
    throw new StartProxyError(StartProxyErrorType.ExtractionFailed);
  }
}

/**
 * Attempts to store the proxy in the toolcache.
 *
 * @param logger The logger to use.
 * @param source The source path to add to the toolcache.
 * @param filename The filename of the proxy binary.
 * @param version The version of the proxy.
 * @returns The path to the directory in the toolcache.
 */
export async function cacheProxy(
  logger: Logger,
  source: string,
  filename: string,
  version: string,
) {
  try {
    return await toolcache.cacheDir(source, filename, version);
  } catch (error) {
    logger.error(
      `Failed to add proxy archive from ${source} to toolcache: ${getErrorMessage(error)}`,
    );
    throw new StartProxyError(StartProxyErrorType.CacheFailed);
  }
}

/**
 * Returns the platform-specific filename of the proxy binary.
 */
export function getProxyFilename() {
  return process.platform === "win32"
    ? `${UPDATEJOB_PROXY}.exe`
    : UPDATEJOB_PROXY;
}

/**
 * Gets a path to the proxy binary. If possible, this function will find the proxy in the
 * runner's tool cache. Otherwise, it downloads and extracts the proxy binary,
 * and stores it in the tool cache.
 *
 * @param logger The logger to use.
 * @returns The path to the proxy binary.
 */
export async function getProxyBinaryPath(logger: Logger): Promise<string> {
  const proxyFileName = getProxyFilename();
  const proxyInfo = await getDownloadUrl(logger);

  let proxyBin = toolcache.find(proxyFileName, proxyInfo.version);
  if (!proxyBin) {
    const apiDetails = getApiDetails();
    const authorization = getAuthorizationHeaderFor(
      logger,
      apiDetails,
      proxyInfo.url,
    );
    const temp = await downloadProxy(logger, proxyInfo.url, authorization);
    const extracted = await extractProxy(logger, temp);
    proxyBin = await cacheProxy(
      logger,
      extracted,
      proxyFileName,
      proxyInfo.version,
    );
  }
  return path.join(proxyBin, proxyFileName);
}

@@ -1,93 +0,0 @@
import test, { ExecutionContext } from "ava";
import { pki } from "node-forge";

import { setupTests } from "../testing-utils";

import * as ca from "./ca";

setupTests(test);

const toMap = <T>(array: T[], func: (e: T) => string) =>
  new Map<string, T>(array.map((val) => [func(val), val]));

function checkCertAttributes(
  t: ExecutionContext<unknown>,
  cert: pki.Certificate,
) {
  const subjectMap = toMap(
    cert.subject.attributes,
    (attr) => attr.name as string,
  );
  const issuerMap = toMap(
    cert.issuer.attributes,
    (attr) => attr.name as string,
  );

  t.is(subjectMap.get("commonName")?.value, "Dependabot Internal CA");
  t.is(issuerMap.get("commonName")?.value, "Dependabot Internal CA");

  for (const attrName of subjectMap.keys()) {
    t.deepEqual(subjectMap.get(attrName), issuerMap.get(attrName));
  }
}

test("generateCertificateAuthority - generates certificates", (t) => {
  const result = ca.generateCertificateAuthority(false);
  const cert = pki.certificateFromPem(result.cert);
  const key = pki.privateKeyFromPem(result.key);

  t.truthy(cert);
  t.truthy(key);

  checkCertAttributes(t, cert);

  // Check the validity.
  t.true(
    cert.validity.notBefore <= new Date(),
    "notBefore date is in the future",
  );
  t.true(cert.validity.notAfter > new Date(), "notAfter date is in the past");

  // Check that the extensions are set as we'd expect.
  const exts = cert.extensions as ca.Extension[];
  t.is(exts.length, 1);
  t.is(exts[0].name, "basicConstraints");
  t.is(exts[0].cA, true);

  t.truthy(cert.siginfo);
});

test("generateCertificateAuthority - generates certificates with FF", (t) => {
  const result = ca.generateCertificateAuthority(true);
  const cert = pki.certificateFromPem(result.cert);
  const key = pki.privateKeyFromPem(result.key);

  t.truthy(cert);
  t.truthy(key);

  checkCertAttributes(t, cert);

  // Check the validity.
  t.true(
    cert.validity.notBefore <= new Date(),
    "notBefore date is in the future",
  );
  t.true(cert.validity.notAfter > new Date(), "notAfter date is in the past");

  // Check that the extensions are set as we'd expect.
  const exts = toMap(cert.extensions as ca.Extension[], (ext) => ext.name);
  t.is(exts.size, 4);
  t.true(exts.get("basicConstraints")?.cA);
  t.truthy(exts.get("subjectKeyIdentifier"));
  t.truthy(exts.get("authorityKeyIdentifier"));

  const keyUsage = exts.get("keyUsage");
  if (t.truthy(keyUsage)) {
    t.true(keyUsage.critical);
    t.true(keyUsage.keyCertSign);
    t.true(keyUsage.cRLSign);
    t.true(keyUsage.digitalSignature);
  }

  t.truthy(cert.siginfo);
});

@@ -1,93 +0,0 @@
import { md, pki } from "node-forge";

import { CertificateAuthority } from "./types";

const KEY_SIZE = 2048;
const KEY_EXPIRY_YEARS = 2;

const CERT_SUBJECT = [
  {
    name: "commonName",
    value: "Dependabot Internal CA",
  },
  {
    name: "organizationName",
    value: "GitHub inc.",
  },
  {
    shortName: "OU",
    value: "Dependabot",
  },
  {
    name: "countryName",
    value: "US",
  },
  {
    shortName: "ST",
    value: "California",
  },
  {
    name: "localityName",
    value: "San Francisco",
  },
];

export type Extension = {
  name: string;
  [key: string]: unknown;
};

const extraExtensions: Extension[] = [
  {
    name: "keyUsage",
    critical: true,
    keyCertSign: true,
    cRLSign: true,
    digitalSignature: true,
  },
  { name: "subjectKeyIdentifier" },
  { name: "authorityKeyIdentifier", keyIdentifier: true },
];

/**
 * Generates a CA certificate for the proxy.
 *
 * @param newCertGenFF Whether to use the updated certificate generation.
 * @returns The certificate and private key, both in PEM format.
 */
export function generateCertificateAuthority(
  newCertGenFF: boolean,
): CertificateAuthority {
  const keys = pki.rsa.generateKeyPair(KEY_SIZE);
  const cert = pki.createCertificate();
  cert.publicKey = keys.publicKey;
  cert.serialNumber = "01";
  cert.validity.notBefore = new Date();
  cert.validity.notAfter = new Date();
  cert.validity.notAfter.setFullYear(
    cert.validity.notBefore.getFullYear() + KEY_EXPIRY_YEARS,
  );

  cert.setSubject(CERT_SUBJECT);
  cert.setIssuer(CERT_SUBJECT);

  const extensions: Extension[] = [{ name: "basicConstraints", cA: true }];

  // Add the extra CA extensions if the FF is enabled.
  if (newCertGenFF) {
    extensions.push(...extraExtensions);
  }

  cert.setExtensions(extensions);

  // Specifically use SHA256 when the FF is enabled.
  if (newCertGenFF) {
    cert.sign(keys.privateKey, md.sha256.create());
  } else {
    cert.sign(keys.privateKey);
  }

  const pem = pki.certificateToPem(cert);
  const key = pki.privateKeyToPem(keys.privateKey);
  return { cert: pem, key };
}
|
||||
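
// A minimal usage sketch, assuming the node-forge API used above; since the
// generated CA is self-signed, the certificate should verify its own signature:
//
//   const { cert, key } = generateCertificateAuthority(true);
//   const parsed = pki.certificateFromPem(cert);
//   parsed.verify(parsed); // true: issuer and subject are the same CA
//   pki.privateKeyFromPem(key); // round-trips without throwing
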
@@ -1,213 +0,0 @@
import * as fs from "fs";
import * as os from "os";
import path from "path";

import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";
import test, { ExecutionContext } from "ava";
import sinon from "sinon";

import { JavaEnvVars, KnownLanguage } from "../languages";
import {
  checkExpectedLogMessages,
  getRecordingLogger,
  LoggedMessage,
  setupTests,
} from "../testing-utils";
import { withTmpDir } from "../util";

import {
  checkJavaEnvVars,
  checkJdkSettings,
  checkProxyEnvironment,
  checkProxyEnvVars,
  discoverActionsJdks,
  JAVA_PROXY_ENV_VARS,
  ProxyEnvVars,
} from "./environment";

setupTests(test);

function stubToolrunner() {
  sinon.stub(io, "which").throws(new Error("Java not installed"));
  sinon.stub(toolrunner, "ToolRunner").returns({
    exec: async () => {
      return 0;
    },
  });
}

function assertEnvVarLogMessages(
  t: ExecutionContext<any>,
  envVars: string[],
  messages: LoggedMessage[],
  expectSet: boolean | string,
) {
  const template = (envVar: string) => {
    if (typeof expectSet === "string") {
      return `Environment variable '${envVar}' is set to '${expectSet}'`;
    }
    return expectSet
      ? `Environment variable '${envVar}' is set to '${envVar}'`
      : `Environment variable '${envVar}' is not set`;
  };

  const expected: string[] = [];

  for (const envVar of envVars) {
    expected.push(template(envVar));
  }

  checkExpectedLogMessages(t, messages, expected);
}

test("checkJavaEnvironment - none set", (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  checkJavaEnvVars(logger);
  assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false);
});

test("checkJavaEnvironment - logs values when variables are set", (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  for (const envVar of Object.values(JavaEnvVars)) {
    process.env[envVar] = envVar;
  }

  checkJavaEnvVars(logger);
  assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, true);
});

test("discoverActionsJdks - discovers JDK paths", (t) => {
  // Clear GHA variables that may interfere with this test in CI.
  for (const envVar of Object.keys(process.env)) {
    if (envVar.startsWith("JAVA_HOME_")) {
      delete process.env[envVar];
    }
  }

  const jdk8 = "/usr/lib/jvm/temurin-8-jdk-amd64";
  const jdk17 = "/usr/lib/jvm/temurin-17-jdk-amd64";
  const jdk21 = "/usr/lib/jvm/temurin-21-jdk-amd64";

  process.env[JavaEnvVars.JAVA_HOME] = jdk17;
  process.env["JAVA_HOME_8_X64"] = jdk8;
  process.env["JAVA_HOME_17_X64"] = jdk17;
  process.env["JAVA_HOME_21_X64"] = jdk21;

  const results = discoverActionsJdks();
  t.is(results.size, 3);
  t.true(results.has(jdk8));
  t.true(results.has(jdk17));
  t.true(results.has(jdk21));
});

test("checkJdkSettings - does not throw for an empty directory", async (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  await withTmpDir(async (tmpDir) => {
    t.notThrows(() => checkJdkSettings(logger, tmpDir));
  });
});

test("checkJdkSettings - finds files and logs relevant properties", async (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  await withTmpDir(async (tmpDir) => {
    const dir = path.join(tmpDir, "conf");
    fs.mkdirSync(dir);

    const file = path.join(dir, "net.properties");
    fs.writeFileSync(
      file,
      [
        "irrelevant.property=foo",
        "http.proxyHost=proxy.example.com",
        "http.unrelated=bar",
      ].join(os.EOL),
      {},
    );
    checkJdkSettings(logger, tmpDir);

    checkExpectedLogMessages(t, messages, [
      `Found '${file}'.`,
      `Found 'http.proxyHost=proxy.example.com' in '${file}'`,
    ]);
  });
});

test("checkProxyEnvVars - none set", (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  checkProxyEnvVars(logger);
  assertEnvVarLogMessages(t, Object.values(ProxyEnvVars), messages, false);
});

test("checkProxyEnvVars - logs values when variables are set", (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  for (const envVar of Object.values(ProxyEnvVars)) {
    process.env[envVar] = envVar;
  }

  checkProxyEnvVars(logger);
  assertEnvVarLogMessages(t, Object.values(ProxyEnvVars), messages, true);
});

test("checkProxyEnvVars - credentials are removed from URLs", (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  for (const envVar of Object.values(ProxyEnvVars)) {
    process.env[envVar] = "https://secret:password@proxy.local";
  }

  checkProxyEnvVars(logger);
  assertEnvVarLogMessages(
    t,
    Object.values(ProxyEnvVars),
    messages,
    "https://proxy.local/",
  );
});

test("checkProxyEnvironment - includes base checks for all known languages", async (t) => {
  stubToolrunner();

  for (const language of Object.values(KnownLanguage)) {
    const messages: LoggedMessage[] = [];
    const logger = getRecordingLogger(messages);

    await checkProxyEnvironment(logger, language);
    assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false);
  }
});

test("checkProxyEnvironment - includes Java checks for Java", async (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  stubToolrunner();

  await checkProxyEnvironment(logger, KnownLanguage.java);
  assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false);
  assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false);
});

test("checkProxyEnvironment - includes language-specific checks if the language is undefined", async (t) => {
  const messages: LoggedMessage[] = [];
  const logger = getRecordingLogger(messages);

  stubToolrunner();

  await checkProxyEnvironment(logger, undefined);
  assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false);
  assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false);
});
@@ -1,209 +0,0 @@
import * as fs from "fs";
import * as path from "path";

import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";

import { JavaEnvVars, KnownLanguage, Language } from "../languages";
import { Logger } from "../logging";
import { getErrorMessage, isDefined } from "../util";

/**
 * Checks whether an environment variable named `name` is set and logs its value if set.
 *
 * @param logger The logger to use.
 * @param name The name of the environment variable.
 * @returns True if set, false otherwise.
 */
function checkEnvVar(logger: Logger, name: string): boolean {
  const value = process.env[name];
  if (isDefined(value)) {
    const url = URL.parse(value);
    if (isDefined(url)) {
      url.username = "";
      url.password = "";
      logger.info(`Environment variable '${name}' is set to '${url}'.`);
    } else {
      logger.info(`Environment variable '${name}' is set to '${value}'.`);
    }
    return true;
  } else {
    logger.debug(`Environment variable '${name}' is not set.`);
    return false;
  }
}

// The JRE properties that may affect the proxy.
const javaProperties = [
  "http.proxyHost",
  "http.proxyPort",
  "https.proxyHost",
  "https.proxyPort",
  "http.nonProxyHosts",
  "java.net.useSystemProxies",
  "javax.net.ssl.trustStore",
  "javax.net.ssl.trustStoreType",
  "javax.net.ssl.trustStoreProvider",
  "jdk.tls.client.protocols",
  "jdk.tls.disabledAlgorithms",
  "jdk.security.allowNonCaAnchor",
  "https.protocols",
  "com.sun.net.ssl.enableAIAcaIssuers",
  "com.sun.net.ssl.checkRevocation",
  "com.sun.security.enableCRLDP",
  "ocsp.enable",
];

/** Java-specific environment variables which may contain information about proxy settings. */
export const JAVA_PROXY_ENV_VARS: JavaEnvVars[] = [
  JavaEnvVars.JAVA_TOOL_OPTIONS,
  JavaEnvVars.JDK_JAVA_OPTIONS,
  JavaEnvVars._JAVA_OPTIONS,
];

/**
 * Checks whether any Java-specific environment variables which may contain proxy
 * configurations are set and logs their values if so.
 */
export function checkJavaEnvVars(logger: Logger) {
  for (const envVar of JAVA_PROXY_ENV_VARS) {
    checkEnvVar(logger, envVar);
  }
}

/**
 * Discovers paths to JDK directories based on JAVA_HOME and GHA-specific environment variables.
 *
 * @returns A set of JDK paths.
 */
export function discoverActionsJdks(): Set<string> {
  const paths: Set<string> = new Set();

  // Check whether JAVA_HOME is set.
  const javaHome = process.env[JavaEnvVars.JAVA_HOME];
  if (isDefined(javaHome)) {
    paths.add(javaHome);
  }

  for (const [envVar, value] of Object.entries(process.env)) {
    if (isDefined(value) && envVar.match(/^JAVA_HOME_\d+_/)) {
      paths.add(value);
    }
  }

  return paths;
}

/**
 * Tries to inspect JDK configuration files for the specified JDK path which may contain proxy settings.
 *
 * @param logger The logger to use.
 * @param jdkHome The JDK home directory.
 */
export function checkJdkSettings(logger: Logger, jdkHome: string) {
  const filesToCheck = [
    // JDK 9+
    path.join("conf", "net.properties"),
    // JDK 8 and below
    path.join("lib", "net.properties"),
  ];

  for (const fileToCheck of filesToCheck) {
    const file = path.join(jdkHome, fileToCheck);

    try {
      if (fs.existsSync(file)) {
        logger.debug(`Found '${file}'.`);

        const lines = String(fs.readFileSync(file)).split("\n");
        for (const line of lines) {
          for (const property of javaProperties) {
            if (line.startsWith(`${property}=`)) {
              logger.info(`Found '${line.trimEnd()}' in '${file}'.`);
            }
          }
        }
      } else {
        logger.debug(`'${file}' does not exist.`);
      }
    } catch (err) {
      logger.debug(`Failed to read '${file}': ${getErrorMessage(err)}`);
    }
  }
}

/** Invokes `java` to get it to show us the active configuration. */
async function showJavaSettings(logger: Logger): Promise<void> {
  try {
    const java = await io.which("java", true);

    let output = "";
    await new toolrunner.ToolRunner(
      java,
      ["-XshowSettings:all", "-XshowSettings:security:all", "-version"],
      {
        silent: true,
        listeners: {
          stdout: (data) => {
            output += String(data);
          },
          stderr: (data) => {
            output += String(data);
          },
        },
      },
    ).exec();

    logger.startGroup("Java settings");
    logger.info(output);
    logger.endGroup();
  } catch (err) {
    logger.debug(`Failed to query java settings: ${getErrorMessage(err)}`);
  }
}

/** Enumerates environment variable names which may contain information about proxy settings. */
export enum ProxyEnvVars {
  HTTP_PROXY = "HTTP_PROXY",
  HTTPS_PROXY = "HTTPS_PROXY",
  ALL_PROXY = "ALL_PROXY",
}

/**
 * Checks whether any proxy-related environment variables are set and logs their values if so.
 */
export function checkProxyEnvVars(logger: Logger) {
  // Both upper-case and lower-case variants of these environment variables are used.
  for (const envVar of Object.values(ProxyEnvVars)) {
    checkEnvVar(logger, envVar);
    checkEnvVar(logger, envVar.toLowerCase());
  }
}

/**
 * Inspects environment variables and other configurations on the runner to determine whether
 * any settings that may affect the operation of the proxy are present. All relevant information
 * is written to the log.
 *
 * @param logger The logger to use.
 * @param language The enabled language, if known.
 */
export async function checkProxyEnvironment(
  logger: Logger,
  language: Language | undefined,
): Promise<void> {
  // Determine whether there is an existing proxy configured.
  checkProxyEnvVars(logger);

  // Check language-specific configurations. If we don't know the language,
  // then we perform all checks.
  if (language === undefined || language === KnownLanguage.java) {
    checkJavaEnvVars(logger);

    await showJavaSettings(logger);

    const jdks = discoverActionsJdks();
    for (const jdk of jdks) {
      checkJdkSettings(logger, jdk);
    }
  }
}
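
// A usage sketch, assuming a `Logger` such as the one returned by
// `getActionsLogger()` elsewhere in this codebase; all findings are written
// to the log rather than returned:
//
//   const logger = getActionsLogger();
//   await checkProxyEnvironment(logger, KnownLanguage.java);
//   // Logs proxy env vars, Java env vars, `java -XshowSettings` output, and
//   // any proxy-related properties found in discovered JDKs' net.properties.
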
@@ -1,120 +0,0 @@
import test from "ava";
import * as sinon from "sinon";

import {
  checkExpectedLogMessages,
  setupTests,
  withRecordingLoggerAsync,
} from "./../testing-utils";
import {
  checkConnections,
  ReachabilityBackend,
  ReachabilityError,
} from "./reachability";
import { ProxyInfo, Registry } from "./types";

setupTests(test);

class MockReachabilityBackend implements ReachabilityBackend {
  public async checkConnection(_url: URL): Promise<number> {
    return 200;
  }
}

const mavenRegistry: Registry = {
  type: "maven_registry",
  url: "https://repo.maven.apache.org/maven2/",
};

const nugetFeed: Registry = {
  type: "nuget_feed",
  url: "https://api.nuget.org/v3/index.json",
};

const proxyInfo: ProxyInfo = {
  host: "127.0.0.1",
  port: 1080,
  cert: "",
  registries: [mavenRegistry, nugetFeed],
};

test("checkConnections - basic functionality", async (t) => {
  const backend = new MockReachabilityBackend();
  const messages = await withRecordingLoggerAsync(async (logger) => {
    const reachable = await checkConnections(logger, proxyInfo, backend);
    t.is(reachable.size, proxyInfo.registries.length);
    t.true(reachable.has(mavenRegistry));
    t.true(reachable.has(nugetFeed));
  });
  checkExpectedLogMessages(t, messages, [
    `Testing connection to ${mavenRegistry.url}`,
    `Successfully tested connection to ${mavenRegistry.url}`,
    `Testing connection to ${nugetFeed.url}`,
    `Successfully tested connection to ${nugetFeed.url}`,
    `Finished testing connections`,
  ]);
});

test("checkConnections - excludes failed status codes", async (t) => {
  const backend = new MockReachabilityBackend();
  sinon
    .stub(backend, "checkConnection")
    .onSecondCall()
    .throws(new ReachabilityError(400));
  const messages = await withRecordingLoggerAsync(async (logger) => {
    const reachable = await checkConnections(logger, proxyInfo, backend);
    t.is(reachable.size, 1);
    t.true(reachable.has(mavenRegistry));
  });
  checkExpectedLogMessages(t, messages, [
    `Testing connection to ${mavenRegistry.url}`,
    `Successfully tested connection to ${mavenRegistry.url}`,
    `Testing connection to ${nugetFeed.url}`,
    `Connection test to ${nugetFeed.url} failed. (400)`,
    `Finished testing connections`,
  ]);
});

test("checkConnections - handles other exceptions", async (t) => {
  const backend = new MockReachabilityBackend();
  sinon
    .stub(backend, "checkConnection")
    .onSecondCall()
    .throws(new Error("Some generic error"));
  const messages = await withRecordingLoggerAsync(async (logger) => {
    const reachable = await checkConnections(logger, proxyInfo, backend);
    t.is(reachable.size, 1);
    t.true(reachable.has(mavenRegistry));
  });
  checkExpectedLogMessages(t, messages, [
    `Testing connection to ${mavenRegistry.url}`,
    `Successfully tested connection to ${mavenRegistry.url}`,
    `Testing connection to ${nugetFeed.url}`,
    `Connection test to ${nugetFeed.url} failed: Some generic error`,
    `Finished testing connections`,
  ]);
});

test("checkConnections - handles invalid URLs", async (t) => {
  const backend = new MockReachabilityBackend();
  const messages = await withRecordingLoggerAsync(async (logger) => {
    const reachable = await checkConnections(
      logger,
      {
        ...proxyInfo,
        registries: [
          {
            type: "nuget_feed",
            url: "localhost",
          },
        ],
      },
      backend,
    );
    t.is(reachable.size, 0);
  });
  checkExpectedLogMessages(t, messages, [
    `Skipping check for localhost since it is not a valid URL.`,
    `Finished testing connections`,
  ]);
});
@@ -1,130 +0,0 @@
import * as https from "https";

import { HttpsProxyAgent } from "https-proxy-agent";

import { Logger } from "../logging";
import { getErrorMessage } from "../util";

import { getAddressString, ProxyInfo, Registry } from "./types";

export class ReachabilityError extends Error {
  constructor(public readonly statusCode?: number | undefined) {
    super();
  }
}

/**
 * Abstracts over the backend for the reachability checks,
 * to allow actual networking to be replaced with stubs.
 */
export interface ReachabilityBackend {
  /**
   * Performs a test HTTP request to the specified `url`. Resolves to the status code,
   * if a successful status code was obtained. Otherwise throws.
   *
   * @param url The URL of the registry to try and reach.
   * @returns The successful status code (in the `<400` range).
   */
  checkConnection: (url: URL) => Promise<number>;
}

class NetworkReachabilityBackend implements ReachabilityBackend {
  private agent: https.Agent;

  constructor(private readonly proxy: ProxyInfo) {
    this.agent = new HttpsProxyAgent(`http://${proxy.host}:${proxy.port}`);
  }

  public async checkConnection(url: URL): Promise<number> {
    return new Promise((resolve, reject) => {
      const req = https.request(
        url,
        {
          agent: this.agent,
          method: "HEAD",
          ca: this.proxy.cert,
          timeout: 5 * 1000, // 5 seconds
        },
        (res) => {
          res.destroy();

          if (res.statusCode !== undefined && res.statusCode < 400) {
            resolve(res.statusCode);
          } else {
            reject(new ReachabilityError(res.statusCode));
          }
        },
      );
      req.on("error", (e) => {
        reject(e);
      });
      req.on("timeout", () => {
        req.destroy();
        reject(new Error("Connection timeout."));
      });
      req.end();
    });
  }
}

/**
 * Determines which configured registries can be reached by performing test requests to them.
 *
 * @param logger The logger to use.
 * @param proxy Information about the proxy, including the configured registries.
 * @param backend Optionally for testing, a `ReachabilityBackend` to use.
 * @returns The set of registries which passed the checks.
 */
export async function checkConnections(
  logger: Logger,
  proxy: ProxyInfo,
  backend?: ReachabilityBackend,
): Promise<Set<Registry>> {
  const result: Set<Registry> = new Set();

  // Don't do anything if there are no registries.
  if (proxy.registries.length === 0) return result;

  try {
    // Initialise a networking backend if no backend was provided.
    if (backend === undefined) {
      backend = new NetworkReachabilityBackend(proxy);
    }

    for (const registry of proxy.registries) {
      const address = getAddressString(registry);
      const url = URL.parse(address);

      if (url === null) {
        logger.info(
          `Skipping check for ${address} since it is not a valid URL.`,
        );
        continue;
      }

      try {
        logger.debug(`Testing connection to ${url}...`);
        const statusCode = await backend.checkConnection(url);

        logger.info(`Successfully tested connection to ${url} (${statusCode})`);
        result.add(registry);
      } catch (e) {
        if (e instanceof ReachabilityError && e.statusCode !== undefined) {
          logger.error(`Connection test to ${url} failed. (${e.statusCode})`);
        } else {
          logger.error(
            `Connection test to ${url} failed: ${getErrorMessage(e)}`,
          );
        }
      }
    }

    logger.debug(`Finished testing connections to private registries.`);
  } catch (e) {
    logger.error(
      `Failed to test connections to private registries: ${getErrorMessage(e)}`,
    );
  }

  return result;
}
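
// A usage sketch, assuming the types above: passing a stub backend makes the
// check deterministic (as the tests above do), while omitting `backend`
// performs real HEAD requests through the configured proxy:
//
//   const stub: ReachabilityBackend = { checkConnection: async () => 200 };
//   const reachable = await checkConnections(logger, proxyInfo, stub);
//   // `reachable` now contains every registry whose test request succeeded.
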
@@ -1,81 +0,0 @@
/**
 * After parsing configurations from JSON, we don't know whether all the keys we expect are
 * present or not. This type is used to represent such values, which we expect to be
 * `Credential` values, but haven't validated yet.
 */
export type RawCredential = Partial<Credential>;

/**
 * A package registry configuration includes identifying information as well as
 * authentication credentials.
 */
export type Credential = {
  /** The username needed to authenticate to the package registry, if any. */
  username?: string;
  /** The password needed to authenticate to the package registry, if any. */
  password?: string;
  /** The token needed to authenticate to the package registry, if any. */
  token?: string;
} & Registry;

/** A package registry is identified by its type and address. */
export type Registry = {
  /** The type of the package registry. */
  type: string;
} & Address;

// If a registry has a `url`, then that takes precedence over the `host`, which may or may
// not be defined.
interface HasUrl {
  url: string;
  host?: string;
}

// If a registry does not have a `url`, then it must have a `host`.
interface WithoutUrl {
  url: undefined;
  host: string;
}

/**
 * A valid `Registry` value must either have a `url` or a `host` value. If it has a `url` value,
 * then that takes precedence over the `host` value. If there is no `url` value, then it must
 * have a `host` value.
 */
export type Address = HasUrl | WithoutUrl;

/** Gets the address as a string. This will either be the `url` if present, or the `host` if not. */
export function getAddressString(address: Address): string {
  if (address.url === undefined) {
    return address.host;
  } else {
    return address.url;
  }
}

export interface ProxyInfo {
  host: string;
  port: number;
  cert: string;
  registries: Registry[];
}

export type CertificateAuthority = {
  cert: string;
  key: string;
};

export type BasicAuthCredentials = {
  username: string;
  password: string;
};

/**
 * Represents configurations for the authentication proxy.
 */
export type ProxyConfig = {
  /** The validated configurations for the proxy. */
  all_credentials: Credential[];
  ca: CertificateAuthority;
  proxy_auth?: BasicAuthCredentials;
};
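
// A worked example of the `Address` union above: `getAddressString` prefers
// `url` and falls back to `host` (the values here are placeholders):
//
//   getAddressString({ url: "https://npm.pkg.github.com", host: "ignored" });
//   // => "https://npm.pkg.github.com"
//   getAddressString({ url: undefined, host: "artifactory.local" });
//   // => "artifactory.local"
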
@@ -18,7 +18,7 @@ import { DocUrl } from "./doc-url";
import { EnvVar } from "./environment";
import { getRef } from "./git-utils";
import { Logger } from "./logging";
import { OverlayBaseDatabaseDownloadStats } from "./overlay";
import { OverlayBaseDatabaseDownloadStats } from "./overlay-database-utils";
import { getRepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
+6 -98
@@ -21,7 +21,7 @@ import {
  FeatureEnablement,
} from "./feature-flags";
import { Logger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay-database-utils";
import {
  DEFAULT_DEBUG_ARTIFACT_NAME,
  DEFAULT_DEBUG_DATABASE_NAME,
@@ -145,67 +145,13 @@ export function setupActionsVars(tempDir: string, toolsDir: string) {
  process.env["RUNNER_TEMP"] = tempDir;
  process.env["RUNNER_TOOL_CACHE"] = toolsDir;
  process.env["GITHUB_WORKSPACE"] = tempDir;
  process.env["GITHUB_EVENT_NAME"] = "push";
}

type LogLevel = "debug" | "info" | "warning" | "error";

export interface LoggedMessage {
  type: LogLevel;
  type: "debug" | "info" | "warning" | "error";
  message: string | Error;
}

export class RecordingLogger implements Logger {
  messages: LoggedMessage[] = [];
  groups: string[] = [];
  unfinishedGroups: Set<string> = new Set();
  private currentGroup: string | undefined = undefined;

  constructor(private readonly logToConsole: boolean = true) {}

  private addMessage(level: LogLevel, message: string | Error): void {
    this.messages.push({ type: level, message });

    if (this.logToConsole) {
      // eslint-disable-next-line no-console
      console.debug(message);
    }
  }

  isDebug() {
    return true;
  }

  debug(message: string) {
    this.addMessage("debug", message);
  }

  info(message: string) {
    this.addMessage("info", message);
  }

  warning(message: string | Error) {
    this.addMessage("warning", message);
  }

  error(message: string | Error) {
    this.addMessage("error", message);
  }

  startGroup(name: string) {
    this.groups.push(name);
    this.currentGroup = name;
    this.unfinishedGroups.add(name);
  }

  endGroup() {
    if (this.currentGroup !== undefined) {
      this.unfinishedGroups.delete(this.currentGroup);
    }
    this.currentGroup = undefined;
  }
}

export function getRecordingLogger(
  messages: LoggedMessage[],
  { logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
@@ -250,49 +196,18 @@ export function checkExpectedLogMessages(
  messages: LoggedMessage[],
  expectedMessages: string[],
) {
  const missingMessages: string[] = [];

  for (const expectedMessage of expectedMessages) {
    if (
      !messages.some(
    t.assert(
      messages.some(
        (msg) =>
          typeof msg.message === "string" &&
          msg.message.includes(expectedMessage),
      )
    ) {
      missingMessages.push(expectedMessage);
    }
  }

  if (missingMessages.length > 0) {
    const listify = (lines: string[]) =>
      lines.map((m) => ` - '${m}'`).join("\n");

    t.fail(
      `Expected\n\n${listify(missingMessages)}\n\nin the logger output, but didn't find it in:\n\n${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
    ),
      `Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
    );
  } else {
    t.pass();
  }
}

/**
 * Initialises a recording logger and calls `body` with it.
 *
 * @param body The test that requires a recording logger.
 * @returns The logged messages.
 */
export async function withRecordingLoggerAsync(
  body: (logger: Logger) => Promise<void>,
): Promise<LoggedMessage[]> {
  const messages = [];
  const logger = getRecordingLogger(messages);

  await body(logger);

  return messages;
}

/** Mock the HTTP request to the feature flags enablement API endpoint. */
export function mockFeatureFlagApiEndpoint(
  responseStatusCode: number,
@@ -493,14 +408,7 @@ export function createTestConfig(overrides: Partial<Config>): Config {
      overlayDatabaseMode: OverlayDatabaseMode.None,
      useOverlayDatabaseCaching: false,
      repositoryProperties: {},
      enableFileCoverageInformation: true,
    } satisfies Config,
    overrides,
  );
}

export function makeTestToken(length: number = 36) {
  const chars =
    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
  return chars.repeat(Math.ceil(length / chars.length)).slice(0, length);
}
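
// A usage sketch of the recording-logger helpers above, inside an ava test:
//
//   const messages = await withRecordingLoggerAsync(async (logger) => {
//     logger.info("tested connection");
//   });
//   checkExpectedLogMessages(t, messages, ["tested connection"]);
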
+25 -70
@@ -12,7 +12,6 @@ import * as api from "./api-client";
import { getRunnerLogger, Logger } from "./logging";
import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib";
import { UploadPayload } from "./upload-lib/types";
import { GitHubVariant, initializeEnvironment, withTmpDir } from "./util";

setupTests(test);
@@ -129,21 +128,11 @@ test("finding SARIF files", async (t) => {
      "file",
    );

    // add some non-Code Scanning files that should be ignored, unless we look for them specifically
    for (const analysisKind of analyses.supportedAnalysisKinds) {
      if (analysisKind === AnalysisKind.CodeScanning) continue;
    // add some `.quality.sarif` files that should be ignored, unless we look for them specifically
    fs.writeFileSync(path.join(tmpDir, "a.quality.sarif"), "");
    fs.writeFileSync(path.join(tmpDir, "dir1", "b.quality.sarif"), "");

      const analysis = analyses.getAnalysisConfig(analysisKind);

      fs.writeFileSync(path.join(tmpDir, `a${analysis.sarifExtension}`), "");
      fs.writeFileSync(
        path.join(tmpDir, "dir1", `b${analysis.sarifExtension}`),
        "",
      );
    }

    const expectedSarifFiles: Partial<Record<AnalysisKind, string[]>> = {};
    expectedSarifFiles[AnalysisKind.CodeScanning] = [
    const expectedSarifFiles = [
      path.join(tmpDir, "a.sarif"),
      path.join(tmpDir, "b.sarif"),
      path.join(tmpDir, "dir1", "d.sarif"),
@@ -154,24 +143,18 @@ test("finding SARIF files", async (t) => {
      CodeScanning.sarifPredicate,
    );

    t.deepEqual(sarifFiles, expectedSarifFiles[AnalysisKind.CodeScanning]);
    t.deepEqual(sarifFiles, expectedSarifFiles);

    for (const analysisKind of analyses.supportedAnalysisKinds) {
      if (analysisKind === AnalysisKind.CodeScanning) continue;
    const expectedQualitySarifFiles = [
      path.join(tmpDir, "a.quality.sarif"),
      path.join(tmpDir, "dir1", "b.quality.sarif"),
    ];
    const qualitySarifFiles = uploadLib.findSarifFilesInDir(
      tmpDir,
      CodeQuality.sarifPredicate,
    );

      const analysis = analyses.getAnalysisConfig(analysisKind);

      expectedSarifFiles[analysisKind] = [
        path.join(tmpDir, `a${analysis.sarifExtension}`),
        path.join(tmpDir, "dir1", `b${analysis.sarifExtension}`),
      ];
      const foundSarifFiles = uploadLib.findSarifFilesInDir(
        tmpDir,
        analysis.sarifPredicate,
      );

      t.deepEqual(foundSarifFiles, expectedSarifFiles[analysisKind]);
    }
    t.deepEqual(qualitySarifFiles, expectedQualitySarifFiles);

    const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
      getRunnerLogger(true),
@@ -179,31 +162,16 @@ test("finding SARIF files", async (t) => {
    );

    t.not(groupedSarifFiles, undefined);
    for (const analysisKind of analyses.supportedAnalysisKinds) {
      t.not(groupedSarifFiles[analysisKind], undefined);
      t.deepEqual(
        groupedSarifFiles[analysisKind],
        expectedSarifFiles[analysisKind],
      );
    }
  });
});

test("getGroupedSarifFilePaths - Risk Assessment files", async (t) => {
  await withTmpDir(async (tmpDir) => {
    const sarifPath = path.join(tmpDir, "a.csra.sarif");
    fs.writeFileSync(sarifPath, "");

    const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
      getRunnerLogger(true),
      sarifPath,
    t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.deepEqual(
      groupedSarifFiles[AnalysisKind.CodeScanning],
      expectedSarifFiles,
    );
    t.deepEqual(
      groupedSarifFiles[AnalysisKind.CodeQuality],
      expectedQualitySarifFiles,
    );

    t.not(groupedSarifFiles, undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.not(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.RiskAssessment], [sarifPath]);
  });
});

@@ -220,7 +188,6 @@ test("getGroupedSarifFilePaths - Code Quality file", async (t) => {
    t.not(groupedSarifFiles, undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.is(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.CodeQuality], [sarifPath]);
  });
});
@@ -238,7 +205,6 @@ test("getGroupedSarifFilePaths - Code Scanning file", async (t) => {
    t.not(groupedSarifFiles, undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.is(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
  });
});
@@ -256,7 +222,6 @@ test("getGroupedSarifFilePaths - Other file", async (t) => {
    t.not(groupedSarifFiles, undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.is(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
  });
});
@@ -910,15 +875,7 @@ function createMockSarif(id?: string, tool?: string) {

function uploadPayloadFixtures(analysis: analyses.AnalysisConfig) {
  const mockData = {
    payload: {
      commit_oid: "abc123",
      ref: "ref",
      sarif: "base64data",
      workflow_run_id: 1,
      workflow_run_attempt: 1,
      checkout_uri: "uri",
      tool_names: ["codeql"],
    } satisfies UploadPayload,
    payload: { sarif: "base64data", commit_sha: "abc123" },
    owner: "test-owner",
    repo: "test-repo",
    response: {
@@ -950,9 +907,7 @@ function uploadPayloadFixtures(analysis: analyses.AnalysisConfig) {
  };
}

for (const analysisKind of analyses.supportedAnalysisKinds) {
  const analysis = analyses.getAnalysisConfig(analysisKind);

for (const analysis of [CodeScanning, CodeQuality]) {
  test(`uploadPayload on ${analysis.name} uploads successfully`, async (t) => {
    const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis);
    requestStub
+63 -79
@@ -11,7 +11,8 @@ import * as actionsUtil from "./actions-util";
import * as analyses from "./analyses";
import * as api from "./api-client";
import { getGitHubVersion, wrapApiConfigurationError } from "./api-client";
import { type CodeQL } from "./codeql";
import { CodeQL, getCodeQL } from "./codeql";
import { getConfig } from "./config-utils";
import { readDiffRangesJsonFile } from "./diff-informed-analysis-utils";
import { EnvVar } from "./environment";
import { FeatureEnablement } from "./feature-flags";
@@ -20,7 +21,6 @@ import * as gitUtils from "./git-utils";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import { BasePayload, UploadPayload } from "./upload-lib/types";
import * as util from "./util";
import {
  ConfigurationError,
@@ -182,44 +182,6 @@ async function shouldDisableCombineSarifFiles(
  return true;
}

/**
 * Initialises a `CodeQL` instance that we can use to combine SARIF files.
 */
export async function minimalInitCodeQL(
  logger: Logger,
  gitHubVersion: GitHubVersion,
  features: FeatureEnablement,
): Promise<CodeQL> {
  logger.info(
    "Initializing CodeQL since the 'init' Action was not called before this step.",
  );

  const apiDetails = {
    auth: actionsUtil.getRequiredInput("token"),
    externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
    url: getRequiredEnvParam("GITHUB_SERVER_URL"),
    apiURL: getRequiredEnvParam("GITHUB_API_URL"),
  };

  const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
    gitHubVersion.type,
  );

  const initCodeQLResult = await initCodeQL(
    undefined, // There is no tools input on the upload action
    apiDetails,
    actionsUtil.getTemporaryDirectory(),
    gitHubVersion.type,
    codeQLDefaultVersionInfo,
    features,
    logger,
  );

  return initCodeQLResult.codeql;
}

export type CodeQLGetter = () => Promise<CodeQL>;

// Takes a list of paths to sarif files and combines them together using the
// CLI `github merge-results` command when all SARIF files are produced by
// CodeQL. Otherwise, it will fall back to combining the files in the action.
@@ -227,10 +189,8 @@ export type CodeQLGetter = () => Promise<CodeQL>;
async function combineSarifFilesUsingCLI(
  sarifFiles: string[],
  gitHubVersion: GitHubVersion,
  _features: FeatureEnablement,
  features: FeatureEnablement,
  logger: Logger,
  getCodeQL: CodeQLGetter,
  tempDir: string,
): Promise<SarifFile> {
  logger.info("Combining SARIF files using the CodeQL CLI");

@@ -268,10 +228,45 @@ async function combineSarifFilesUsingCLI(
    return combineSarifFiles(sarifFiles, logger);
  }

  // Obtain a `CodeQL` instance. For `analyze`, this is typically the instance that was used for running the queries.
  // For `upload-sarif`, this either initialises a new instance or returns a previously initialised one if `getCodeQL`
  // is called more than once.
  const codeQL: CodeQL = await getCodeQL();
  // Initialize CodeQL, either by using the config file from the 'init' step,
  // or by initializing it here.
  let codeQL: CodeQL;
  let tempDir: string = actionsUtil.getTemporaryDirectory();

  const config = await getConfig(tempDir, logger);
  if (config !== undefined) {
    codeQL = await getCodeQL(config.codeQLCmd);
    tempDir = config.tempDir;
  } else {
    logger.info(
      "Initializing CodeQL since the 'init' Action was not called before this step.",
    );

    const apiDetails = {
      auth: actionsUtil.getRequiredInput("token"),
      externalRepoAuth: actionsUtil.getOptionalInput(
        "external-repository-token",
      ),
      url: getRequiredEnvParam("GITHUB_SERVER_URL"),
      apiURL: getRequiredEnvParam("GITHUB_API_URL"),
    };

    const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
      gitHubVersion.type,
    );

    const initCodeQLResult = await initCodeQL(
      undefined, // There is no tools input on the upload action
      apiDetails,
      tempDir,
      gitHubVersion.type,
      codeQLDefaultVersionInfo,
      features,
      logger,
    );

    codeQL = initCodeQLResult.codeql;
  }

  const baseTempDir = path.resolve(tempDir, "combined-sarif");
  fs.mkdirSync(baseTempDir, { recursive: true });
@@ -331,7 +326,7 @@ function getAutomationID(
 * This is exported for testing purposes only.
 */
export async function uploadPayload(
  payload: BasePayload,
  payload: any,
  repositoryNwo: RepositoryNwo,
  logger: Logger,
  analysis: analyses.AnalysisConfig,
@@ -623,8 +618,8 @@ export function buildPayload(
  environment: string | undefined,
  toolNames: string[],
  mergeBaseCommitOid: string | undefined,
): UploadPayload {
  const payloadObj: UploadPayload = {
) {
  const payloadObj = {
    commit_oid: commitOid,
    ref,
    analysis_key: analysisKey,
@@ -677,8 +672,6 @@ export interface PostProcessingResults {
 *
 * @param logger The logger to use.
 * @param features Information about enabled features.
 * @param getCodeQL A function to retrieve a `CodeQL` instance.
 * @param tempPath A path to a temporary directory.
 * @param checkoutPath The path where the repo was checked out at.
 * @param sarifPaths The paths of the SARIF files to post-process.
 * @param category The analysis category.
@@ -690,8 +683,6 @@ export interface PostProcessingResults {
export async function postProcessSarifFiles(
  logger: Logger,
  features: FeatureEnablement,
  getCodeQL: CodeQLGetter,
  tempPath: string,
  checkoutPath: string,
  sarifPaths: string[],
  category: string | undefined,
@@ -716,8 +707,6 @@ export async function postProcessSarifFiles(
      gitHubVersion,
      features,
      logger,
      getCodeQL,
      tempPath,
    );
  } else {
    const sarifPath = sarifPaths[0];
@@ -778,8 +767,6 @@ export async function writePostProcessedFiles(
 * to.
 */
export async function uploadFiles(
  tempDir: string,
  codeql: CodeQL,
  inputSarifPath: string,
  checkoutPath: string,
  category: string | undefined,
@@ -793,8 +780,6 @@ export async function uploadFiles(
  );

  return uploadSpecifiedFiles(
    tempDir,
    codeql,
    sarifPaths,
    checkoutPath,
    category,
@@ -808,8 +793,6 @@ export async function uploadFiles(
 * Uploads the given array of SARIF files.
 */
async function uploadSpecifiedFiles(
  tempDir: string,
  codeql: CodeQL,
  sarifPaths: string[],
  checkoutPath: string,
  category: string | undefined,
@@ -820,8 +803,6 @@ async function uploadSpecifiedFiles(
  const processingResults: PostProcessingResults = await postProcessSarifFiles(
    logger,
    features,
    async () => codeql,
    tempDir,
    checkoutPath,
    sarifPaths,
    category,
@@ -866,20 +847,18 @@ export async function uploadPostProcessedFiles(
  const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
  const checkoutURI = url.pathToFileURL(checkoutPath).href;

  const payload = uploadTarget.transformPayload(
    buildPayload(
      await gitUtils.getCommitOid(checkoutPath),
      await gitUtils.getRef(),
      postProcessingResults.analysisKey,
      util.getRequiredEnvParam("GITHUB_WORKFLOW"),
      zippedSarif,
      actionsUtil.getWorkflowRunID(),
      actionsUtil.getWorkflowRunAttempt(),
      checkoutURI,
      postProcessingResults.environment,
      toolNames,
      await gitUtils.determineBaseBranchHeadCommitOid(),
    ),
  const payload = buildPayload(
    await gitUtils.getCommitOid(checkoutPath),
    await gitUtils.getRef(),
    postProcessingResults.analysisKey,
    util.getRequiredEnvParam("GITHUB_WORKFLOW"),
    zippedSarif,
    actionsUtil.getWorkflowRunID(),
    actionsUtil.getWorkflowRunAttempt(),
    checkoutURI,
    postProcessingResults.environment,
    toolNames,
    await gitUtils.determineBaseBranchHeadCommitOid(),
  );

  // Log some useful debug info about the info
@@ -960,6 +939,7 @@ export async function waitForProcessing(
  const client = api.getApiClient();

  const statusCheckingStarted = Date.now();
  // eslint-disable-next-line no-constant-condition
  while (true) {
    if (
      Date.now() >
@@ -1148,7 +1128,11 @@ function sanitize(str?: string) {
/**
 * An error that occurred due to an invalid SARIF upload request.
 */
export class InvalidSarifUploadError extends Error {}
export class InvalidSarifUploadError extends Error {
  constructor(message: string) {
    super(message);
  }
}

function filterAlertsByDiffRange(logger: Logger, sarif: SarifFile): SarifFile {
  const diffRanges = readDiffRangesJsonFile(logger);
@@ -1,45 +0,0 @@
/**
 * Represents the minimum, common payload for SARIF upload endpoints that we support.
 */
export interface BasePayload {
  /** The gzipped contents of a SARIF file. */
  sarif: string;
}

/**
 * Represents the payload expected for Code Scanning and Code Quality SARIF uploads.
 */
export interface UploadPayload extends BasePayload {
  /** The SHA of the commit that was analysed. */
  commit_oid: string;
  /** The ref that was analysed. */
  ref: string;
  /** The analysis key that identifies the analysis. */
  analysis_key?: string;
  /** The name of the analysis. */
  analysis_name?: string;
  /** The ID of the workflow run that performed the analysis. */
  workflow_run_id: number;
  /** The attempt number. */
  workflow_run_attempt: number;
  /** The URI where the repository was checked out. */
  checkout_uri: string;
  /** The matrix value. */
  environment?: string;
  /** A string representation of when the analysis was started. */
  started_at?: string;
  /** The names of the tools that performed the analysis. */
  tool_names: string[];
  /** For a pull request, the ref of the base the PR is targeting. */
  base_ref?: string;
  /** For a pull request, the commit SHA of the merge base. */
  base_sha?: string;
}

/**
 * Represents the payload expected for Code Scanning Risk Assessment SARIF uploads.
 */
export interface AssessmentPayload extends BasePayload {
  /** The ID of the assessment that the SARIF is for. */
  assessment_id: number;
}
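
// A minimal sketch of a valid `UploadPayload` per the interface above;
// optional fields are omitted and all values are placeholders:
//
//   const payload: UploadPayload = {
//     sarif: zlib.gzipSync(sarifContents).toString("base64"),
//     commit_oid: "abc123",
//     ref: "refs/heads/main",
//     workflow_run_id: 1,
//     workflow_run_attempt: 1,
//     checkout_uri: "file:///home/runner/work/repo",
//     tool_names: ["CodeQL"],
//   };
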
@@ -1,10 +1,10 @@
import * as core from "@actions/core";

import * as actionsUtil from "./actions-util";
import { getActionVersion, getTemporaryDirectory } from "./actions-util";
import * as analyses from "./analyses";
import { getGitHubVersion } from "./api-client";
import { getConfig } from "./config-utils";
import { initFeatures } from "./feature-flags";
import { Features } from "./feature-flags";
import { Logger, getActionsLogger } from "./logging";
import { getRepositoryNwo } from "./repository";
import {
@@ -17,11 +17,7 @@ import {
  isThirdPartyAnalysis,
} from "./status-report";
import * as upload_lib from "./upload-lib";
import {
  getOrInitCodeQL,
  postProcessAndUploadSarif,
  UploadSarifState,
} from "./upload-sarif";
import { postProcessAndUploadSarif } from "./upload-sarif";
import {
  ConfigurationError,
  checkActionVersion,
@@ -63,22 +59,21 @@ async function run(startedAt: Date) {
  // possible, and only use safe functions outside.

  const logger = getActionsLogger();
  const state: UploadSarifState = { cachedCodeQL: undefined };

  try {
    initializeEnvironment(actionsUtil.getActionVersion());
    initializeEnvironment(getActionVersion());

    const gitHubVersion = await getGitHubVersion();
    checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion);
    checkActionVersion(getActionVersion(), gitHubVersion);

    // Make inputs accessible in the `post` step.
    actionsUtil.persistInputs();

    const repositoryNwo = getRepositoryNwo();
    const features = initFeatures(
    const features = new Features(
      gitHubVersion,
      repositoryNwo,
      actionsUtil.getTemporaryDirectory(),
      getTemporaryDirectory(),
      logger,
    );

@@ -99,20 +94,9 @@ async function run(startedAt: Date) {
    const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
    const category = actionsUtil.getOptionalInput("category");

    // Determine the temporary directory to use. If we are able to read a `Config` from a previous CodeQL Action
    // step in the job, then use the temporary directory configured there. Otherwise, use our default.
    let tempDir: string = actionsUtil.getTemporaryDirectory();

    const config = await getConfig(tempDir, logger);
    if (config !== undefined) {
      tempDir = config.tempDir;
    }

    const uploadResults = await postProcessAndUploadSarif(
      logger,
      tempDir,
      features,
      () => getOrInitCodeQL(state, logger, gitHubVersion, features, config),
      "always",
      checkoutPath,
      sarifPath,
@@ -5,102 +5,15 @@ import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";

import { AnalysisKind, getAnalysisConfig } from "./analyses";
import { getCodeQLForTesting } from "./codeql";
import * as codeql from "./codeql";
import { getRunnerLogger } from "./logging";
import { createFeatures, createTestConfig, setupTests } from "./testing-utils";
import { createFeatures, setupTests } from "./testing-utils";
import { UploadResult } from "./upload-lib";
import * as uploadLib from "./upload-lib";
import {
  getOrInitCodeQL,
  postProcessAndUploadSarif,
  UploadSarifState,
} from "./upload-sarif";
import { postProcessAndUploadSarif } from "./upload-sarif";
import * as util from "./util";

setupTests(test);

test("getOrInitCodeQL - gets cached CodeQL instance when available", async (t) => {
  const cachedCodeQL = await getCodeQLForTesting();
  const getCodeQL = sinon.stub(codeql, "getCodeQL").resolves(undefined);
  const minimalInitCodeQL = sinon
    .stub(uploadLib, "minimalInitCodeQL")
    .resolves(undefined);

  const result = await getOrInitCodeQL(
    { cachedCodeQL },
    getRunnerLogger(true),
    { type: util.GitHubVariant.GHES, version: "3.0" },
    createFeatures([]),
    undefined,
  );

  // Neither of the two functions to get a CodeQL instance were called.
  t.true(getCodeQL.notCalled);
  t.true(minimalInitCodeQL.notCalled);

  // But we have an instance that refers to the same object as the one we put into the state.
  t.truthy(result);
  t.is(result, cachedCodeQL);
});

test("getOrInitCodeQL - uses minimalInitCodeQL when there's no config", async (t) => {
  const newInstance = await getCodeQLForTesting();
  const getCodeQL = sinon.stub(codeql, "getCodeQL").resolves(undefined);
  const minimalInitCodeQL = sinon
    .stub(uploadLib, "minimalInitCodeQL")
    .resolves(newInstance);

  const state: UploadSarifState = { cachedCodeQL: undefined };
  const result = await getOrInitCodeQL(
    state,
    getRunnerLogger(true),
    { type: util.GitHubVariant.GHES, version: "3.0" },
    createFeatures([]),
    undefined,
  );

  // Check that the right function was called.
  t.true(getCodeQL.notCalled);
  t.true(minimalInitCodeQL.calledOnce);

  // And that we received the instance that we expected.
  t.truthy(result);
  t.is(result, newInstance);

  // And that it was cached.
  t.is(state.cachedCodeQL, newInstance);
});

test("getOrInitCodeQL - uses getCodeQL when there's a config", async (t) => {
  const newInstance = await getCodeQLForTesting();
  const getCodeQL = sinon.stub(codeql, "getCodeQL").resolves(newInstance);
  const minimalInitCodeQL = sinon
    .stub(uploadLib, "minimalInitCodeQL")
    .resolves(undefined);
  const config = createTestConfig({});

  const state: UploadSarifState = { cachedCodeQL: undefined };
  const result = await getOrInitCodeQL(
    state,
    getRunnerLogger(true),
    { type: util.GitHubVariant.GHES, version: "3.0" },
    createFeatures([]),
    config,
  );

  // Check that the right function was called.
  t.true(getCodeQL.calledOnce);
  t.true(minimalInitCodeQL.notCalled);

  // And that we received the instance that we expected.
  t.truthy(result);
  t.is(result, newInstance);

  // And that it was cached.
  t.is(state.cachedCodeQL, newInstance);
});

interface UploadSarifExpectedResult {
  uploadResult?: UploadResult;
  expectedFiles?: string[];
@@ -118,8 +31,6 @@ function mockPostProcessSarifFiles() {
      sinon.match.any,
      sinon.match.any,
      sinon.match.any,
      sinon.match.any,
      sinon.match.any,
      analysisConfig,
    )
    .resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" });
@@ -162,9 +73,7 @@ const postProcessAndUploadSarifMacro = test.macro({

    const actual = await postProcessAndUploadSarif(
      logger,
      tempDir,
      features,
      async () => getCodeQLForTesting(),
      "always",
      "",
      testPath,
@@ -181,8 +90,6 @@ const postProcessAndUploadSarifMacro = test.macro({
      postProcessSarifFiles.calledWith(
        logger,
        features,
        sinon.match.func,
        tempDir,
        sinon.match.any,
        analysisKindResult.expectedFiles?.map(toFullPath) ??
          fullSarifPaths,
@@ -314,9 +221,7 @@ test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t)

  const actual = await postProcessAndUploadSarif(
    logger,
    tempDir,
    features,
    () => getCodeQLForTesting(),
    "never",
    "",
    tempDir,
@@ -343,9 +248,7 @@ test("postProcessAndUploadSarif writes post-processed SARIF files if output dire
  const postProcessedOutPath = path.join(tempDir, "post-processed");
  const actual = await postProcessAndUploadSarif(
    logger,
    tempDir,
    features,
    () => getCodeQLForTesting(),
    "never",
    "",
    tempDir,
+1 -42
@@ -1,57 +1,20 @@
import { UploadKind } from "./actions-util";
import * as analyses from "./analyses";
import type { CodeQL } from "./codeql";
import * as codeql from "./codeql";
import { Config } from "./config-utils";
import { FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import * as upload_lib from "./upload-lib";
import { GitHubVersion, unsafeEntriesInvariant } from "./util";

export interface UploadSarifState {
  /** The cached `CodeQL` instance, if any. */
  cachedCodeQL: CodeQL | undefined;
}
import { unsafeEntriesInvariant } from "./util";

// Maps analysis kinds to SARIF IDs.
export type UploadSarifResults = Partial<
  Record<analyses.AnalysisKind, upload_lib.UploadResult>
>;

/** Get or initialise a `CodeQL` instance for use by the `upload-sarif` action. */
export async function getOrInitCodeQL(
  actionState: UploadSarifState,
  logger: Logger,
  gitHubVersion: GitHubVersion,
  features: FeatureEnablement,
  config: Config | undefined,
): Promise<CodeQL> {
  // Return the cached instance, if we have one.
  if (actionState.cachedCodeQL !== undefined) return actionState.cachedCodeQL;

  // If we have been able to load a `Config` from an earlier CodeQL Action step in the job,
  // then use the CodeQL executable that we have used previously. Otherwise, initialise the
  // CLI specifically for `upload-sarif`. Either way, we cache the instance.
  if (config !== undefined) {
    actionState.cachedCodeQL = await codeql.getCodeQL(config.codeQLCmd);
  } else {
    actionState.cachedCodeQL = await upload_lib.minimalInitCodeQL(
      logger,
      gitHubVersion,
      features,
    );
  }

  return actionState.cachedCodeQL;
}
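
For illustration, a minimal usage sketch of the get-or-init pattern above (not part of the diff; `logger`, `gitHubVersion`, `features`, and `config` are assumed to come from the surrounding action setup):

const state: UploadSarifState = { cachedCodeQL: undefined };

// The first call initialises an instance (via `codeql.getCodeQL` when a
// config exists, `upload_lib.minimalInitCodeQL` otherwise) and caches it.
const first = await getOrInitCodeQL(state, logger, gitHubVersion, features, config);

// Subsequent calls return the cached instance without re-initialising.
const second = await getOrInitCodeQL(state, logger, gitHubVersion, features, config);
// `first === second` holds here.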

/**
 * Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services.
 *
 * @param logger The logger to use.
 * @param tempPath The path to the temporary directory.
 * @param features Information about enabled features.
 * @param getCodeQL A function to retrieve a `CodeQL` instance.
 * @param uploadKind The kind of upload that is requested.
 * @param checkoutPath The path where the repository was checked out.
 * @param sarifPath The path to the file or directory to upload.
@@ -62,9 +25,7 @@ export async function getOrInitCodeQL(
 */
export async function postProcessAndUploadSarif(
  logger: Logger,
  tempPath: string,
  features: FeatureEnablement,
  getCodeQL: upload_lib.CodeQLGetter,
  uploadKind: UploadKind,
  checkoutPath: string,
  sarifPath: string,
@@ -84,8 +45,6 @@ export async function postProcessAndUploadSarif(
  const postProcessingResults = await upload_lib.postProcessSarifFiles(
    logger,
    features,
    getCodeQL,
    tempPath,
    checkoutPath,
    sarifFiles,
    category,
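
As the test call sites above suggest, the `getCodeQL` parameter is a lazy async factory rather than a ready-made `CodeQL` instance. A hedged sketch of such a getter (the CLI path is hypothetical; `codeql.getCodeQL` is the same helper the diff calls with `config.codeQLCmd`):

// Passing a thunk lets `postProcessAndUploadSarif` defer CLI initialisation
// until post-processing actually needs a CodeQL instance.
const lazyCodeQL: upload_lib.CodeQLGetter = async () =>
  codeql.getCodeQL("/opt/hostedtoolcache/CodeQL/codeql"); // hypothetical path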

@@ -563,28 +563,3 @@ test("joinAtMost - truncates list if array is > than limit", (t) => {
  t.assert(result.includes("test5"));
  t.false(result.includes("test6"));
});

test("Result.success creates a success result", (t) => {
  const result = util.Result.success("test value");
  t.true(result.isSuccess());
  t.false(result.isFailure());
  t.is(result.value, "test value");
});

test("Result.failure creates a failure result", (t) => {
  const error = new Error("test error");
  const result = util.Result.failure(error);
  t.false(result.isSuccess());
  t.true(result.isFailure());
  t.is(result.value, error);
});

test("Result.orElse returns the value for a success result", (t) => {
  const result = util.Result.success("success value");
  t.is(result.orElse("default value"), "success value");
});

test("Result.orElse returns the default value for a failure result", (t) => {
  const result = util.Result.failure(new Error("test error"));
  t.is(result.orElse("default value"), "default value");
});

+5 -43
@@ -690,7 +690,11 @@ export class HTTPError extends Error {
 * An Error class that indicates an error that occurred due to
 * a misconfiguration of the action or the CodeQL CLI.
 */
export class ConfigurationError extends Error {}
export class ConfigurationError extends Error {
  constructor(message: string) {
    super(message);
  }
}

export function asHTTPError(arg: any): HTTPError | undefined {
  if (
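
For context, a minimal sketch (not from the diff) of how a `ConfigurationError` is typically used; the helper and message here are hypothetical:

function requireCategory(category: string | undefined): string {
  if (category === undefined) {
    // A user-fixable problem: surfaced as a configuration error rather
    // than an internal failure.
    throw new ConfigurationError("No category was provided."); // hypothetical message
  }
  return category;
}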

@@ -740,7 +744,6 @@ export async function bundleDb(
  language: Language,
  codeql: CodeQL,
  dbName: string,
  { includeDiagnostics }: { includeDiagnostics: boolean },
) {
  const databasePath = getCodeQLDatabasePath(config, language);
  const databaseBundlePath = path.resolve(config.dbLocation, `${dbName}.zip`);
@@ -771,7 +774,6 @@ export async function bundleDb(
    databasePath,
    databaseBundlePath,
    dbName,
    includeDiagnostics,
    additionalFiles,
  );
  return databaseBundlePath;
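
A hedged call sketch for `bundleDb` (assuming, from the body above, that `config` is the first parameter and carries `dbLocation`; the database name is illustrative):

// Bundles the database for `language` into `<config.dbLocation>/<dbName>.zip`
// and returns that path.
const bundlePath = await bundleDb(config, language, codeql, `${language}-db`, {
  includeDiagnostics: false,
});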

@@ -1290,43 +1292,3 @@ export function joinAtMost(

  return array.join(separator);
}

/** A success result. */
type Success<T> = Result<T, never>;
/** A failure result. */
type Failure<E> = Result<never, E>;

/**
 * A simple result type representing either a success or a failure.
 */
export class Result<T, E> {
  private constructor(
    private readonly _ok: boolean,
    public readonly value: T | E,
  ) {}

  /** Creates a success result. */
  static success<T>(value: T): Success<T> {
    return new Result(true, value) as Success<T>;
  }

  /** Creates a failure result. */
  static failure<E>(value: E): Failure<E> {
    return new Result(false, value) as Failure<E>;
  }

  /** Whether this result represents a success. */
  isSuccess(): this is Success<T> {
    return this._ok;
  }

  /** Whether this result represents a failure. */
  isFailure(): this is Failure<E> {
    return !this._ok;
  }

  /** Get the value if this is a success, or return the default value if this is a failure. */
  orElse<U>(defaultValue: U): T | U {
    return this.isSuccess() ? this.value : defaultValue;
  }
}

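A minimal sketch (not from the diff) showing how the `this is` type guards narrow `value` from `T | E` to the side-specific type; `parsePort` is a hypothetical helper:

function parsePort(raw: string): Result<number, Error> {
  const port = Number(raw);
  return Number.isInteger(port) && port > 0
    ? Result.success(port)
    : Result.failure(new Error(`invalid port: ${raw}`));
}

const result = parsePort("8080");
if (result.isSuccess()) {
  console.log(result.value + 1); // `value` is narrowed to `number` here
} else if (result.isFailure()) {
  console.error(result.value.message); // `value` is narrowed to `Error` here
}

// Or, with a fallback via `orElse`:
const port = parsePort(process.env.PORT ?? "").orElse(8080);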