Merge pull request #3494 from github/update-v4.32.4-39ba80c47

This commit is contained in:
Michael B. Gale
2026-02-20 14:15:31 +00:00
committed by GitHub
45 changed files with 27259 additions and 24818 deletions
+13 -4
View File
@@ -71,8 +71,9 @@ def open_pr(
body.append('')
body.append('Contains the following pull requests:')
for pr in pull_requests:
merger = get_merger_of_pr(repo, pr)
body.append(f'- #{pr.number} (@{merger})')
# Use PR author if they are GitHub staff, otherwise use the merger
display_user = get_pr_author_if_staff(pr) or get_merger_of_pr(repo, pr)
body.append(f'- #{pr.number} (@{display_user})')
# List all commits not part of a PR
if len(commits_without_pull_requests) > 0:
@@ -168,6 +169,14 @@ def get_pr_for_commit(commit):
def get_merger_of_pr(repo, pr):
return repo.get_commit(pr.merge_commit_sha).author.login
# Get the PR author if they are GitHub staff, otherwise None.
def get_pr_author_if_staff(pr):
if pr.user is None:
return None
if getattr(pr.user, 'site_admin', False):
return pr.user.login
return None
def get_current_version():
with open('package.json', 'r') as f:
return json.load(f)['version']
@@ -181,9 +190,9 @@ def replace_version_package_json(prev_version, new_version):
print(line.replace(prev_version, new_version), end='')
else:
prev_line_is_codeql = False
print(line, end='')
print(line, end='')
if '\"name\": \"codeql\",' in line:
prev_line_is_codeql = True
prev_line_is_codeql = True
def get_today_string():
today = datetime.datetime.today()
@@ -3,7 +3,7 @@
# pr-checks/sync.sh
# to regenerate this file.
name: PR Check - Quality queries input
name: PR Check - Analysis kinds
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
@@ -29,9 +29,9 @@ defaults:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group: quality-queries-${{github.ref}}
group: analysis-kinds-${{github.ref}}
jobs:
quality-queries:
analysis-kinds:
strategy:
fail-fast: false
matrix:
@@ -45,6 +45,9 @@ jobs:
- os: ubuntu-latest
version: linked
analysis-kinds: code-scanning,code-quality
- os: ubuntu-latest
version: linked
analysis-kinds: risk-assessment
- os: ubuntu-latest
version: nightly-latest
analysis-kinds: code-scanning
@@ -54,7 +57,10 @@ jobs:
- os: ubuntu-latest
version: nightly-latest
analysis-kinds: code-scanning,code-quality
name: Quality queries input
- os: ubuntu-latest
version: nightly-latest
analysis-kinds: risk-assessment
name: Analysis kinds
if: github.triggering_actor != 'dependabot[bot]'
permissions:
contents: read
@@ -81,30 +87,24 @@ jobs:
output: ${{ runner.temp }}/results
upload-database: false
post-processed-sarif-path: ${{ runner.temp }}/post-processed
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
- name: Upload SARIF files
uses: actions/upload-artifact@v6
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
path: ${{ runner.temp }}/results/javascript.sarif
retention-days: 7
- name: Upload quality SARIF
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/upload-artifact@v6
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: ${{ runner.temp }}/results/javascript.quality.sarif
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/results/*.sarif
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: ${{ runner.temp }}/post-processed
retention-days: 7
if-no-files-found: error
- name: Check quality query does not appear in security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
@@ -122,6 +122,7 @@ jobs:
with:
script: ${{ env.CHECK_SCRIPT }}
env:
CODEQL_ACTION_RISK_ASSESSMENT_ID: 1
CHECK_SCRIPT: |
const fs = require('fs');
+69
View File
@@ -0,0 +1,69 @@
# Warning: This file is generated automatically, and should not be modified.
# Instead, please modify the template in the pr-checks directory and run:
# pr-checks/sync.sh
# to regenerate this file.
name: 'PR Check - Bundle: From nightly'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GO111MODULE: auto
on:
push:
branches:
- main
- releases/v*
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review
schedule:
- cron: '0 5 * * *'
workflow_dispatch:
inputs: {}
workflow_call:
inputs: {}
defaults:
run:
shell: bash
concurrency:
cancel-in-progress: ${{ github.event_name == 'pull_request' || false }}
group: bundle-from-nightly-${{github.ref}}
jobs:
bundle-from-nightly:
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
version: linked
name: 'Bundle: From nightly'
if: github.triggering_actor != 'dependabot[bot]'
permissions:
contents: read
security-events: read
timeout-minutes: 45
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v6
- name: Prepare test
id: prepare-test
uses: ./.github/actions/prepare-test
with:
version: ${{ matrix.version }}
use-all-platform-bundle: 'false'
setup-kotlin: 'true'
- id: init
uses: ./../action/init
env:
CODEQL_ACTION_FORCE_NIGHTLY: true
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
run: exit 1
env:
CODEQL_ACTION_TEST_MODE: true
+8
View File
@@ -2,6 +2,14 @@
See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
## 4.32.4 - 20 Feb 2026
- Update default CodeQL bundle version to [2.24.2](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.24.2). [#3493](https://github.com/github/codeql-action/pull/3493)
- Added an experimental change which improves how certificates are generated for the authentication proxy that is used by the CodeQL Action in Default Setup when [private package registries are configured](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries). This is expected to generate more widely compatible certificates and should have no impact on analyses which are working correctly already. We expect to roll this change out to everyone in February. [#3473](https://github.com/github/codeql-action/pull/3473)
- When the CodeQL Action is run [with debugging enabled in Default Setup](https://docs.github.com/en/code-security/how-tos/scan-code-for-vulnerabilities/troubleshooting/troubleshooting-analysis-errors/logs-not-detailed-enough#creating-codeql-debugging-artifacts-for-codeql-default-setup) and [private package registries are configured](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries), the "Setup proxy for registries" step will output additional diagnostic information that can be used for troubleshooting. [#3486](https://github.com/github/codeql-action/pull/3486)
- Added a setting which allows the CodeQL Action to enable network debugging for Java programs. This will help GitHub staff support customers with troubleshooting issues in GitHub-managed CodeQL workflows, such as Default Setup. This setting can only be enabled by GitHub staff. [#3485](https://github.com/github/codeql-action/pull/3485)
- Added a setting which enables GitHub-managed workflows, such as Default Setup, to use a [nightly CodeQL CLI release](https://github.com/dsp-testing/codeql-cli-nightlies) instead of the latest, stable release that is used by default. This will help GitHub staff support customers whose analyses for a given repository or organization require early access to a change in an upcoming CodeQL CLI release. This setting can only be enabled by GitHub staff. [#3484](https://github.com/github/codeql-action/pull/3484)
## 4.32.3 - 13 Feb 2026
- Added experimental support for testing connections to [private package registries](https://docs.github.com/en/code-security/how-tos/secure-at-scale/configure-organization-security/manage-usage-and-access/giving-org-access-private-registries). This feature is not currently enabled for any analysis. In the future, it may be enabled by default for Default Setup. [#3466](https://github.com/github/codeql-action/pull/3466)
+447 -413
View File
File diff suppressed because it is too large Load Diff
+549 -440
View File
File diff suppressed because it is too large Load Diff
+449 -415
View File
File diff suppressed because it is too large Load Diff
+4 -4
View File
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.24.1",
"cliVersion": "2.24.1",
"priorBundleVersion": "codeql-bundle-v2.24.0",
"priorCliVersion": "2.24.0"
"bundleVersion": "codeql-bundle-v2.24.2",
"cliVersion": "2.24.2",
"priorBundleVersion": "codeql-bundle-v2.24.1",
"priorCliVersion": "2.24.1"
}
+1108 -958
View File
File diff suppressed because it is too large Load Diff
+531 -433
View File
File diff suppressed because it is too large Load Diff
+447 -413
View File
File diff suppressed because it is too large Load Diff
+775 -656
View File
File diff suppressed because it is too large Load Diff
+447 -413
View File
File diff suppressed because it is too large Load Diff
+18871 -18663
View File
File diff suppressed because it is too large Load Diff
+855 -690
View File
File diff suppressed because it is too large Load Diff
+447 -413
View File
File diff suppressed because it is too large Load Diff
+842 -688
View File
File diff suppressed because it is too large Load Diff
+6 -6
View File
@@ -1,12 +1,12 @@
{
"name": "codeql",
"version": "4.32.3",
"version": "4.32.4",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "codeql",
"version": "4.32.3",
"version": "4.32.4",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^5.0.3",
@@ -5452,9 +5452,9 @@
"license": "MIT"
},
"node_modules/fast-xml-parser": {
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.4.tgz",
"integrity": "sha512-EFd6afGmXlCx8H8WTZHhAoDaWaGyuIBoZJ2mknrNxug+aZKjkp0a0dlars9Izl+jF+7Gu1/5f/2h68cQpe0IiA==",
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.6.tgz",
"integrity": "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA==",
"funding": [
{
"type": "github",
@@ -5463,7 +5463,7 @@
],
"license": "MIT",
"dependencies": {
"strnum": "^2.1.0"
"strnum": "^2.1.2"
},
"bin": {
"fxparser": "src/cli/cli.js"
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "4.32.3",
"version": "4.32.4",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -1,8 +1,9 @@
name: "Quality queries input"
description: "Tests that queries specified in the quality-queries input are used."
name: "Analysis kinds"
description: "Tests basic functionality for different `analysis-kinds` inputs."
versions: ["linked", "nightly-latest"]
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality"]
analysisKinds: ["code-scanning", "code-quality", "code-scanning,code-quality", "risk-assessment"]
env:
CODEQL_ACTION_RISK_ASSESSMENT_ID: 1
CHECK_SCRIPT: |
const fs = require('fs');
@@ -37,30 +38,24 @@ steps:
output: "${{ runner.temp }}/results"
upload-database: false
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
- name: Upload SARIF files
uses: actions/upload-artifact@v6
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
path: "${{ runner.temp }}/results/javascript.sarif"
retention-days: 7
- name: Upload quality SARIF
if: contains(matrix.analysis-kinds, 'code-quality')
uses: actions/upload-artifact@v6
with:
name: |
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: "${{ runner.temp }}/results/javascript.quality.sarif"
analysis-kinds-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: "${{ runner.temp }}/results/*.sarif"
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v6
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}
path: "${{ runner.temp }}/post-processed"
retention-days: 7
if-no-files-found: error
- name: Check quality query does not appear in security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
+15
View File
@@ -0,0 +1,15 @@
name: "Bundle: From nightly"
description: "The nightly CodeQL bundle should be used when forced"
versions:
- linked # overruled by the FF set below
steps:
- id: init
uses: ./../action/init
env:
CODEQL_ACTION_FORCE_NIGHTLY: true
with:
tools: ${{ steps.prepare-test.outputs.tools-url }}
languages: javascript
- name: Fail if the CodeQL version is not a nightly
if: "!contains(steps.init.outputs.codeql-version, '+')"
run: exit 1
+112
View File
@@ -1,15 +1,23 @@
import path from "path";
import test from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import {
AnalysisKind,
CodeScanning,
compatibilityMatrix,
RiskAssessment,
getAnalysisConfig,
getAnalysisKinds,
parseAnalysisKinds,
supportedAnalysisKinds,
} from "./analyses";
import { EnvVar } from "./environment";
import { getRunnerLogger } from "./logging";
import { setupTests } from "./testing-utils";
import { AssessmentPayload } from "./upload-lib/types";
import { ConfigurationError } from "./util";
setupTests(test);
@@ -67,3 +75,107 @@ test("getAnalysisKinds - throws if `analysis-kinds` input is invalid", async (t)
requiredInputStub.withArgs("analysis-kinds").returns("no-such-thing");
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true));
});
// Test the compatibility matrix by looping through all analysis kinds.
const analysisKinds = Object.values(AnalysisKind);
for (let i = 0; i < analysisKinds.length; i++) {
const analysisKind = analysisKinds[i];
for (let j = i + 1; j < analysisKinds.length; j++) {
const otherAnalysis = analysisKinds[j];
if (analysisKind === otherAnalysis) continue;
if (compatibilityMatrix[analysisKind].has(otherAnalysis)) {
test(`getAnalysisKinds - allows ${analysisKind} with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
const result = await getAnalysisKinds(getRunnerLogger(true), true);
t.is(result.length, 2);
});
} else {
test(`getAnalysisKinds - throws if ${analysisKind} is enabled with ${otherAnalysis}`, async (t) => {
const requiredInputStub = sinon.stub(actionsUtil, "getRequiredInput");
requiredInputStub
.withArgs("analysis-kinds")
.returns([analysisKind, otherAnalysis].join(","));
await t.throwsAsync(getAnalysisKinds(getRunnerLogger(true), true), {
instanceOf: ConfigurationError,
message: `${analysisKind} and ${otherAnalysis} cannot be enabled at the same time`,
});
});
}
}
}
test("Code Scanning configuration does not accept other SARIF extensions", (t) => {
for (const analysisKind of supportedAnalysisKinds) {
if (analysisKind === AnalysisKind.CodeScanning) continue;
const analysis = getAnalysisConfig(analysisKind);
const sarifPath = path.join("path", "to", `file${analysis.sarifExtension}`);
// The Code Scanning configuration's `sarifPredicate` should not accept a path which
// ends in a different configuration's `sarifExtension`.
t.false(CodeScanning.sarifPredicate(sarifPath));
}
});
test("Risk Assessment configuration transforms SARIF upload payload", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "1";
const payload = RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}) as AssessmentPayload;
const expected: AssessmentPayload = { sarif: "sarif", assessment_id: 1 };
t.deepEqual(expected, payload);
});
test("Risk Assessment configuration throws for negative assessment IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "-1";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: `),
},
);
});
test("Risk Assessment configuration throws for invalid IDs", (t) => {
process.env[EnvVar.RISK_ASSESSMENT_ID] = "foo";
t.throws(
() =>
RiskAssessment.transformPayload({
commit_oid: "abc",
sarif: "sarif",
ref: "ref",
workflow_run_attempt: 1,
workflow_run_id: 1,
checkout_uri: "uri",
tool_names: [],
}),
{
instanceOf: Error,
message: (msg) =>
msg.startsWith(`${EnvVar.RISK_ASSESSMENT_ID} must not be NaN: `),
},
);
});
+80 -6
View File
@@ -3,14 +3,30 @@ import {
getOptionalInput,
getRequiredInput,
} from "./actions-util";
import { EnvVar } from "./environment";
import { Logger } from "./logging";
import { ConfigurationError } from "./util";
import {
AssessmentPayload,
BasePayload,
UploadPayload,
} from "./upload-lib/types";
import { ConfigurationError, getRequiredEnvParam } from "./util";
export enum AnalysisKind {
CodeScanning = "code-scanning",
CodeQuality = "code-quality",
RiskAssessment = "risk-assessment",
}
export type CompatibilityMatrix = Record<AnalysisKind, Set<AnalysisKind>>;
/** A mapping from analysis kinds to other analysis kinds which can be enabled concurrently. */
export const compatibilityMatrix: CompatibilityMatrix = {
[AnalysisKind.CodeScanning]: new Set([AnalysisKind.CodeQuality]),
[AnalysisKind.CodeQuality]: new Set([AnalysisKind.CodeScanning]),
[AnalysisKind.RiskAssessment]: new Set(),
};
// Exported for testing. A set of all known analysis kinds.
export const supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
@@ -67,7 +83,7 @@ export async function getAnalysisKinds(
return cachedAnalysisKinds;
}
cachedAnalysisKinds = await parseAnalysisKinds(
const analysisKinds = await parseAnalysisKinds(
getRequiredInput("analysis-kinds"),
);
@@ -85,12 +101,27 @@ export async function getAnalysisKinds(
// if an input to `quality-queries` was specified. We should remove this once
// `quality-queries` is no longer used.
if (
!cachedAnalysisKinds.includes(AnalysisKind.CodeQuality) &&
!analysisKinds.includes(AnalysisKind.CodeQuality) &&
qualityQueriesInput !== undefined
) {
cachedAnalysisKinds.push(AnalysisKind.CodeQuality);
analysisKinds.push(AnalysisKind.CodeQuality);
}
// Check that all enabled analysis kinds are compatible with each other.
for (const analysisKind of analysisKinds) {
for (const otherAnalysisKind of analysisKinds) {
if (analysisKind === otherAnalysisKind) continue;
if (!compatibilityMatrix[analysisKind].has(otherAnalysisKind)) {
throw new ConfigurationError(
`${analysisKind} and ${otherAnalysisKind} cannot be enabled at the same time`,
);
}
}
}
// Cache the analysis kinds and return them.
cachedAnalysisKinds = analysisKinds;
return cachedAnalysisKinds;
}
@@ -101,6 +132,7 @@ export const codeQualityQueries: string[] = ["code-quality"];
enum SARIF_UPLOAD_ENDPOINT {
CODE_SCANNING = "PUT /repos/:owner/:repo/code-scanning/analysis",
CODE_QUALITY = "PUT /repos/:owner/:repo/code-quality/analysis",
RISK_ASSESSMENT = "PUT /repos/:owner/:repo/code-scanning/risk-assessment",
}
// Represents configurations for different analysis kinds.
@@ -120,6 +152,8 @@ export interface AnalysisConfig {
fixCategory: (logger: Logger, category?: string) => string | undefined;
/** A prefix for environment variables used to track the uniqueness of SARIF uploads. */
sentinelPrefix: string;
/** Transforms the upload payload in an analysis-specific way. */
transformPayload: (payload: UploadPayload) => BasePayload;
}
// Represents the Code Scanning analysis configuration.
@@ -130,9 +164,11 @@ export const CodeScanning: AnalysisConfig = {
sarifExtension: ".sarif",
sarifPredicate: (name) =>
name.endsWith(CodeScanning.sarifExtension) &&
!CodeQuality.sarifPredicate(name),
!CodeQuality.sarifPredicate(name) &&
!RiskAssessment.sarifPredicate(name),
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_",
transformPayload: (payload) => payload,
};
// Represents the Code Quality analysis configuration.
@@ -144,6 +180,38 @@ export const CodeQuality: AnalysisConfig = {
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_",
transformPayload: (payload) => payload,
};
/**
* Retrieves the CSRA assessment id from an environment variable and adds it to the payload.
* @param payload The base payload.
*/
function addAssessmentId(payload: UploadPayload): AssessmentPayload {
const rawAssessmentId = getRequiredEnvParam(EnvVar.RISK_ASSESSMENT_ID);
const assessmentId = parseInt(rawAssessmentId, 10);
if (Number.isNaN(assessmentId)) {
throw new Error(
`${EnvVar.RISK_ASSESSMENT_ID} must not be NaN: ${rawAssessmentId}`,
);
}
if (assessmentId < 0) {
throw new Error(
`${EnvVar.RISK_ASSESSMENT_ID} must not be negative: ${rawAssessmentId}`,
);
}
return { sarif: payload.sarif, assessment_id: assessmentId };
}
export const RiskAssessment: AnalysisConfig = {
kind: AnalysisKind.RiskAssessment,
name: "code scanning risk assessment",
target: SARIF_UPLOAD_ENDPOINT.RISK_ASSESSMENT,
sarifExtension: ".csra.sarif",
sarifPredicate: (name) => name.endsWith(RiskAssessment.sarifExtension),
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_CSRA_SARIF_",
transformPayload: addAssessmentId,
};
/**
@@ -160,6 +228,8 @@ export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
return CodeScanning;
case AnalysisKind.CodeQuality:
return CodeQuality;
case AnalysisKind.RiskAssessment:
return RiskAssessment;
}
}
@@ -167,4 +237,8 @@ export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
// we want to scan a folder containing SARIF files in an order that finds the more
// specific extensions first. This constant defines an array in the order of analyis
// configurations with more specific extensions to less specific extensions.
export const SarifScanOrder = [CodeQuality, CodeScanning];
export const SarifScanOrder: AnalysisConfig[] = [
RiskAssessment,
CodeQuality,
CodeScanning,
];
+2 -1
View File
@@ -4,7 +4,7 @@ import * as path from "path";
import test from "ava";
import * as sinon from "sinon";
import { CodeQuality, CodeScanning } from "./analyses";
import { CodeQuality, CodeScanning, RiskAssessment } from "./analyses";
import {
runQueries,
defaultSuites,
@@ -155,5 +155,6 @@ test("addSarifExtension", (t) => {
addSarifExtension(CodeQuality, language),
`${language}.quality.sarif`,
);
t.is(addSarifExtension(RiskAssessment, language), `${language}.csra.sarif`);
}
});
+3 -6
View File
@@ -549,12 +549,9 @@ export async function runQueries(
): Promise<{ summary: string; sarifFile: string }> {
logger.info(`Interpreting ${analysis.name} results for ${language}`);
// If this is a Code Quality analysis, correct the category to one
// accepted by the Code Quality backend.
let category = automationDetailsId;
if (analysis.kind === analyses.AnalysisKind.CodeQuality) {
category = analysis.fixCategory(logger, automationDetailsId);
}
// Apply the analysis configuration's `fixCategory` function to adjust the category if needed.
// This is a no-op for Code Scanning.
const category = analysis.fixCategory(logger, automationDetailsId);
const sarifFile = path.join(
sarifFolder,
+20 -1
View File
@@ -7,7 +7,7 @@ import * as yaml from "js-yaml";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import { AnalysisKind, supportedAnalysisKinds } from "./analyses";
import * as api from "./api-client";
import { CachingKind } from "./caching-utils";
import { createStubCodeQL } from "./codeql";
@@ -1829,3 +1829,22 @@ test("hasActionsWorkflows doesn't throw if workflows folder doesn't exist", asyn
t.notThrows(() => configUtils.hasActionsWorkflows(tmpDir));
});
});
test("getPrimaryAnalysisConfig - single analysis kind", (t) => {
// If only one analysis kind is configured, we expect to get the matching configuration.
for (const analysisKind of supportedAnalysisKinds) {
const singleKind = createTestConfig({ analysisKinds: [analysisKind] });
t.is(configUtils.getPrimaryAnalysisConfig(singleKind).kind, analysisKind);
}
});
test("getPrimaryAnalysisConfig - Code Scanning + Code Quality", (t) => {
// For CS+CQ, we expect to get the Code Scanning configuration.
const codeScanningAndCodeQuality = createTestConfig({
analysisKinds: [AnalysisKind.CodeScanning, AnalysisKind.CodeQuality],
});
t.is(
configUtils.getPrimaryAnalysisConfig(codeScanningAndCodeQuality).kind,
AnalysisKind.CodeScanning,
);
});
+11 -13
View File
@@ -12,9 +12,8 @@ import {
import {
AnalysisConfig,
AnalysisKind,
CodeQuality,
codeQualityQueries,
CodeScanning,
getAnalysisConfig,
} from "./analyses";
import * as api from "./api-client";
import { CachingKind, getCachingKind } from "./caching-utils";
@@ -1389,28 +1388,27 @@ export function isCodeQualityEnabled(config: Config): boolean {
}
/**
* Returns the primary analysis kind that the Action is initialised with. This is
* always `AnalysisKind.CodeScanning` unless `AnalysisKind.CodeScanning` is not enabled.
* Returns the primary analysis kind that the Action is initialised with. If there is only
* one analysis kind, then that is returned.
*
* @returns Returns `AnalysisKind.CodeScanning` if `AnalysisKind.CodeScanning` is enabled;
* otherwise `AnalysisKind.CodeQuality`.
* The special case is Code Scanning + Code Quality, which can be enabled at the same time.
* In that case, this function returns Code Scanning.
*/
function getPrimaryAnalysisKind(config: Config): AnalysisKind {
if (config.analysisKinds.length === 1) {
return config.analysisKinds[0];
}
return isCodeScanningEnabled(config)
? AnalysisKind.CodeScanning
: AnalysisKind.CodeQuality;
}
/**
* Returns the primary analysis configuration that the Action is initialised with. This is
* always `CodeScanning` unless `CodeScanning` is not enabled.
*
* @returns Returns `CodeScanning` if `AnalysisKind.CodeScanning` is enabled; otherwise `CodeQuality`.
* Returns the primary analysis configuration that the Action is initialised with.
*/
export function getPrimaryAnalysisConfig(config: Config): AnalysisConfig {
return getPrimaryAnalysisKind(config) === AnalysisKind.CodeScanning
? CodeScanning
: CodeQuality;
return getAnalysisConfig(getPrimaryAnalysisKind(config));
}
/** Logs the Git version as a telemetry diagnostic. */
+4 -4
View File
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.24.1",
"cliVersion": "2.24.1",
"priorBundleVersion": "codeql-bundle-v2.24.0",
"priorCliVersion": "2.24.0"
"bundleVersion": "codeql-bundle-v2.24.2",
"cliVersion": "2.24.2",
"priorBundleVersion": "codeql-bundle-v2.24.1",
"priorCliVersion": "2.24.1"
}
+27 -12
View File
@@ -66,6 +66,12 @@ interface UnwrittenDiagnostic {
/** A list of diagnostics which have not yet been written to disk. */
let unwrittenDiagnostics: UnwrittenDiagnostic[] = [];
/**
* A list of diagnostics which have not yet been written to disk,
* and where the language does not matter.
*/
let unwrittenDefaultLanguageDiagnostics: DiagnosticMessage[] = [];
/**
* Constructs a new diagnostic message with the specified id and name, as well as optional additional data.
*
@@ -119,16 +125,20 @@ export function addDiagnostic(
/** Adds a diagnostic that is not specific to any language. */
export function addNoLanguageDiagnostic(
config: Config,
config: Config | undefined,
diagnostic: DiagnosticMessage,
) {
addDiagnostic(
config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
diagnostic,
);
if (config !== undefined) {
addDiagnostic(
config,
// Arbitrarily choose the first language. We could also choose all languages, but that
// increases the risk of misinterpreting the data.
config.languages[0],
diagnostic,
);
} else {
unwrittenDefaultLanguageDiagnostics.push(diagnostic);
}
}
/**
@@ -188,16 +198,21 @@ export function logUnwrittenDiagnostics() {
/** Writes all unwritten diagnostics to disk. */
export function flushDiagnostics(config: Config) {
const logger = getActionsLogger();
logger.debug(
`Writing ${unwrittenDiagnostics.length} diagnostic(s) to database.`,
);
const diagnosticsCount =
unwrittenDiagnostics.length + unwrittenDefaultLanguageDiagnostics.length;
logger.debug(`Writing ${diagnosticsCount} diagnostic(s) to database.`);
for (const unwritten of unwrittenDiagnostics) {
writeDiagnostic(config, unwritten.language, unwritten.diagnostic);
}
for (const unwritten of unwrittenDefaultLanguageDiagnostics) {
addNoLanguageDiagnostic(config, unwritten);
}
// Reset the unwritten diagnostics array.
// Reset the unwritten diagnostics arrays.
unwrittenDiagnostics = [];
unwrittenDefaultLanguageDiagnostics = [];
}
/**
+3
View File
@@ -141,4 +141,7 @@ export enum EnvVar {
* `getAnalysisKey`, but can also be set manually for testing and non-standard applications.
*/
ANALYSIS_KEY = "CODEQL_ACTION_ANALYSIS_KEY",
/** Used by Code Scanning Risk Assessment to communicate the assessment ID to the CodeQL Action. */
RISK_ASSESSMENT_ID = "CODEQL_ACTION_RISK_ASSESSMENT_ID",
}
+19 -1
View File
@@ -46,7 +46,10 @@ export enum Feature {
DisableJavaBuildlessEnabled = "disable_java_buildless_enabled",
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
ExportDiagnosticsEnabled = "export_diagnostics_enabled",
ForceNightly = "force_nightly",
IgnoreGeneratedFiles = "ignore_generated_files",
ImprovedProxyCertificates = "improved_proxy_certificates",
JavaNetworkDebugging = "java_network_debugging",
OverlayAnalysis = "overlay_analysis",
OverlayAnalysisActions = "overlay_analysis_actions",
OverlayAnalysisCodeScanningActions = "overlay_analysis_code_scanning_actions",
@@ -75,7 +78,7 @@ export enum Feature {
SkipFileCoverageOnPrs = "skip_file_coverage_on_prs",
StartProxyConnectionChecks = "start_proxy_connection_checks",
UploadOverlayDbToApi = "upload_overlay_db_to_api",
UseRepositoryProperties = "use_repository_properties",
UseRepositoryProperties = "use_repository_properties_v2",
ValidateDbConfig = "validate_db_config",
}
@@ -163,11 +166,26 @@ export const featureConfig = {
legacyApi: true,
minimumVersion: undefined,
},
[Feature.ForceNightly]: {
defaultValue: false,
envVar: "CODEQL_ACTION_FORCE_NIGHTLY",
minimumVersion: undefined,
},
[Feature.IgnoreGeneratedFiles]: {
defaultValue: false,
envVar: "CODEQL_ACTION_IGNORE_GENERATED_FILES",
minimumVersion: undefined,
},
[Feature.ImprovedProxyCertificates]: {
defaultValue: false,
envVar: "CODEQL_ACTION_IMPROVED_PROXY_CERTIFICATES",
minimumVersion: undefined,
},
[Feature.JavaNetworkDebugging]: {
defaultValue: false,
envVar: "CODEQL_ACTION_JAVA_NETWORK_DEBUGGING",
minimumVersion: undefined,
},
[Feature.OverlayAnalysis]: {
defaultValue: false,
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS",
+15 -1
View File
@@ -52,7 +52,7 @@ import {
initConfig,
runDatabaseInitCluster,
} from "./init";
import { KnownLanguage } from "./languages";
import { JavaEnvVars, KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import {
downloadOverlayBaseDatabaseFromCache,
@@ -95,6 +95,7 @@ import {
BuildMode,
GitHubVersion,
Result,
getOptionalEnvVar,
} from "./util";
import { checkWorkflow } from "./workflow";
@@ -753,6 +754,19 @@ async function run(startedAt: Date) {
}
}
// Enable Java network debugging if the FF is enabled.
if (await features.getValue(Feature.JavaNetworkDebugging)) {
// Get the existing value of `JAVA_TOOL_OPTIONS`, if any.
const existingJavaToolOptions =
getOptionalEnvVar(JavaEnvVars.JAVA_TOOL_OPTIONS) || "";
// Add the network debugging options.
core.exportVariable(
JavaEnvVars.JAVA_TOOL_OPTIONS,
`${existingJavaToolOptions} -Djavax.net.debug=all`,
);
}
// Write diagnostics to the database that we previously stored in memory because the database
// did not exist until now.
flushDiagnostics(config);
+8
View File
@@ -19,3 +19,11 @@ export enum KnownLanguage {
rust = "rust",
swift = "swift",
}
/** Java-specific environment variable names that we may care about. */
export enum JavaEnvVars {
  // Path of the active JDK installation.
  JAVA_HOME = "JAVA_HOME",
  // Extra JVM options; picked up by Java launchers (see init-action's network
  // debugging support, which appends to this variable).
  JAVA_TOOL_OPTIONS = "JAVA_TOOL_OPTIONS",
  // Extra options for the `java` launcher only (JDK 9+).
  JDK_JAVA_OPTIONS = "JDK_JAVA_OPTIONS",
  // Vendor-specific extra JVM options variable.
  _JAVA_OPTIONS = "_JAVA_OPTIONS",
}
+127 -6
View File
@@ -1,18 +1,22 @@
import * as path from "path";
import * as github from "@actions/github";
import * as toolcache from "@actions/tool-cache";
import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import * as api from "./api-client";
import { Feature, FeatureEnablement } from "./feature-flags";
import { getRunnerLogger } from "./logging";
import * as setupCodeql from "./setup-codeql";
import * as tar from "./tar";
import {
LINKED_CLI_VERSION,
LoggedMessage,
SAMPLE_DEFAULT_CLI_VERSION,
SAMPLE_DOTCOM_API_DETAILS,
checkExpectedLogMessages,
createFeatures,
getRecordingLogger,
initializeFeatures,
@@ -268,13 +272,127 @@ test("setupCodeQLBundle logs the CodeQL CLI version being used when asked to dow
});
});
test("getCodeQLSource correctly returns nightly CLI version when tools == nightly", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([]);
const expectedDate = "30260213";
const expectedTag = `codeql-bundle-${expectedDate}`;
// Ensure that we consistently select "zstd" for the test.
sinon.stub(process, "platform").value("linux");
sinon.stub(tar, "isZstdAvailable").resolves({
available: true,
foundZstdBinary: true,
});
const client = github.getOctokit("123");
const listReleases = sinon.stub(client.rest.repos, "listReleases");
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
listReleases.resolves({
data: [{ tag_name: expectedTag }],
} as any);
sinon.stub(api, "getApiClient").value(() => client);
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
const source = await setupCodeql.getCodeQLSource(
"nightly",
SAMPLE_DEFAULT_CLI_VERSION,
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
logger,
);
// Check that the `CodeQLToolsSource` object matches our expectations.
const expectedVersion = `0.0.0-${expectedDate}`;
const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`;
t.deepEqual(source, {
bundleVersion: expectedDate,
cliVersion: undefined,
codeqlURL: expectedURL,
compressionMethod: "zstd",
sourceType: "download",
toolsVersion: expectedVersion,
} satisfies setupCodeql.CodeQLToolsSource);
// Afterwards, ensure that we see the expected messages in the log.
checkExpectedLogMessages(t, loggedMessages, [
"Using the latest CodeQL CLI nightly, as requested by 'tools: nightly'.",
`Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`,
`Attempting to obtain CodeQL tools. CLI version: unknown, bundle tag name: ${expectedTag}`,
`Using CodeQL CLI sourced from ${expectedURL}`,
]);
});
});
test("getCodeQLSource correctly returns nightly CLI version when forced by FF", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([Feature.ForceNightly]);
const expectedDate = "30260213";
const expectedTag = `codeql-bundle-${expectedDate}`;
// Ensure that we consistently select "zstd" for the test.
sinon.stub(process, "platform").value("linux");
sinon.stub(tar, "isZstdAvailable").resolves({
available: true,
foundZstdBinary: true,
});
const client = github.getOctokit("123");
const listReleases = sinon.stub(client.rest.repos, "listReleases");
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
listReleases.resolves({
data: [{ tag_name: expectedTag }],
} as any);
sinon.stub(api, "getApiClient").value(() => client);
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
process.env["GITHUB_EVENT_NAME"] = "dynamic";
const source = await setupCodeql.getCodeQLSource(
undefined,
SAMPLE_DEFAULT_CLI_VERSION,
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
features,
logger,
);
// Check that the `CodeQLToolsSource` object matches our expectations.
const expectedVersion = `0.0.0-${expectedDate}`;
const expectedURL = `https://github.com/dsp-testing/codeql-cli-nightlies/releases/download/${expectedTag}/${setupCodeql.getCodeQLBundleName("zstd")}`;
t.deepEqual(source, {
bundleVersion: expectedDate,
cliVersion: undefined,
codeqlURL: expectedURL,
compressionMethod: "zstd",
sourceType: "download",
toolsVersion: expectedVersion,
} satisfies setupCodeql.CodeQLToolsSource);
// Afterwards, ensure that we see the expected messages in the log.
checkExpectedLogMessages(t, loggedMessages, [
`Using the latest CodeQL CLI nightly, as forced by the ${Feature.ForceNightly} feature flag.`,
`Bundle version ${expectedDate} is not in SemVer format. Will treat it as pre-release ${expectedVersion}.`,
`Attempting to obtain CodeQL tools. CLI version: unknown, bundle tag name: ${expectedTag}`,
`Using CodeQL CLI sourced from ${expectedURL}`,
]);
});
});
test("getCodeQLSource correctly returns latest version from toolcache when tools == toolcache", async (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures([Feature.AllowToolcacheInput]);
process.env["GITHUB_EVENT_NAME"] = "dynamic";
const latestToolcacheVersion = "3.2.1";
const latestVersionPath = "/path/to/latest";
const testVersions = ["2.3.1", latestToolcacheVersion, "1.2.3"];
@@ -288,6 +406,8 @@ test("getCodeQLSource correctly returns latest version from toolcache when tools
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
process.env["GITHUB_EVENT_NAME"] = "dynamic";
const source = await setupCodeql.getCodeQLSource(
"toolcache",
SAMPLE_DEFAULT_CLI_VERSION,
@@ -343,16 +463,17 @@ const toolcacheInputFallbackMacro = test.macro({
const logger = getRecordingLogger(loggedMessages);
const features = createFeatures(featureList);
for (const [k, v] of Object.entries(environment)) {
process.env[k] = v;
}
const findAllVersionsStub = sinon
.stub(toolcache, "findAllVersions")
.returns(testVersions);
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
for (const [k, v] of Object.entries(environment)) {
process.env[k] = v;
}
const source = await setupCodeql.getCodeQLSource(
"toolcache",
SAMPLE_DEFAULT_CLI_VERSION,
+62 -8
View File
@@ -10,6 +10,7 @@ import { v4 as uuidV4 } from "uuid";
import { isDynamicWorkflow, isRunningLocalAction } from "./actions-util";
import * as api from "./api-client";
import * as defaults from "./defaults.json";
import { addNoLanguageDiagnostic, makeDiagnostic } from "./diagnostics";
import {
CODEQL_VERSION_ZSTD_BUNDLE,
CodeQLDefaultVersionInfo,
@@ -55,7 +56,9 @@ function getCodeQLBundleExtension(
}
}
function getCodeQLBundleName(compressionMethod: tar.CompressionMethod): string {
export function getCodeQLBundleName(
compressionMethod: tar.CompressionMethod,
): string {
const extension = getCodeQLBundleExtension(compressionMethod);
let platform: string;
@@ -196,7 +199,7 @@ export function convertToSemVer(version: string, logger: Logger): string {
return s;
}
type CodeQLToolsSource =
export type CodeQLToolsSource =
| {
codeqlTarPath: string;
compressionMethod: tar.CompressionMethod;
@@ -261,6 +264,20 @@ async function findOverridingToolsInCache(
return undefined;
}
/**
* Determines where the CodeQL CLI we want to use comes from. This can be from a local file,
* the Actions toolcache, or a download.
*
* @param toolsInput The argument provided for the `tools` input, if any.
* @param defaultCliVersion The default CLI version that's linked to the CodeQL Action.
* @param apiDetails Information about the GitHub API.
* @param variant The GitHub variant we are running on.
* @param tarSupportsZstd Whether zstd is supported by `tar`.
* @param features Information about enabled features.
* @param logger The logger to use.
*
* @returns Information about where the CodeQL CLI we want to use comes from.
*/
export async function getCodeQLSource(
toolsInput: string | undefined,
defaultCliVersion: CodeQLDefaultVersionInfo,
@@ -270,6 +287,9 @@ export async function getCodeQLSource(
features: FeatureEnablement,
logger: Logger,
): Promise<CodeQLToolsSource> {
// If there is an explicit `tools` input, it's not one of the reserved values, and it doesn't appear
// to point to a URL, then we assume it is a local path and use the CLI from there.
// TODO: This appears to misclassify filenames that happen to start with `http` as URLs.
if (
toolsInput &&
!isReservedToolsValue(toolsInput) &&
@@ -302,13 +322,47 @@ export async function getCodeQLSource(
*/
let url: string | undefined;
if (
// We allow forcing the nightly CLI via the FF for `dynamic` events (or in test mode) where the
// `tools` input cannot be adjusted to explicitly request it.
const canForceNightlyWithFF = isDynamicWorkflow() || util.isInTestMode();
const forceNightlyValueFF = await features.getValue(Feature.ForceNightly);
const forceNightly = forceNightlyValueFF && canForceNightlyWithFF;
// For advanced workflows, a value from `CODEQL_NIGHTLY_TOOLS_INPUTS` can be specified explicitly
// for the `tools` input in the workflow file.
const nightlyRequestedByToolsInput =
toolsInput !== undefined &&
CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput)
) {
logger.info(
`Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`,
);
CODEQL_NIGHTLY_TOOLS_INPUTS.includes(toolsInput);
if (forceNightly || nightlyRequestedByToolsInput) {
if (forceNightly) {
logger.info(
`Using the latest CodeQL CLI nightly, as forced by the ${Feature.ForceNightly} feature flag.`,
);
addNoLanguageDiagnostic(
undefined,
makeDiagnostic(
"codeql-action/forced-nightly-cli",
"A nightly release of CodeQL was used",
{
markdownMessage:
"GitHub configured this analysis to use a nightly release of CodeQL to allow you to preview changes from an upcoming release.\n\n" +
"Nightly releases do not undergo the same validation as regular releases and may lead to analysis instability.\n\n" +
"If use of a nightly CodeQL release for this analysis is unexpected, please contact GitHub support.",
visibility: {
cliSummaryTable: true,
statusPage: true,
telemetry: true,
},
severity: "note",
},
),
);
} else {
logger.info(
`Using the latest CodeQL CLI nightly, as requested by 'tools: ${toolsInput}'.`,
);
}
toolsInput = await getNightlyToolsUrl(logger);
}
+19 -71
View File
@@ -2,7 +2,6 @@ import { ChildProcess, spawn } from "child_process";
import * as path from "path";
import * as core from "@actions/core";
import { pki } from "node-forge";
import * as actionsUtil from "./actions-util";
import { getGitHubVersion } from "./api-client";
@@ -19,81 +18,15 @@ import {
ProxyInfo,
sendFailedStatusReport,
sendSuccessStatusReport,
Credential,
Registry,
ProxyConfig,
} from "./start-proxy";
import { generateCertificateAuthority } from "./start-proxy/ca";
import { checkProxyEnvironment } from "./start-proxy/environment";
import { checkConnections } from "./start-proxy/reachability";
import { ActionName, sendUnhandledErrorStatusReport } from "./status-report";
import * as util from "./util";
const KEY_SIZE = 2048;
const KEY_EXPIRY_YEARS = 2;
type CertificateAuthority = {
cert: string;
key: string;
};
type BasicAuthCredentials = {
username: string;
password: string;
};
type ProxyConfig = {
/** The validated configurations for the proxy. */
all_credentials: Credential[];
ca: CertificateAuthority;
proxy_auth?: BasicAuthCredentials;
};
const CERT_SUBJECT = [
{
name: "commonName",
value: "Dependabot Internal CA",
},
{
name: "organizationName",
value: "GitHub inc.",
},
{
shortName: "OU",
value: "Dependabot",
},
{
name: "countryName",
value: "US",
},
{
shortName: "ST",
value: "California",
},
{
name: "localityName",
value: "San Francisco",
},
];
function generateCertificateAuthority(): CertificateAuthority {
const keys = pki.rsa.generateKeyPair(KEY_SIZE);
const cert = pki.createCertificate();
cert.publicKey = keys.publicKey;
cert.serialNumber = "01";
cert.validity.notBefore = new Date();
cert.validity.notAfter = new Date();
cert.validity.notAfter.setFullYear(
cert.validity.notBefore.getFullYear() + KEY_EXPIRY_YEARS,
);
cert.setSubject(CERT_SUBJECT);
cert.setIssuer(CERT_SUBJECT);
cert.setExtensions([{ name: "basicConstraints", cA: true }]);
cert.sign(keys.privateKey);
const pem = pki.certificateToPem(cert);
const key = pki.privateKeyToPem(keys.privateKey);
return { cert: pem, key };
}
async function run(startedAt: Date) {
// To capture errors appropriately, keep as much code within the try-catch as
// possible, and only use safe functions outside.
@@ -144,7 +77,22 @@ async function run(startedAt: Date) {
.join("\n")}`,
);
const ca = generateCertificateAuthority();
// Check the environment for any configurations which may affect the proxy.
// This is a best effort process to give us insights into potential factors
// which may affect the operation of our proxy.
if (core.isDebug() || util.isInTestMode()) {
try {
await checkProxyEnvironment(logger, language);
} catch (err) {
logger.debug(
`Unable to inspect runner environment: ${util.getErrorMessage(err)}`,
);
}
}
const ca = generateCertificateAuthority(
await features.getValue(Feature.ImprovedProxyCertificates),
);
const proxyConfig: ProxyConfig = {
all_credentials: credentials,
+93
View File
@@ -0,0 +1,93 @@
import test, { ExecutionContext } from "ava";
import { pki } from "node-forge";
import { setupTests } from "../testing-utils";
import * as ca from "./ca";
setupTests(test);
/** Indexes `array` into a `Map` keyed by the string that `func` derives from each element. */
const toMap = <T>(array: T[], func: (e: T) => string) => {
  const result = new Map<string, T>();
  for (const element of array) {
    result.set(func(element), element);
  }
  return result;
};
/**
 * Asserts that `cert` carries the expected Dependabot CA subject, and that the
 * certificate is self-signed (every subject attribute mirrors the issuer's).
 */
function checkCertAttributes(
  t: ExecutionContext<unknown>,
  cert: pki.Certificate,
) {
  const subjectAttrs = toMap(cert.subject.attributes, (a) => a.name as string);
  const issuerAttrs = toMap(cert.issuer.attributes, (a) => a.name as string);
  // The CA must be issued to and by "Dependabot Internal CA"...
  t.is(subjectAttrs.get("commonName")?.value, "Dependabot Internal CA");
  t.is(issuerAttrs.get("commonName")?.value, "Dependabot Internal CA");
  // ...and, being self-signed, each subject attribute must match the issuer's.
  for (const name of subjectAttrs.keys()) {
    t.deepEqual(subjectAttrs.get(name), issuerAttrs.get(name));
  }
}
test("generateCertificateAuthority - generates certificates", (t) => {
const result = ca.generateCertificateAuthority(false);
const cert = pki.certificateFromPem(result.cert);
const key = pki.privateKeyFromPem(result.key);
t.truthy(cert);
t.truthy(key);
checkCertAttributes(t, cert);
// Check the validity.
t.true(
cert.validity.notBefore <= new Date(),
"notBefore date is in the future",
);
t.true(cert.validity.notAfter > new Date(), "notAfter date is in the past");
// Check that the extensions are set as we'd expect.
const exts = cert.extensions as ca.Extension[];
t.is(exts.length, 1);
t.is(exts[0].name, "basicConstraints");
t.is(exts[0].cA, true);
t.truthy(cert.siginfo);
});
test("generateCertificateAuthority - generates certificates with FF", (t) => {
const result = ca.generateCertificateAuthority(true);
const cert = pki.certificateFromPem(result.cert);
const key = pki.privateKeyFromPem(result.key);
t.truthy(cert);
t.truthy(key);
checkCertAttributes(t, cert);
// Check the validity.
t.true(
cert.validity.notBefore <= new Date(),
"notBefore date is in the future",
);
t.true(cert.validity.notAfter > new Date(), "notAfter date is in the past");
// Check that the extensions are set as we'd expect.
const exts = toMap(cert.extensions as ca.Extension[], (ext) => ext.name);
t.is(exts.size, 4);
t.true(exts.get("basicConstraints")?.cA);
t.truthy(exts.get("subjectKeyIdentifier"));
t.truthy(exts.get("authorityKeyIdentifier"));
const keyUsage = exts.get("keyUsage");
if (t.truthy(keyUsage)) {
t.true(keyUsage.critical);
t.true(keyUsage.keyCertSign);
t.true(keyUsage.cRLSign);
t.true(keyUsage.digitalSignature);
}
t.truthy(cert.siginfo);
});
+93
View File
@@ -0,0 +1,93 @@
import { md, pki } from "node-forge";
import { CertificateAuthority } from "./types";
// Size, in bits, of the generated RSA key pair.
const KEY_SIZE = 2048;
// How many years the generated CA certificate remains valid.
const KEY_EXPIRY_YEARS = 2;
// Distinguished-name attributes used for both the subject and the issuer of
// the generated certificate (the CA is self-signed).
const CERT_SUBJECT = [
  {
    name: "commonName",
    value: "Dependabot Internal CA",
  },
  {
    name: "organizationName",
    value: "GitHub inc.",
  },
  {
    shortName: "OU",
    value: "Dependabot",
  },
  {
    name: "countryName",
    value: "US",
  },
  {
    shortName: "ST",
    value: "California",
  },
  {
    name: "localityName",
    value: "San Francisco",
  },
];
/**
 * Minimal shape of a certificate extension: a name plus arbitrary
 * extension-specific properties. Exported so tests can inspect extensions.
 */
export type Extension = {
  name: string;
  [key: string]: unknown;
};

// Extensions that are added only when the improved certificate generation
// feature flag is enabled: key-usage restrictions plus subject/authority key
// identifiers.
const extraExtensions: Extension[] = [
  {
    name: "keyUsage",
    critical: true,
    keyCertSign: true,
    cRLSign: true,
    digitalSignature: true,
  },
  { name: "subjectKeyIdentifier" },
  { name: "authorityKeyIdentifier", keyIdentifier: true },
];
/**
* Generates a CA certificate for the proxy.
*
* @param newCertGenFF Whether to use the updated certificate generation.
* @returns The private and public keys.
*/
export function generateCertificateAuthority(
newCertGenFF: boolean,
): CertificateAuthority {
const keys = pki.rsa.generateKeyPair(KEY_SIZE);
const cert = pki.createCertificate();
cert.publicKey = keys.publicKey;
cert.serialNumber = "01";
cert.validity.notBefore = new Date();
cert.validity.notAfter = new Date();
cert.validity.notAfter.setFullYear(
cert.validity.notBefore.getFullYear() + KEY_EXPIRY_YEARS,
);
cert.setSubject(CERT_SUBJECT);
cert.setIssuer(CERT_SUBJECT);
const extensions: Extension[] = [{ name: "basicConstraints", cA: true }];
// Add the extra CA extensions if the FF is enabled.
if (newCertGenFF) {
extensions.push(...extraExtensions);
}
cert.setExtensions(extensions);
// Specifically use SHA256 when the FF is enabled.
if (newCertGenFF) {
cert.sign(keys.privateKey, md.sha256.create());
} else {
cert.sign(keys.privateKey);
}
const pem = pki.certificateToPem(cert);
const key = pki.privateKeyToPem(keys.privateKey);
return { cert: pem, key };
}
+213
View File
@@ -0,0 +1,213 @@
import * as fs from "fs";
import * as os from "os";
import path from "path";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";
import test, { ExecutionContext } from "ava";
import sinon from "sinon";
import { JavaEnvVars, KnownLanguage } from "../languages";
import {
checkExpectedLogMessages,
getRecordingLogger,
LoggedMessage,
setupTests,
} from "../testing-utils";
import { withTmpDir } from "../util";
import {
checkJavaEnvVars,
checkJdkSettings,
checkProxyEnvironment,
checkProxyEnvVars,
discoverActionsJdks,
JAVA_PROXY_ENV_VARS,
ProxyEnvVars,
} from "./environment";
setupTests(test);
/**
 * Stubs `io.which` (to throw, as if `java` were not installed) and
 * `ToolRunner` (to succeed without launching anything) so these tests never
 * spawn a real Java process.
 */
function stubToolrunner() {
  sinon.stub(io, "which").throws(new Error("Java not installed"));
  sinon.stub(toolrunner, "ToolRunner").returns({
    exec: async () => {
      return 0;
    },
  });
}
/**
 * Asserts that `messages` contains the expected log line for every
 * environment variable in `envVars`.
 *
 * @param t The test context.
 * @param envVars The environment variable names to check.
 * @param messages The recorded log messages.
 * @param expectSet `false` expects each variable to be reported as not set;
 *   `true` expects each variable to be reported as set to its own name
 *   (the convention used by these tests); a string expects each variable to
 *   be reported as set to that string.
 */
function assertEnvVarLogMessages(
  t: ExecutionContext<unknown>,
  envVars: string[],
  messages: LoggedMessage[],
  expectSet: boolean | string,
) {
  const template = (envVar: string) => {
    if (typeof expectSet === "string") {
      return `Environment variable '${envVar}' is set to '${expectSet}'`;
    }
    return expectSet
      ? `Environment variable '${envVar}' is set to '${envVar}'`
      : `Environment variable '${envVar}' is not set`;
  };
  checkExpectedLogMessages(t, messages, envVars.map(template));
}
test("checkJavaEnvironment - none set", (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
checkJavaEnvVars(logger);
assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false);
});
test("checkJavaEnvironment - logs values when variables are set", (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
for (const envVar of Object.values(JavaEnvVars)) {
process.env[envVar] = envVar;
}
checkJavaEnvVars(logger);
assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, true);
});
test("discoverActionsJdks - discovers JDK paths", (t) => {
// Clear GHA variables that may interfere with this test in CI.
for (const envVar of Object.keys(process.env)) {
if (envVar.startsWith("JAVA_HOME_")) {
delete process.env[envVar];
}
}
const jdk8 = "/usr/lib/jvm/temurin-8-jdk-amd64";
const jdk17 = "/usr/lib/jvm/temurin-17-jdk-amd64";
const jdk21 = "/usr/lib/jvm/temurin-21-jdk-amd64";
process.env[JavaEnvVars.JAVA_HOME] = jdk17;
process.env["JAVA_HOME_8_X64"] = jdk8;
process.env["JAVA_HOME_17_X64"] = jdk17;
process.env["JAVA_HOME_21_X64"] = jdk21;
const results = discoverActionsJdks();
t.is(results.size, 3);
t.true(results.has(jdk8));
t.true(results.has(jdk17));
t.true(results.has(jdk21));
});
test("checkJdkSettings - does not throw for an empty directory", async (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
await withTmpDir(async (tmpDir) => {
t.notThrows(() => checkJdkSettings(logger, tmpDir));
});
});
test("checkJdkSettings - finds files and logs relevant properties", async (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
await withTmpDir(async (tmpDir) => {
const dir = path.join(tmpDir, "conf");
fs.mkdirSync(dir);
const file = path.join(dir, "net.properties");
fs.writeFileSync(
file,
[
"irrelevant.property=foo",
"http.proxyHost=proxy.example.com",
"http.unrelated=bar",
].join(os.EOL),
{},
);
checkJdkSettings(logger, tmpDir);
checkExpectedLogMessages(t, messages, [
`Found '${file}'.`,
`Found 'http.proxyHost=proxy.example.com' in '${file}'`,
]);
});
});
test("checkProxyEnvVars - none set", (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
checkProxyEnvVars(logger);
assertEnvVarLogMessages(t, Object.values(ProxyEnvVars), messages, false);
});
test("checkProxyEnvVars - logs values when variables are set", (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
for (const envVar of Object.values(ProxyEnvVars)) {
process.env[envVar] = envVar;
}
checkProxyEnvVars(logger);
assertEnvVarLogMessages(t, Object.values(ProxyEnvVars), messages, true);
});
test("checkProxyEnvVars - credentials are removed from URLs", (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
for (const envVar of Object.values(ProxyEnvVars)) {
process.env[envVar] = "https://secret:password@proxy.local";
}
checkProxyEnvVars(logger);
assertEnvVarLogMessages(
t,
Object.values(ProxyEnvVars),
messages,
"https://proxy.local/",
);
});
test("checkProxyEnvironment - includes base checks for all known languages", async (t) => {
stubToolrunner();
for (const language of Object.values(KnownLanguage)) {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
await checkProxyEnvironment(logger, language);
assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false);
}
});
test("checkProxyEnvironment - includes Java checks for Java", async (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
stubToolrunner();
await checkProxyEnvironment(logger, KnownLanguage.java);
assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false);
assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false);
});
test("checkProxyEnvironment - includes language-specific checks if the language is undefined", async (t) => {
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
stubToolrunner();
await checkProxyEnvironment(logger, undefined);
assertEnvVarLogMessages(t, Object.keys(ProxyEnvVars), messages, false);
assertEnvVarLogMessages(t, JAVA_PROXY_ENV_VARS, messages, false);
});
+209
View File
@@ -0,0 +1,209 @@
import * as fs from "fs";
import * as path from "path";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as io from "@actions/io";
import { JavaEnvVars, KnownLanguage, Language } from "../languages";
import { Logger } from "../logging";
import { getErrorMessage, isDefined } from "../util";
/**
 * Checks whether an environment variable named `name` is set and logs its value if set.
 * Values that parse as URLs have any embedded credentials stripped before logging.
 *
 * @param logger The logger to use.
 * @param name The name of the environment variable.
 * @returns True if set or false otherwise.
 */
function checkEnvVar(logger: Logger, name: string): boolean {
  const value = process.env[name];
  if (!isDefined(value)) {
    logger.debug(`Environment variable '${name}' is not set.`);
    return false;
  }
  // If the value parses as a URL, redact any username/password so that we
  // never log proxy credentials. `URL.parse` returns null for non-URL values.
  const parsedUrl = URL.parse(value);
  if (isDefined(parsedUrl)) {
    parsedUrl.username = "";
    parsedUrl.password = "";
    logger.info(`Environment variable '${name}' is set to '${parsedUrl}'.`);
  } else {
    logger.info(`Environment variable '${name}' is set to '${value}'.`);
  }
  return true;
}
// The JRE properties that may affect the proxy.
const javaProperties = [
  // Explicit proxy host/port configuration.
  "http.proxyHost",
  "http.proxyPort",
  "https.proxyHost",
  "https.proxyPort",
  "http.nonProxyHosts",
  "java.net.useSystemProxies",
  // Trust store configuration, which affects whether our CA is accepted.
  "javax.net.ssl.trustStore",
  "javax.net.ssl.trustStoreType",
  "javax.net.ssl.trustStoreProvider",
  // TLS protocol/algorithm restrictions.
  "jdk.tls.client.protocols",
  "jdk.tls.disabledAlgorithms",
  "jdk.security.allowNonCaAnchor",
  "https.protocols",
  // Certificate validation / revocation checking behavior.
  "com.sun.net.ssl.enableAIAcaIssuers",
  "com.sun.net.ssl.checkRevocation",
  "com.sun.security.enableCRLDP",
  "ocsp.enable",
];
/** Java-specific environment variables which may contain information about proxy settings. */
// NOTE(review): `JAVA_HOME` is intentionally not listed here — it locates the
// JDK rather than carrying JVM options; confirm this remains deliberate.
export const JAVA_PROXY_ENV_VARS: JavaEnvVars[] = [
  JavaEnvVars.JAVA_TOOL_OPTIONS,
  JavaEnvVars.JDK_JAVA_OPTIONS,
  JavaEnvVars._JAVA_OPTIONS,
];
/**
 * Checks whether any Java-specific environment variables which may contain proxy
 * configurations are set and logs their values if so.
 *
 * @param logger The logger to use.
 */
export function checkJavaEnvVars(logger: Logger) {
  JAVA_PROXY_ENV_VARS.forEach((name) => checkEnvVar(logger, name));
}
/**
 * Discovers paths to JDK directories based on JAVA_HOME and GHA-specific environment variables.
 * @returns A set of JDK paths.
 */
export function discoverActionsJdks(): Set<string> {
  const jdkHomes = new Set<string>();
  // The currently active JDK, if any.
  const primaryHome = process.env[JavaEnvVars.JAVA_HOME];
  if (isDefined(primaryHome)) {
    jdkHomes.add(primaryHome);
  }
  // GHA runner images export variables like `JAVA_HOME_17_X64` for each
  // pre-installed JDK (see the tests for examples).
  const actionsJdkPattern = /^JAVA_HOME_\d+_/;
  for (const [name, value] of Object.entries(process.env)) {
    if (isDefined(value) && actionsJdkPattern.test(name)) {
      jdkHomes.add(value);
    }
  }
  return jdkHomes;
}
/**
 * Tries to inspect JDK configuration files for the specified JDK path which may contain proxy settings.
 * Any line that sets one of the proxy-relevant `javaProperties` is logged.
 *
 * @param logger The logger to use.
 * @param jdkHome The JDK home directory.
 */
export function checkJdkSettings(logger: Logger, jdkHome: string) {
  // `net.properties` lives under `conf/` in JDK 9+ and under `lib/` in JDK 8
  // and below.
  const candidateFiles = [
    path.join(jdkHome, path.join("conf", "net.properties")),
    path.join(jdkHome, path.join("lib", "net.properties")),
  ];
  for (const file of candidateFiles) {
    try {
      if (!fs.existsSync(file)) {
        logger.debug(`'${file}' does not exist.`);
        continue;
      }
      logger.debug(`Found '${file}'.`);
      const contents = String(fs.readFileSync(file));
      for (const line of contents.split("\n")) {
        const relevant = javaProperties.some((property) =>
          line.startsWith(`${property}=`),
        );
        if (relevant) {
          logger.info(`Found '${line.trimEnd()}' in '${file}'.`);
        }
      }
    } catch (err) {
      // Best effort: unreadable files are only worth a debug message.
      logger.debug(`Failed to read '${file}': ${getErrorMessage(err)}`);
    }
  }
}
/** Invokes `java` to get it to show us the active configuration. */
async function showJavaSettings(logger: Logger): Promise<void> {
  try {
    const javaPath = await io.which("java", true);
    // Collect both stdout and stderr: `java -version` writes to stderr.
    const chunks: string[] = [];
    const capture = (data: Buffer) => {
      chunks.push(String(data));
    };
    const runner = new toolrunner.ToolRunner(
      javaPath,
      ["-XshowSettings:all", "-XshowSettings:security:all", "-version"],
      {
        silent: true,
        listeners: {
          stdout: capture,
          stderr: capture,
        },
      },
    );
    await runner.exec();
    logger.startGroup("Java settings");
    logger.info(chunks.join(""));
    logger.endGroup();
  } catch (err) {
    // Best effort: `java` may not be installed at all.
    logger.debug(`Failed to query java settings: ${getErrorMessage(err)}`);
  }
}
/** Enumerates environment variable names which may contain information about proxy settings. */
// Note: `checkProxyEnvVars` also checks the lower-case variant of each name.
export enum ProxyEnvVars {
  HTTP_PROXY = "HTTP_PROXY",
  HTTPS_PROXY = "HTTPS_PROXY",
  ALL_PROXY = "ALL_PROXY",
}
/**
 * Checks whether any proxy-related environment variables are set and logs their values if so.
 *
 * @param logger The logger to use.
 */
export function checkProxyEnvVars(logger: Logger) {
  // Both upper-case and lower-case variants of these environment variables are used.
  const variants = Object.values(ProxyEnvVars).flatMap((name) => [
    name,
    name.toLowerCase(),
  ]);
  for (const variant of variants) {
    checkEnvVar(logger, variant);
  }
}
/**
 * Inspects environment variables and other configurations on the runner to determine whether
 * any settings that may affect the operation of the proxy are present. All relevant information
 * is written to the log.
 *
 * @param logger The logger to use.
 * @param language The enabled language, if known.
 */
export async function checkProxyEnvironment(
  logger: Logger,
  language: Language | undefined,
): Promise<void> {
  // Determine whether there is an existing proxy configured.
  checkProxyEnvVars(logger);
  // Check language-specific configurations. If we don't know the language,
  // then we perform all checks.
  const javaChecksApply =
    language === undefined || language === KnownLanguage.java;
  if (javaChecksApply) {
    checkJavaEnvVars(logger);
    await showJavaSettings(logger);
    for (const jdkHome of discoverActionsJdks()) {
      checkJdkSettings(logger, jdkHome);
    }
  }
}
+20
View File
@@ -59,3 +59,23 @@ export interface ProxyInfo {
cert: string;
registries: Registry[];
}
/** A CA certificate and its private key, both PEM-encoded (see `ca.ts`). */
export type CertificateAuthority = {
  cert: string;
  key: string;
};

/** Username/password credentials for HTTP basic authentication. */
export type BasicAuthCredentials = {
  username: string;
  password: string;
};

/**
 * Represents configurations for the authentication proxy.
 */
export type ProxyConfig = {
  /** The validated configurations for the proxy. */
  all_credentials: Credential[];
  /** The certificate authority the proxy uses to sign certificates. */
  ca: CertificateAuthority;
  /** Optional credentials clients must present to the proxy itself. */
  proxy_auth?: BasicAuthCredentials;
};
+73 -5
View File
@@ -145,13 +145,67 @@ export function setupActionsVars(tempDir: string, toolsDir: string) {
process.env["RUNNER_TEMP"] = tempDir;
process.env["RUNNER_TOOL_CACHE"] = toolsDir;
process.env["GITHUB_WORKSPACE"] = tempDir;
process.env["GITHUB_EVENT_NAME"] = "push";
}
type LogLevel = "debug" | "info" | "warning" | "error";
export interface LoggedMessage {
type: "debug" | "info" | "warning" | "error";
type: LogLevel;
message: string | Error;
}
/**
 * A `Logger` implementation for tests which records every message and every
 * log group so that assertions can be made against them afterwards.
 */
export class RecordingLogger implements Logger {
  /** All messages logged so far, in order. */
  messages: LoggedMessage[] = [];
  /** Every group that was started, in order. */
  groups: string[] = [];
  /** Groups that were started but never ended. */
  unfinishedGroups: Set<string> = new Set();
  private currentGroup: string | undefined = undefined;

  constructor(private readonly logToConsole: boolean = true) {}

  /** Records a message and optionally echoes it to the console. */
  private record(level: LogLevel, message: string | Error): void {
    this.messages.push({ type: level, message });
    if (this.logToConsole) {
      // eslint-disable-next-line no-console
      console.debug(message);
    }
  }

  isDebug() {
    return true;
  }

  debug(message: string) {
    this.record("debug", message);
  }

  info(message: string) {
    this.record("info", message);
  }

  warning(message: string | Error) {
    this.record("warning", message);
  }

  error(message: string | Error) {
    this.record("error", message);
  }

  startGroup(name: string) {
    this.groups.push(name);
    this.unfinishedGroups.add(name);
    this.currentGroup = name;
  }

  endGroup() {
    const group = this.currentGroup;
    this.currentGroup = undefined;
    if (group !== undefined) {
      this.unfinishedGroups.delete(group);
    }
  }
}
export function getRecordingLogger(
messages: LoggedMessage[],
{ logToConsole }: { logToConsole?: boolean } = { logToConsole: true },
@@ -196,15 +250,29 @@ export function checkExpectedLogMessages(
messages: LoggedMessage[],
expectedMessages: string[],
) {
const missingMessages: string[] = [];
for (const expectedMessage of expectedMessages) {
t.assert(
messages.some(
if (
!messages.some(
(msg) =>
typeof msg.message === "string" &&
msg.message.includes(expectedMessage),
),
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
)
) {
missingMessages.push(expectedMessage);
}
}
if (missingMessages.length > 0) {
const listify = (lines: string[]) =>
lines.map((m) => ` - '${m}'`).join("\n");
t.fail(
`Expected\n\n${listify(missingMessages)}\n\nin the logger output, but didn't find it in:\n\n${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
);
} else {
t.pass();
}
}
+70 -25
View File
@@ -12,6 +12,7 @@ import * as api from "./api-client";
import { getRunnerLogger, Logger } from "./logging";
import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib";
import { UploadPayload } from "./upload-lib/types";
import { GitHubVariant, initializeEnvironment, withTmpDir } from "./util";
setupTests(test);
@@ -128,11 +129,21 @@ test("finding SARIF files", async (t) => {
"file",
);
// add some `.quality.sarif` files that should be ignored, unless we look for them specifically
fs.writeFileSync(path.join(tmpDir, "a.quality.sarif"), "");
fs.writeFileSync(path.join(tmpDir, "dir1", "b.quality.sarif"), "");
// add some non-Code Scanning files that should be ignored, unless we look for them specifically
for (const analysisKind of analyses.supportedAnalysisKinds) {
if (analysisKind === AnalysisKind.CodeScanning) continue;
const expectedSarifFiles = [
const analysis = analyses.getAnalysisConfig(analysisKind);
fs.writeFileSync(path.join(tmpDir, `a${analysis.sarifExtension}`), "");
fs.writeFileSync(
path.join(tmpDir, "dir1", `b${analysis.sarifExtension}`),
"",
);
}
const expectedSarifFiles: Partial<Record<AnalysisKind, string[]>> = {};
expectedSarifFiles[AnalysisKind.CodeScanning] = [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
@@ -143,18 +154,24 @@ test("finding SARIF files", async (t) => {
CodeScanning.sarifPredicate,
);
t.deepEqual(sarifFiles, expectedSarifFiles);
t.deepEqual(sarifFiles, expectedSarifFiles[AnalysisKind.CodeScanning]);
const expectedQualitySarifFiles = [
path.join(tmpDir, "a.quality.sarif"),
path.join(tmpDir, "dir1", "b.quality.sarif"),
];
const qualitySarifFiles = uploadLib.findSarifFilesInDir(
tmpDir,
CodeQuality.sarifPredicate,
);
for (const analysisKind of analyses.supportedAnalysisKinds) {
if (analysisKind === AnalysisKind.CodeScanning) continue;
t.deepEqual(qualitySarifFiles, expectedQualitySarifFiles);
const analysis = analyses.getAnalysisConfig(analysisKind);
expectedSarifFiles[analysisKind] = [
path.join(tmpDir, `a${analysis.sarifExtension}`),
path.join(tmpDir, "dir1", `b${analysis.sarifExtension}`),
];
const foundSarifFiles = uploadLib.findSarifFilesInDir(
tmpDir,
analysis.sarifPredicate,
);
t.deepEqual(foundSarifFiles, expectedSarifFiles[analysisKind]);
}
const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
getRunnerLogger(true),
@@ -162,16 +179,31 @@ test("finding SARIF files", async (t) => {
);
t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.deepEqual(
groupedSarifFiles[AnalysisKind.CodeScanning],
expectedSarifFiles,
);
t.deepEqual(
groupedSarifFiles[AnalysisKind.CodeQuality],
expectedQualitySarifFiles,
for (const analysisKind of analyses.supportedAnalysisKinds) {
t.not(groupedSarifFiles[analysisKind], undefined);
t.deepEqual(
groupedSarifFiles[analysisKind],
expectedSarifFiles[analysisKind],
);
}
});
});
test("getGroupedSarifFilePaths - Risk Assessment files", async (t) => {
  await withTmpDir(async (tmpDir) => {
    // Create an empty SARIF file with the Risk Assessment extension.
    const riskAssessmentSarif = path.join(tmpDir, "a.csra.sarif");
    fs.writeFileSync(riskAssessmentSarif, "");

    const grouped = await uploadLib.getGroupedSarifFilePaths(
      getRunnerLogger(true),
      riskAssessmentSarif,
    );

    t.not(grouped, undefined);
    // Only the Risk Assessment group should be populated.
    t.is(grouped[AnalysisKind.CodeScanning], undefined);
    t.is(grouped[AnalysisKind.CodeQuality], undefined);
    t.not(grouped[AnalysisKind.RiskAssessment], undefined);
    t.deepEqual(grouped[AnalysisKind.RiskAssessment], [riskAssessmentSarif]);
  });
});
@@ -188,6 +220,7 @@ test("getGroupedSarifFilePaths - Code Quality file", async (t) => {
t.not(groupedSarifFiles, undefined);
t.is(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.is(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeQuality], [sarifPath]);
});
});
@@ -205,6 +238,7 @@ test("getGroupedSarifFilePaths - Code Scanning file", async (t) => {
t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.is(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
});
});
@@ -222,6 +256,7 @@ test("getGroupedSarifFilePaths - Other file", async (t) => {
t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.is(groupedSarifFiles[AnalysisKind.RiskAssessment], undefined);
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
});
});
@@ -875,7 +910,15 @@ function createMockSarif(id?: string, tool?: string) {
function uploadPayloadFixtures(analysis: analyses.AnalysisConfig) {
const mockData = {
payload: { sarif: "base64data", commit_sha: "abc123" },
payload: {
commit_oid: "abc123",
ref: "ref",
sarif: "base64data",
workflow_run_id: 1,
workflow_run_attempt: 1,
checkout_uri: "uri",
tool_names: ["codeql"],
} satisfies UploadPayload,
owner: "test-owner",
repo: "test-repo",
response: {
@@ -907,7 +950,9 @@ function uploadPayloadFixtures(analysis: analyses.AnalysisConfig) {
};
}
for (const analysis of [CodeScanning, CodeQuality]) {
for (const analysisKind of analyses.supportedAnalysisKinds) {
const analysis = analyses.getAnalysisConfig(analysisKind);
test(`uploadPayload on ${analysis.name} uploads successfully`, async (t) => {
const { upload, requestStub, mockData } = uploadPayloadFixtures(analysis);
requestStub
+18 -15
View File
@@ -21,6 +21,7 @@ import * as gitUtils from "./git-utils";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import { BasePayload, UploadPayload } from "./upload-lib/types";
import * as util from "./util";
import {
ConfigurationError,
@@ -326,7 +327,7 @@ function getAutomationID(
* This is exported for testing purposes only.
*/
export async function uploadPayload(
payload: any,
payload: BasePayload,
repositoryNwo: RepositoryNwo,
logger: Logger,
analysis: analyses.AnalysisConfig,
@@ -618,8 +619,8 @@ export function buildPayload(
environment: string | undefined,
toolNames: string[],
mergeBaseCommitOid: string | undefined,
) {
const payloadObj = {
): UploadPayload {
const payloadObj: UploadPayload = {
commit_oid: commitOid,
ref,
analysis_key: analysisKey,
@@ -847,18 +848,20 @@ export async function uploadPostProcessedFiles(
const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
const payload = buildPayload(
await gitUtils.getCommitOid(checkoutPath),
await gitUtils.getRef(),
postProcessingResults.analysisKey,
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
zippedSarif,
actionsUtil.getWorkflowRunID(),
actionsUtil.getWorkflowRunAttempt(),
checkoutURI,
postProcessingResults.environment,
toolNames,
await gitUtils.determineBaseBranchHeadCommitOid(),
const payload = uploadTarget.transformPayload(
buildPayload(
await gitUtils.getCommitOid(checkoutPath),
await gitUtils.getRef(),
postProcessingResults.analysisKey,
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
zippedSarif,
actionsUtil.getWorkflowRunID(),
actionsUtil.getWorkflowRunAttempt(),
checkoutURI,
postProcessingResults.environment,
toolNames,
await gitUtils.determineBaseBranchHeadCommitOid(),
),
);
// Log some useful debug info about the info
+45
View File
@@ -0,0 +1,45 @@
/**
 * Represents the minimum, common payload for SARIF upload endpoints that we support.
 */
export interface BasePayload {
  /** The contents of a SARIF file, gzip-compressed and base64-encoded. */
  sarif: string;
}
/**
 * Represents the payload expected for Code Scanning and Code Quality SARIF uploads.
 */
export interface UploadPayload extends BasePayload {
  /** The SHA of the commit that was analysed. */
  commit_oid: string;
  /** The ref that was analysed. */
  ref: string;
  /** The analysis key that identifies the analysis. */
  analysis_key?: string;
  /** The name of the analysis. */
  analysis_name?: string;
  /** The ID of the workflow run that performed the analysis. */
  workflow_run_id: number;
  /** The attempt number of the workflow run that performed the analysis. */
  workflow_run_attempt: number;
  /** The URI where the repository was checked out. */
  checkout_uri: string;
  /** The matrix value (serialised matrix environment) for the analysis, if any. */
  environment?: string;
  /** A string representation of when the analysis was started. */
  started_at?: string;
  /** The names of the tools that performed the analysis. */
  tool_names: string[];
  /** For a pull request, the ref of the base the PR is targeting. */
  base_ref?: string;
  /** For a pull request, the commit SHA of the merge base. */
  base_sha?: string;
}
/**
 * Represents the payload expected for Code Scanning Risk Assessment SARIF uploads.
 */
export interface AssessmentPayload extends BasePayload {
  /** The ID of the assessment that this SARIF upload belongs to. */
  assessment_id: number;
}