Mirror of https://github.com/github/codeql-action.git (synced 2026-05-08 06:40:19 +00:00)
Merge remote-tracking branch 'origin/releases/v3' into backport-v2.24.10-4355270be
# Conflicts:
#	lib/codeql.js
#	src/codeql.ts
@@ -286,14 +286,17 @@ test("determineMergeBaseCommitOid no error", async (t) => {

process.env["GITHUB_EVENT_NAME"] = "pull_request";
process.env["GITHUB_SHA"] = "100912429fab4cb230e66ffb11e738ac5194e73a";
await actionsUtil.determineMergeBaseCommitOid(path.join(__dirname, "../.."));

await withTmpDir(async (tmpDir) => {
await actionsUtil.determineMergeBaseCommitOid(tmpDir);
});

t.deepEqual(1, infoStub.callCount);
t.assert(
infoStub.firstCall.args[0].startsWith(
"The checkout path provided to the action does not appear to be a git repository.",
),
);

infoStub.restore();
});
@@ -7,11 +7,18 @@ import * as core from "@actions/core";

import * as analyzeActionPostHelper from "./analyze-action-post-helper";
import * as debugArtifacts from "./debug-artifacts";
import * as uploadSarifActionPostHelper from "./upload-sarif-action-post-helper";
import { wrapError } from "./util";

async function runWrapper() {
try {
await analyzeActionPostHelper.run(debugArtifacts.uploadSarifDebugArtifact);

// Also run the upload-sarif post action since we're potentially running
// the same steps in the analyze action.
await uploadSarifActionPostHelper.uploadArtifacts(
debugArtifacts.uploadDebugArtifacts,
);
} catch (error) {
core.setFailed(
`analyze post-action step failed: ${wrapError(error).message}`,
+27 -24

@@ -74,22 +74,24 @@ async function sendStatusReport(
error?.message,
error?.stack,
);
const report: FinishStatusReport = {
...statusReportBase,
...(stats || {}),
...(dbCreationTimings || {}),
};
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
...report,
trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
trap_cache_upload_size_bytes: Math.round(
await getTotalCacheSize(config.trapCaches, logger),
),
if (statusReportBase !== undefined) {
const report: FinishStatusReport = {
...statusReportBase,
...(stats || {}),
...(dbCreationTimings || {}),
};
await statusReport.sendStatusReport(trapCacheUploadStatusReport);
} else {
await statusReport.sendStatusReport(report);
if (config && didUploadTrapCaches) {
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
...report,
trap_cache_upload_duration_ms: Math.round(trapCacheUploadTime || 0),
trap_cache_upload_size_bytes: Math.round(
await getTotalCacheSize(config.trapCaches, logger),
),
};
await statusReport.sendStatusReport(trapCacheUploadStatusReport);
} else {
await statusReport.sendStatusReport(report);
}
}
}
@@ -190,16 +192,17 @@ async function run() {

const logger = getActionsLogger();
try {
await statusReport.sendStatusReport(
await createStatusReportBase(
ActionName.Analyze,
"starting",
startedAt,
config,
await util.checkDiskUsage(logger),
logger,
),
const statusReportBase = await createStatusReportBase(
ActionName.Analyze,
"starting",
startedAt,
config,
await util.checkDiskUsage(logger),
logger,
);
if (statusReportBase !== undefined) {
await statusReport.sendStatusReport(statusReportBase);
}

config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
if (config === undefined) {
@@ -1 +1 @@
{"maximumVersion": "3.13", "minimumVersion": "3.8"}
{"maximumVersion": "3.13", "minimumVersion": "3.9"}
+18 -15

@@ -55,12 +55,14 @@ async function sendCompletedStatusReport(
cause?.message,
cause?.stack,
);
const statusReport: AutobuildStatusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await sendStatusReport(statusReport);
if (statusReportBase !== undefined) {
const statusReport: AutobuildStatusReport = {
...statusReportBase,
autobuild_languages: allLanguages.join(","),
autobuild_failure: failingLanguage,
};
await sendStatusReport(statusReport);
}
}

async function run() {

@@ -70,16 +72,17 @@ async function run() {
let currentLanguage: Language | undefined;
let languages: Language[] | undefined;
try {
await sendStatusReport(
await createStatusReportBase(
ActionName.Autobuild,
"starting",
startedAt,
config,
await checkDiskUsage(logger),
logger,
),
const statusReportBase = await createStatusReportBase(
ActionName.Autobuild,
"starting",
startedAt,
config,
await checkDiskUsage(logger),
logger,
);
if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}

const gitHubVersion = await getGitHubVersion();
checkGitHubVersionInRange(gitHubVersion, logger);
+16 -16

@@ -29,6 +29,7 @@ import {
makeVersionInfo,
createTestConfig,
} from "./testing-utils";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import { initializeEnvironment } from "./util";

@@ -772,7 +773,17 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu

const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
{
codeqlVersion: "2.15.0",
codeqlVersion: makeVersionInfo("2.15.0", {
[ToolsFeature.AnalysisSummaryV2IsDefault]: true,
}),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},
flagPassed: false,
negativeFlagPassed: false,
},
{
codeqlVersion: makeVersionInfo("2.15.0"),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},

@@ -780,7 +791,7 @@ const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
negativeFlagPassed: false,
},
{
codeqlVersion: "2.15.0",
codeqlVersion: makeVersionInfo("2.15.0"),
githubVersion: {
type: util.GitHubVariant.GHES,
version: "3.9.0",

@@ -789,16 +800,7 @@ const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
negativeFlagPassed: false,
},
{
codeqlVersion: "2.15.0",
githubVersion: {
type: util.GitHubVariant.GHES,
version: "3.8.6",
},
flagPassed: false,
negativeFlagPassed: true,
},
{
codeqlVersion: "2.14.6",
codeqlVersion: makeVersionInfo("2.14.6"),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},

@@ -819,14 +821,12 @@ for (const {
: negativeFlagPassed
? "--no-new-analysis-summary"
: "nothing"
} for CodeQL CLI v${codeqlVersion} and ${
} for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
util.GitHubVariant[githubVersion.type]
} ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon
.stub(codeqlObject, "getVersion")
.resolves(makeVersionInfo(codeqlVersion));
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
// safeWhich throws because of the test CodeQL object.
sinon.stub(safeWhich, "safeWhich").resolves("");
await codeqlObject.databaseInterpretResults(
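In the hunks above, the test cases switch from a bare version string to the VersionInfo object built by makeVersionInfo, so a case can also toggle tool capabilities such as AnalysisSummaryV2IsDefault. A rough sketch of the shape that helper presumably produces is shown below; the interface and helper here are illustrative assumptions based only on how the tests call it, not the repository's actual testing-utils code.

import { ToolsFeature } from "./tools-features";

// Assumed shape: a version plus an optional capability map keyed by ToolsFeature.
interface VersionInfo {
  version: string;
  features?: Partial<Record<ToolsFeature, boolean>>;
}

// Assumed helper: wraps a version string (and optional features) into a VersionInfo,
// matching calls such as makeVersionInfo("2.15.0", { [ToolsFeature.AnalysisSummaryV2IsDefault]: true }).
function makeVersionInfo(
  version: string,
  features?: Partial<Record<ToolsFeature, boolean>>,
): VersionInfo {
  return { version, features };
}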
+52 -12

@@ -50,6 +50,10 @@ interface ExtraOptions {
extractor?: Options;
queries?: Options;
};
github?: {
"*"?: Options;
"merge-results"?: Options;
};
}

export interface CodeQL {

@@ -191,6 +195,14 @@ export interface CodeQL {
): Promise<void>;
/** Get the location of an extractor for the specified language. */
resolveExtractor(language: Language): Promise<string>;
/**
* Run 'codeql github merge-results'.
*/
mergeResults(
sarifFiles: string[],
outputFile: string,
options: { mergeRunsFromEqualCategory?: boolean },
): Promise<void>;
}

export interface VersionInfo {

@@ -268,17 +280,17 @@ const CODEQL_MINIMUM_VERSION = "2.11.6";
/**
* This version will shortly become the oldest version of CodeQL that the Action will run with.
*/
const CODEQL_NEXT_MINIMUM_VERSION = "2.11.6";
const CODEQL_NEXT_MINIMUM_VERSION = "2.12.6";

/**
* This is the version of GHES that was most recently deprecated.
*/
const GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.7";
const GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.8";

/**
* This is the deprecation date for the version of GHES that was most recently deprecated.
*/
const GHES_MOST_RECENT_DEPRECATION_DATE = "2023-11-08";
const GHES_MOST_RECENT_DEPRECATION_DATE = "2024-03-26";

/** The CLI verbosity level to use for extraction in debug mode. */
const EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
@@ -489,6 +501,7 @@ export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
),
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
resolveExtractor: resolveFunction(partialCodeql, "resolveExtractor"),
mergeResults: resolveFunction(partialCodeql, "mergeResults"),
};
return cachedCodeQL;
}

@@ -877,20 +890,16 @@ export async function getCodeQLForCmd(
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (
// Analysis summary v2 links to the status page, so check the GHES version we're running on
// supports the status page.
(config.gitHubVersion.type !== util.GitHubVariant.GHES ||
semver.gte(config.gitHubVersion.version, "3.9.0")) &&
(await util.codeQlVersionAbove(
this,
CODEQL_VERSION_ANALYSIS_SUMMARY_V2,
))
)) &&
!isSupportedToolsFeature(
await this.getVersion(),
ToolsFeature.AnalysisSummaryV2IsDefault,
)
) {
codeqlArgs.push("--new-analysis-summary");
} else if (
await util.codeQlVersionAbove(this, CODEQL_VERSION_ANALYSIS_SUMMARY_V2)
) {
codeqlArgs.push("--no-new-analysis-summary");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {

@@ -1077,6 +1086,31 @@ export async function getCodeQLForCmd(
).exec();
return JSON.parse(extractorPath);
},
async mergeResults(
sarifFiles: string[],
outputFile: string,
{
mergeRunsFromEqualCategory = false,
}: { mergeRunsFromEqualCategory?: boolean },
): Promise<void> {
const args = [
"github",
"merge-results",
"--output",
outputFile,
...getExtraOptionsFromEnv(["github", "merge-results"]),
];

for (const sarifFile of sarifFiles) {
args.push("--sarif", sarifFile);
}

if (mergeRunsFromEqualCategory) {
args.push("--sarif-merge-runs-from-equal-category");
}

await runTool(cmd, args);
},
};
// To ensure that status reports include the CodeQL CLI version wherever
// possible, we want to call getVersion(), which populates the version value

@@ -1110,8 +1144,14 @@ export async function getCodeQLForCmd(
"version of the CLI using the 'tools' input to the 'init' Action, you can remove this " +
"input to use the default version.\n\n" +
"Alternatively, if you want to continue using CodeQL CLI version " +
<<<<<<< HEAD
`${result.version}, you can replace 'github/codeql-action/*@v2' by ` +
`'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to ` +
=======
`${result.version}, you can replace 'github/codeql-action/*@v${
getActionVersion().split(".")[0]
}' by 'github/codeql-action/*@v${getActionVersion()}' in your code scanning workflow to ` +
>>>>>>> origin/releases/v3
"continue using this version of the CodeQL Action.",
);
core.exportVariable(EnvVar.SUPPRESS_DEPRECATED_SOON_WARNING, "true");
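The mergeResults wrapper added above shells out to `codeql github merge-results`, passing each input file with a repeated --sarif flag plus --output and, optionally, --sarif-merge-runs-from-equal-category. A hedged usage sketch follows; obtaining the CodeQL object via getCodeQL(config.codeQLCmd) mirrors the upload-lib.ts hunk later in this diff, and the file names are placeholders.

// Illustrative only: driving the new wrapper from action code.
const codeql = await getCodeQL(config.codeQLCmd);

await codeql.mergeResults(
  ["java.sarif", "javascript.sarif"], // repeated --sarif arguments
  "combined.sarif", // --output
  { mergeRunsFromEqualCategory: true }, // adds --sarif-merge-runs-from-equal-category
);

// Roughly equivalent CLI invocation:
//   codeql github merge-results --output combined.sarif \
//     --sarif java.sarif --sarif javascript.sarif \
//     --sarif-merge-runs-from-equal-category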
+4 -4

@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.16.5",
"cliVersion": "2.16.5",
"priorBundleVersion": "codeql-bundle-v2.16.4",
"priorCliVersion": "2.16.4"
"bundleVersion": "codeql-bundle-v2.17.0",
"cliVersion": "2.17.0",
"priorBundleVersion": "codeql-bundle-v2.16.5",
"priorCliVersion": "2.16.5"
}
@@ -44,6 +44,7 @@ export interface FeatureEnablement {
* Each value of this enum should end with `_enabled`.
*/
export enum Feature {
CliSarifMerge = "cli_sarif_merge_enabled",
CppDependencyInstallation = "cpp_dependency_installation_enabled",
CppTrapCachingEnabled = "cpp_trap_caching_enabled",
DisableJavaBuildlessEnabled = "disable_java_buildless_enabled",

@@ -58,6 +59,12 @@ export const featureConfig: Record<
Feature,
{ envVar: string; minimumVersion: string | undefined; defaultValue: boolean }
> = {
[Feature.CliSarifMerge]: {
envVar: "CODEQL_ACTION_CLI_SARIF_MERGE",
// This is guarded by a `supportsFeature` check rather than by a version check.
minimumVersion: undefined,
defaultValue: false,
},
[Feature.CppDependencyInstallation]: {
envVar: "CODEQL_EXTRACTOR_CPP_AUTOINSTALL_DEPENDENCIES",
minimumVersion: "2.15.0",
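The new CliSarifMerge flag has no minimum CLI version and defaults to false, so it is driven by its environment variable (or the remote feature-flag service) and additionally gated by a supportsFeature capability check in upload-lib.ts. A hedged sketch of forcing it on locally, for example in a test, is below; the Features constructor arguments mirror how upload-lib.ts builds the object later in this diff.

// Illustrative only: enable the CLI-based SARIF merge path via the env var.
process.env["CODEQL_ACTION_CLI_SARIF_MERGE"] = "true";

const features = new Features(
  gitHubVersion,
  repositoryNwo,
  actionsUtil.getTemporaryDirectory(),
  logger,
);

if (await features.getValue(Feature.CliSarifMerge)) {
  // The upload step will try `codeql github merge-results` (see upload-lib.ts below).
}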
@@ -259,6 +259,9 @@ export async function addFingerprints(
sourceRoot: string,
logger: Logger,
): Promise<SarifFile> {
logger.info(
"Adding fingerprints to SARIF file. For more information, see https://docs.github.com/en/enterprise-cloud@latest/code-security/code-scanning/integrating-with-code-scanning/sarif-support-for-code-scanning#providing-data-to-track-code-scanning-alerts-across-runs",
);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile: { [filename: string]: hashCallback[] } = {};
+20 -17

@@ -76,18 +76,19 @@ async function runWrapper() {
const error = wrapError(unwrappedError);
core.setFailed(error.message);

await sendStatusReport(
await createStatusReportBase(
ActionName.InitPost,
getActionsStatus(error),
startedAt,
config,
await checkDiskUsage(),
logger,
error.message,
error.stack,
),
const statusReportBase = await createStatusReportBase(
ActionName.InitPost,
getActionsStatus(error),
startedAt,
config,
await checkDiskUsage(),
logger,
error.message,
error.stack,
);
if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}
return;
}
const jobStatus = initActionPostHelper.getFinalJobStatus();

@@ -101,12 +102,14 @@ async function runWrapper() {
await checkDiskUsage(),
logger,
);
const statusReport: InitPostStatusReport = {
...statusReportBase,
...uploadFailedSarifResult,
job_status: initActionPostHelper.getFinalJobStatus(),
};
await sendStatusReport(statusReport);
if (statusReportBase !== undefined) {
const statusReport: InitPostStatusReport = {
...statusReportBase,
...uploadFailedSarifResult,
job_status: initActionPostHelper.getFinalJobStatus(),
};
await sendStatusReport(statusReport);
}
}

void runWrapper();
+26 -21

@@ -120,6 +120,10 @@ async function sendCompletedStatusReport(
error?.stack,
);

if (statusReportBase === undefined) {
return;
}

const workflowLanguages = getOptionalInput("languages");

const initStatusReport: InitStatusReport = {

@@ -226,17 +230,17 @@ async function run() {
core.exportVariable(EnvVar.INIT_ACTION_HAS_RUN, "true");

try {
await sendStatusReport(
await createStatusReportBase(
ActionName.Init,
"starting",
startedAt,
config,
await checkDiskUsage(logger),
logger,
),
const statusReportBase = await createStatusReportBase(
ActionName.Init,
"starting",
startedAt,
config,
await checkDiskUsage(logger),
logger,
);

if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}
const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
gitHubVersion.type,
);

@@ -315,18 +319,19 @@ async function run() {
} catch (unwrappedError) {
const error = wrapError(unwrappedError);
core.setFailed(error.message);
await sendStatusReport(
await createStatusReportBase(
ActionName.Init,
error instanceof ConfigurationError ? "user-error" : "aborted",
startedAt,
config,
await checkDiskUsage(),
logger,
error.message,
error.stack,
),
const statusReportBase = await createStatusReportBase(
ActionName.Init,
error instanceof ConfigurationError ? "user-error" : "aborted",
startedAt,
config,
await checkDiskUsage(),
logger,
error.message,
error.stack,
);
if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}
return;
}
@@ -34,16 +34,17 @@ async function run() {
let config: Config | undefined;

try {
await sendStatusReport(
await createStatusReportBase(
ActionName.ResolveEnvironment,
"starting",
startedAt,
config,
await checkDiskUsage(),
logger,
),
const statusReportBase = await createStatusReportBase(
ActionName.ResolveEnvironment,
"starting",
startedAt,
config,
await checkDiskUsage(),
logger,
);
if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}

const gitHubVersion = await getGitHubVersion();
checkGitHubVersionInRange(gitHubVersion, logger);

@@ -80,33 +81,35 @@ async function run() {
`Failed to resolve a build environment suitable for automatically building your code. ${error.message}`,
);

await sendStatusReport(
await createStatusReportBase(
ActionName.ResolveEnvironment,
getActionsStatus(error),
startedAt,
config,
await checkDiskUsage(),
logger,
error.message,
error.stack,
),
const statusReportBase = await createStatusReportBase(
ActionName.ResolveEnvironment,
getActionsStatus(error),
startedAt,
config,
await checkDiskUsage(),
logger,
error.message,
error.stack,
);
if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}
}

return;
}

await sendStatusReport(
await createStatusReportBase(
ActionName.ResolveEnvironment,
"success",
startedAt,
config,
await checkDiskUsage(),
logger,
),
const statusReportBase = await createStatusReportBase(
ActionName.ResolveEnvironment,
"success",
startedAt,
config,
await checkDiskUsage(),
logger,
);
if (statusReportBase !== undefined) {
await sendStatusReport(statusReportBase);
}
}

async function runWrapper() {
+34 -31

@@ -51,32 +51,35 @@ test("createStatusReportBase", async (t) => {
"failure cause",
"exception stack trace",
);
t.truthy(statusReport);

t.is(statusReport.action_name, ActionName.Init);
t.is(statusReport.action_oid, "unknown");
t.is(typeof statusReport.action_version, "string");
t.is(
statusReport.action_started_at,
new Date("May 19, 2023 05:19:00").toISOString(),
);
t.is(statusReport.actions_event_name, "dynamic");
t.is(statusReport.analysis_key, "analysis-key");
t.is(statusReport.build_mode, BuildMode.None);
t.is(statusReport.cause, "failure cause");
t.is(statusReport.commit_oid, process.env["GITHUB_SHA"]!);
t.is(statusReport.exception, "exception stack trace");
t.is(statusReport.job_name, process.env["GITHUB_JOB"] || "");
t.is(typeof statusReport.job_run_uuid, "string");
t.is(statusReport.languages, "java,swift");
t.is(statusReport.ref, process.env["GITHUB_REF"]!);
t.is(statusReport.runner_available_disk_space_bytes, 100);
t.is(statusReport.runner_image_version, process.env["ImageVersion"]);
t.is(statusReport.runner_os, process.env["RUNNER_OS"]!);
t.is(statusReport.started_at, process.env[EnvVar.WORKFLOW_STARTED_AT]!);
t.is(statusReport.status, "failure");
t.is(statusReport.workflow_name, process.env["GITHUB_WORKFLOW"] || "");
t.is(statusReport.workflow_run_attempt, 2);
t.is(statusReport.workflow_run_id, 100);
if (statusReport !== undefined) {
t.is(statusReport.action_name, ActionName.Init);
t.is(statusReport.action_oid, "unknown");
t.is(typeof statusReport.action_version, "string");
t.is(
statusReport.action_started_at,
new Date("May 19, 2023 05:19:00").toISOString(),
);
t.is(statusReport.actions_event_name, "dynamic");
t.is(statusReport.analysis_key, "analysis-key");
t.is(statusReport.build_mode, BuildMode.None);
t.is(statusReport.cause, "failure cause");
t.is(statusReport.commit_oid, process.env["GITHUB_SHA"]!);
t.is(statusReport.exception, "exception stack trace");
t.is(statusReport.job_name, process.env["GITHUB_JOB"] || "");
t.is(typeof statusReport.job_run_uuid, "string");
t.is(statusReport.languages, "java,swift");
t.is(statusReport.ref, process.env["GITHUB_REF"]!);
t.is(statusReport.runner_available_disk_space_bytes, 100);
t.is(statusReport.runner_image_version, process.env["ImageVersion"]);
t.is(statusReport.runner_os, process.env["RUNNER_OS"]!);
t.is(statusReport.started_at, process.env[EnvVar.WORKFLOW_STARTED_AT]!);
t.is(statusReport.status, "failure");
t.is(statusReport.workflow_name, process.env["GITHUB_WORKFLOW"] || "");
t.is(statusReport.workflow_run_attempt, 2);
t.is(statusReport.workflow_run_id, 100);
}
});
});
@@ -96,7 +99,7 @@ test("createStatusReportBase_firstParty", async (t) => {
"failure cause",
"exception stack trace",
)
).first_party_analysis,
)?.first_party_analysis,
false,
);

@@ -112,7 +115,7 @@ test("createStatusReportBase_firstParty", async (t) => {
"failure cause",
"exception stack trace",
)
).first_party_analysis,
)?.first_party_analysis,
true,
);

@@ -129,7 +132,7 @@ test("createStatusReportBase_firstParty", async (t) => {
"failure cause",
"exception stack trace",
)
).first_party_analysis,
)?.first_party_analysis,
false,
);

@@ -145,7 +148,7 @@ test("createStatusReportBase_firstParty", async (t) => {
"failure cause",
"exception stack trace",
)
).first_party_analysis,
)?.first_party_analysis,
true,
);

@@ -162,7 +165,7 @@ test("createStatusReportBase_firstParty", async (t) => {
"failure cause",
"exception stack trace",
)
).first_party_analysis,
)?.first_party_analysis,
true,
);

@@ -178,7 +181,7 @@ test("createStatusReportBase_firstParty", async (t) => {
"failure cause",
"exception stack trace",
)
).first_party_analysis,
)?.first_party_analysis,
true,
);
});
+104 -94

@@ -233,6 +233,7 @@ export interface EventReport {
* @param startedAt The time this action started executing.
* @param cause Cause of failure (only supply if status is 'failure')
* @param exception Exception (only supply if status is 'failure')
* @returns undefined if an exception was thrown.
*/
export async function createStatusReportBase(
actionName: ActionName,

@@ -243,103 +244,112 @@ export async function createStatusReportBase(
logger: Logger,
cause?: string,
exception?: string,
): Promise<StatusReportBase> {
const commitOid = getOptionalInput("sha") || process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const jobRunUUID = process.env[EnvVar.JOB_RUN_UUID] || "";
const workflowRunID = getWorkflowRunID();
const workflowRunAttempt = getWorkflowRunAttempt();
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[EnvVar.WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
}
const runnerOs = getRequiredEnvParam("RUNNER_OS");
const codeQlCliVersion = getCachedCodeQlVersion();
const actionRef = process.env["GITHUB_ACTION_REF"];
const testingEnvironment = process.env[EnvVar.TESTING_ENVIRONMENT] || "";
// re-export the testing environment variable so that it is available to subsequent steps,
// even if it was only set for this step
if (testingEnvironment !== "") {
core.exportVariable(EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
}

const statusReport: StatusReportBase = {
action_name: actionName,
action_oid: "unknown", // TODO decide if it's possible to fill this in
action_ref: actionRef,
action_started_at: actionStartedAt.toISOString(),
action_version: getActionVersion(),
analysis_key,
build_mode: config?.buildMode,
commit_oid: commitOid,
first_party_analysis: isFirstPartyAnalysis(actionName),
job_name: jobName,
job_run_uuid: jobRunUUID,
ref,
runner_os: runnerOs,
started_at: workflowStartedAt,
status,
testing_environment: testingEnvironment,
workflow_name: workflowName,
workflow_run_attempt: workflowRunAttempt,
workflow_run_id: workflowRunID,
};
): Promise<StatusReportBase | undefined> {
try {
statusReport.actions_event_name = getWorkflowEventName();
const commitOid =
getOptionalInput("sha") || process.env["GITHUB_SHA"] || "";
const ref = await getRef();
const jobRunUUID = process.env[EnvVar.JOB_RUN_UUID] || "";
const workflowRunID = getWorkflowRunID();
const workflowRunAttempt = getWorkflowRunAttempt();
const workflowName = process.env["GITHUB_WORKFLOW"] || "";
const jobName = process.env["GITHUB_JOB"] || "";
const analysis_key = await getAnalysisKey();
let workflowStartedAt = process.env[EnvVar.WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(EnvVar.WORKFLOW_STARTED_AT, workflowStartedAt);
}
const runnerOs = getRequiredEnvParam("RUNNER_OS");
const codeQlCliVersion = getCachedCodeQlVersion();
const actionRef = process.env["GITHUB_ACTION_REF"] || "";
const testingEnvironment = process.env[EnvVar.TESTING_ENVIRONMENT] || "";
// re-export the testing environment variable so that it is available to subsequent steps,
// even if it was only set for this step
if (testingEnvironment !== "") {
core.exportVariable(EnvVar.TESTING_ENVIRONMENT, testingEnvironment);
}

const statusReport: StatusReportBase = {
action_name: actionName,
action_oid: "unknown", // TODO decide if it's possible to fill this in
action_ref: actionRef,
action_started_at: actionStartedAt.toISOString(),
action_version: getActionVersion(),
analysis_key,
build_mode: config?.buildMode,
commit_oid: commitOid,
first_party_analysis: isFirstPartyAnalysis(actionName),
job_name: jobName,
job_run_uuid: jobRunUUID,
ref,
runner_os: runnerOs,
started_at: workflowStartedAt,
status,
testing_environment: testingEnvironment,
workflow_name: workflowName,
workflow_run_attempt: workflowRunAttempt,
workflow_run_id: workflowRunID,
};

try {
statusReport.actions_event_name = getWorkflowEventName();
} catch (e) {
logger.warning(`Could not determine the workflow event name: ${e}.`);
}
if (config) {
statusReport.languages = config.languages.join(",");
}

if (diskInfo) {
statusReport.runner_available_disk_space_bytes =
diskInfo.numAvailableBytes;
statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes;
}

// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (
status === "success" ||
status === "failure" ||
status === "aborted" ||
status === "user-error"
) {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
if ("RUNNER_ARCH" in process.env) {
// RUNNER_ARCH is available only in GHES 3.4 and later
// Values other than X86, X64, ARM, or ARM64 are discarded server side
statusReport.runner_arch = process.env["RUNNER_ARCH"];
}
if (runnerOs === "Windows" || runnerOs === "macOS") {
statusReport.runner_os_release = os.release();
}
if (codeQlCliVersion !== undefined) {
statusReport.codeql_version = codeQlCliVersion.version;
}
const imageVersion = process.env["ImageVersion"];
if (imageVersion) {
statusReport.runner_image_version = imageVersion;
}

return statusReport;
} catch (e) {
logger.warning(`Could not determine the workflow event name: ${e}.`);
logger.warning(
`Caught an exception while gathering information for telemetry: ${e}. Will skip sending status report.`,
);
return undefined;
}
if (config) {
statusReport.languages = config.languages.join(",");
}

if (diskInfo) {
statusReport.runner_available_disk_space_bytes = diskInfo.numAvailableBytes;
statusReport.runner_total_disk_space_bytes = diskInfo.numTotalBytes;
}

// Add optional parameters
if (cause) {
statusReport.cause = cause;
}
if (exception) {
statusReport.exception = exception;
}
if (
status === "success" ||
status === "failure" ||
status === "aborted" ||
status === "user-error"
) {
statusReport.completed_at = new Date().toISOString();
}
const matrix = getRequiredInput("matrix");
if (matrix) {
statusReport.matrix_vars = matrix;
}
if ("RUNNER_ARCH" in process.env) {
// RUNNER_ARCH is available only in GHES 3.4 and later
// Values other than X86, X64, ARM, or ARM64 are discarded server side
statusReport.runner_arch = process.env["RUNNER_ARCH"];
}
if (runnerOs === "Windows" || runnerOs === "macOS") {
statusReport.runner_os_release = os.release();
}
if (codeQlCliVersion !== undefined) {
statusReport.codeql_version = codeQlCliVersion.version;
}
const imageVersion = process.env["ImageVersion"];
if (imageVersion) {
statusReport.runner_image_version = imageVersion;
}

return statusReport;
}

const OUT_OF_DATE_MSG =
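The net effect of the status-report.ts change above: createStatusReportBase now returns Promise<StatusReportBase | undefined>, with all telemetry gathering inside one try block that logs a warning and returns undefined on failure instead of failing the action. Every call site in this diff therefore adopts the same guard, shown here as a minimal sketch using identifiers that appear elsewhere in this commit.

// Minimal sketch of the new call pattern; skip sending when the base report
// could not be assembled.
const statusReportBase = await createStatusReportBase(
  ActionName.UploadSarif,
  "starting",
  startedAt,
  config,
  await checkDiskUsage(),
  logger,
);
if (statusReportBase !== undefined) {
  await sendStatusReport(statusReportBase);
}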
@@ -1,11 +1,13 @@
import type { VersionInfo } from "./codeql";

export enum ToolsFeature {
AnalysisSummaryV2IsDefault = "analysisSummaryV2Default",
BuildModeOption = "buildModeOption",
IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",
InformsAboutUnsupportedPathFilters = "informsAboutUnsupportedPathFilters",
SetsCodeqlRunnerEnvVar = "setsCodeqlRunnerEnvVar",
TraceCommandUseBuildMode = "traceCommandUseBuildMode",
SarifMergeRunsFromEqualCategory = "sarifMergeRunsFromEqualCategory",
}

/**
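The new enum members mark CLI capabilities rather than version thresholds. Checks such as isSupportedToolsFeature in the codeql.ts hunk above presumably resolve them against the features map on the VersionInfo the CLI reports; the helper below is a hedged sketch of that lookup, not the repository's exact implementation.

import type { VersionInfo } from "./codeql";

// Assumed lookup: a tools feature is supported when the CLI lists it in its
// reported features map.
export function isSupportedToolsFeature(
  versionInfo: VersionInfo,
  feature: ToolsFeature,
): boolean {
  return !!versionInfo.features && versionInfo.features[feature] === true;
}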
@@ -42,6 +42,7 @@ test("validate correct payload used for push, PR merge commit, and PR head", asy
undefined,
["CodeQL", "eslint"],
"mergeBaseCommit",
getRunnerLogger(true),
);
// Not triggered by a pull request
t.falsy(pushPayload.base_ref);

@@ -65,6 +66,7 @@ test("validate correct payload used for push, PR merge commit, and PR head", asy
undefined,
["CodeQL", "eslint"],
"mergeBaseCommit",
getRunnerLogger(true),
);
// Uploads for a merge commit use the merge base
t.deepEqual(prMergePayload.base_ref, "refs/heads/master");

@@ -82,6 +84,7 @@
undefined,
["CodeQL", "eslint"],
"mergeBaseCommit",
getRunnerLogger(true),
);
// Uploads for the head use the PR base
t.deepEqual(prHeadPayload.base_ref, "refs/heads/master");

@@ -317,9 +320,9 @@ test("accept results with invalid artifactLocation.uri value", (t) => {
const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
uploadLib.validateSarifFileSchema(sarifFile, mockLogger);

t.deepEqual(loggedMessages.length, 1);
t.deepEqual(loggedMessages.length, 2);
t.deepEqual(
loggedMessages[0],
loggedMessages[1],
"Warning: 'not a valid URI' is not a valid URI in 'instance.runs[0].results[0].locations[0].physicalLocation.artifactLocation.uri'.",
);
});
+142 -3

@@ -8,13 +8,26 @@ import fileUrl from "file-url";
import * as jsonschema from "jsonschema";

import * as actionsUtil from "./actions-util";
import { getOptionalInput, getRequiredInput } from "./actions-util";
import * as api from "./api-client";
import { getGitHubVersion } from "./api-client";
import { CodeQL, getCodeQL } from "./codeql";
import { getConfig } from "./config-utils";
import { EnvVar } from "./environment";
import { Feature, Features } from "./feature-flags";
import * as fingerprints from "./fingerprints";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { parseRepositoryNwo, RepositoryNwo } from "./repository";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import { SarifFile, ConfigurationError, wrapError } from "./util";
import {
ConfigurationError,
getRequiredEnvParam,
GitHubVersion,
SarifFile,
wrapError,
} from "./util";

const GENERIC_403_MSG =
"The repo on which this action is running has not opted-in to CodeQL code scanning.";

@@ -23,13 +36,15 @@ const GENERIC_404_MSG =

// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
function combineSarifFiles(sarifFiles: string[]): SarifFile {
function combineSarifFiles(sarifFiles: string[], logger: Logger): SarifFile {
logger.info(`Loading SARIF file(s)`);
const combinedSarif: SarifFile = {
version: null,
runs: [],
};

for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs.readFileSync(sarifFile, "utf8"),
) as SarifFile;

@@ -48,6 +63,108 @@ function combineSarifFiles(sarifFiles: string[]): SarifFile {
return combinedSarif;
}
/**
* Checks whether all the runs in the given SARIF files were produced by CodeQL.
* @param sarifFiles The list of SARIF files to check.
*/
function areAllRunsProducedByCodeQL(sarifFiles: string[]): boolean {
return sarifFiles.every((sarifFile) => {
const sarifObject = JSON.parse(
fs.readFileSync(sarifFile, "utf8"),
) as SarifFile;

return sarifObject.runs?.every(
(run) => run.tool?.driver?.name === "CodeQL",
);
});
}

// Takes a list of paths to sarif files and combines them together using the
// CLI `github merge-results` command when all SARIF files are produced by
// CodeQL. Otherwise, it will fall back to combining the files in the action.
// Returns the contents of the combined sarif file.
async function combineSarifFilesUsingCLI(
sarifFiles: string[],
gitHubVersion: GitHubVersion,
features: Features,
logger: Logger,
): Promise<SarifFile> {
logger.info("Combining SARIF files using the CodeQL CLI");
if (sarifFiles.length === 1) {
return JSON.parse(fs.readFileSync(sarifFiles[0], "utf8")) as SarifFile;
}

if (!areAllRunsProducedByCodeQL(sarifFiles)) {
logger.debug(
"Not all SARIF files were produced by CodeQL. Merging files in the action.",
);

// If not, use the naive method of combining the files.
return combineSarifFiles(sarifFiles, logger);
}

// Initialize CodeQL, either by using the config file from the 'init' step,
// or by initializing it here.
let codeQL: CodeQL;
let tempDir: string = actionsUtil.getTemporaryDirectory();

const config = await getConfig(tempDir, logger);
if (config !== undefined) {
codeQL = await getCodeQL(config.codeQLCmd);
tempDir = config.tempDir;
} else {
logger.info(
"Initializing CodeQL since the 'init' Action was not called before this step.",
);

const apiDetails = {
auth: getRequiredInput("token"),
externalRepoAuth: getOptionalInput("external-repository-token"),
url: getRequiredEnvParam("GITHUB_SERVER_URL"),
apiURL: getRequiredEnvParam("GITHUB_API_URL"),
};

const codeQLDefaultVersionInfo = await features.getDefaultCliVersion(
gitHubVersion.type,
);

const initCodeQLResult = await initCodeQL(
undefined, // There is no tools input on the upload action
apiDetails,
tempDir,
gitHubVersion.type,
codeQLDefaultVersionInfo,
logger,
);

codeQL = initCodeQLResult.codeql;
}

if (
!(await codeQL.supportsFeature(
ToolsFeature.SarifMergeRunsFromEqualCategory,
))
) {
logger.warning(
"The CodeQL CLI does not support merging SARIF files. Merging files in the action.",
);

return combineSarifFiles(sarifFiles, logger);
}

const baseTempDir = path.resolve(tempDir, "combined-sarif");
fs.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs.mkdtempSync(path.resolve(baseTempDir, "output-"));

const outputFile = path.resolve(outputDirectory, "combined-sarif.sarif");

await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true,
});

return JSON.parse(fs.readFileSync(outputFile, "utf8")) as SarifFile;
}
// Populates the run.automationDetails.id field using the analysis_key and environment
// and return an updated sarif file contents.
export function populateRunAutomationDetails(

@@ -242,6 +359,7 @@ function countResultsInSarif(sarif: string): number {
// Validates that the given file path refers to a valid SARIF file.
// Throws an error if the file is invalid.
export function validateSarifFileSchema(sarifFilePath: string, logger: Logger) {
logger.info(`Validating ${sarifFilePath}`);
let sarif;
try {
sarif = JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile;

@@ -301,7 +419,9 @@ export function buildPayload(
environment: string | undefined,
toolNames: string[],
mergeBaseCommitOid: string | undefined,
logger: Logger,
) {
logger.info(`Combining SARIF files using CLI`);
const payloadObj = {
commit_oid: commitOid,
ref,

@@ -363,12 +483,27 @@ async function uploadFiles(
logger.startGroup("Uploading results");
logger.info(`Processing sarif files: ${JSON.stringify(sarifFiles)}`);

const gitHubVersion = await getGitHubVersion();
const features = new Features(
gitHubVersion,
repositoryNwo,
actionsUtil.getTemporaryDirectory(),
logger,
);

// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
validateSarifFileSchema(file, logger);
}

let sarif = combineSarifFiles(sarifFiles);
let sarif = (await features.getValue(Feature.CliSarifMerge))
? await combineSarifFilesUsingCLI(
sarifFiles,
gitHubVersion,
features,
logger,
)
: combineSarifFiles(sarifFiles, logger);
sarif = await fingerprints.addFingerprints(sarif, sourceRoot, logger);

sarif = populateRunAutomationDetails(

@@ -380,8 +515,11 @@ async function uploadFiles(

const toolNames = util.getToolNames(sarif);

logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
const checkoutURI = fileUrl(sourceRoot);

@@ -397,6 +535,7 @@ async function uploadFiles(
environment,
toolNames,
await actionsUtil.determineMergeBaseCommitOid(),
logger,
);

// Log some useful debug info about the info
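Taken together, the upload-lib.ts hunks above make the upload step choose between the new CLI-based merge and the existing in-action merge. A condensed, hypothetical helper summarising that selection order is sketched below; the repository implements the same checks inline rather than through a function with this name.

// Illustrative decision helper (not in the repo): mirrors the fallback order above.
async function chooseSarifMergeStrategy(
  sarifFiles: string[],
  features: Features,
  codeql: CodeQL,
): Promise<"cli" | "in-action"> {
  // Feature flag off: keep the existing in-action merge.
  if (!(await features.getValue(Feature.CliSarifMerge))) return "in-action";
  // Mixed tooling: `codeql github merge-results` only handles CodeQL runs.
  if (!areAllRunsProducedByCodeQL(sarifFiles)) return "in-action";
  // Older CLI without the merge capability: fall back as well.
  if (
    !(await codeql.supportsFeature(ToolsFeature.SarifMergeRunsFromEqualCategory))
  ) {
    return "in-action";
  }
  return "cli";
}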
@@ -0,0 +1,49 @@
import * as fs from "fs";
import * as path from "path";

import * as core from "@actions/core";

import * as actionsUtil from "./actions-util";

export async function uploadArtifacts(
uploadDebugArtifacts: (
toUpload: string[],
rootDir: string,
artifactName: string,
) => Promise<void>,
) {
const tempDir = actionsUtil.getTemporaryDirectory();

// Upload Actions SARIF artifacts for debugging when environment variable is set
if (process.env["CODEQL_ACTION_DEBUG_COMBINED_SARIF"] === "true") {
core.info(
"Uploading available combined SARIF files as Actions debugging artifact...",
);

const baseTempDir = path.resolve(tempDir, "combined-sarif");

const toUpload: string[] = [];

if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);

for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));

for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
}
}
}

if (toUpload.length > 0) {
await uploadDebugArtifacts(
toUpload,
baseTempDir,
"upload-debug-artifacts",
);
}
}
}
@@ -0,0 +1,24 @@
/**
* This file is the entry point for the `post:` hook of `upload-sarif-action.yml`.
* It will run after the all steps in this job, in reverse order in relation to
* other `post:` hooks.
*/
import * as core from "@actions/core";

import * as debugArtifacts from "./debug-artifacts";
import * as uploadSarifActionPostHelper from "./upload-sarif-action-post-helper";
import { wrapError } from "./util";

async function runWrapper() {
try {
await uploadSarifActionPostHelper.uploadArtifacts(
debugArtifacts.uploadDebugArtifacts,
);
} catch (error) {
core.setFailed(
`upload-sarif post-action step failed: ${wrapError(error).message}`,
);
}
}

void runWrapper();
+30 -25

@@ -41,11 +41,13 @@ async function sendSuccessStatusReport(
await checkDiskUsage(),
logger,
);
const statusReport: UploadSarifStatusReport = {
...statusReportBase,
...uploadStats,
};
await sendStatusReport(statusReport);
if (statusReportBase !== undefined) {
const statusReport: UploadSarifStatusReport = {
...statusReportBase,
...uploadStats,
};
await sendStatusReport(statusReport);
}
}

async function run() {

@@ -56,16 +58,17 @@ async function run() {
const gitHubVersion = await getGitHubVersion();
checkActionVersion(getActionVersion(), gitHubVersion);

await sendStatusReport(
await createStatusReportBase(
ActionName.UploadSarif,
"starting",
startedAt,
undefined,
await checkDiskUsage(),
logger,
),
const startingStatusReportBase = await createStatusReportBase(
ActionName.UploadSarif,
"starting",
startedAt,
undefined,
await checkDiskUsage(),
logger,
);
if (startingStatusReportBase !== undefined) {
await sendStatusReport(startingStatusReportBase);
}

try {
const uploadResult = await upload_lib.uploadFromActions(

@@ -96,18 +99,20 @@ async function run() {
const message = error.message;
core.setFailed(message);
console.log(error);
await sendStatusReport(
await createStatusReportBase(
ActionName.UploadSarif,
getActionsStatus(error),
startedAt,
undefined,
await checkDiskUsage(),
logger,
message,
error.stack,
),

const errorStatusReportBase = await createStatusReportBase(
ActionName.UploadSarif,
getActionsStatus(error),
startedAt,
undefined,
await checkDiskUsage(),
logger,
message,
error.stack,
);
if (errorStatusReportBase !== undefined) {
await sendStatusReport(errorStatusReportBase);
}
return;
}
}