Merge branch 'main' into mbg/csharp/more-cache-locations

This commit is contained in:
Michael B. Gale
2025-11-26 10:01:51 +00:00
140 changed files with 7602 additions and 7219 deletions
+1 -1
View File
@@ -80,7 +80,7 @@ export function isRunningLocalAction(): boolean {
*
* This can be used to get the Action's name or tell if we're running a local Action.
*/
export function getRelativeScriptPath(): string {
function getRelativeScriptPath(): string {
const runnerTemp = getRequiredEnvParam("RUNNER_TEMP");
const actionsDirectory = path.join(path.dirname(runnerTemp), "_actions");
return path.relative(actionsDirectory, __filename);
+1 -1
View File
@@ -98,7 +98,7 @@ export async function getAnalysisKinds(
export const codeQualityQueries: string[] = ["code-quality"];
// Enumerates API endpoints that accept SARIF files.
export enum SARIF_UPLOAD_ENDPOINT {
enum SARIF_UPLOAD_ENDPOINT {
CODE_SCANNING = "PUT /repos/:owner/:repo/code-scanning/analysis",
CODE_QUALITY = "PUT /repos/:owner/:repo/code-quality/analysis",
}
+15 -6
View File
@@ -25,7 +25,7 @@ import {
isCodeQualityEnabled,
isCodeScanningEnabled,
} from "./config-utils";
import { uploadDatabases } from "./database-upload";
import { cleanupAndUploadDatabases } from "./database-upload";
import {
DependencyCacheUploadStatusReport,
uploadDependencyCaches,
@@ -35,7 +35,7 @@ import { EnvVar } from "./environment";
import { Feature, Features } from "./feature-flags";
import { KnownLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import { uploadOverlayBaseDatabaseToCache } from "./overlay-database-utils";
import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay-database-utils";
import { getRepositoryNwo } from "./repository";
import * as statusReport from "./status-report";
import {
@@ -418,12 +418,21 @@ async function run() {
}
// Possibly upload the overlay-base database to actions cache.
// If databases are to be uploaded, they will first be cleaned up at the overlay level.
await uploadOverlayBaseDatabaseToCache(codeql, config, logger);
// Note: Take care with the ordering of this call since databases may be cleaned up
// at the `overlay` level.
await cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger);
// Possibly upload the database bundles for remote queries.
// If databases are to be uploaded, they will first be cleaned up at the clear level.
await uploadDatabases(repositoryNwo, codeql, config, apiDetails, logger);
// Note: Take care with the ordering of this call since databases may be cleaned up
// at the `overlay` or `clear` level.
await cleanupAndUploadDatabases(
repositoryNwo,
codeql,
config,
apiDetails,
features,
logger,
);
// Possibly upload the TRAP caches for later re-use
const trapCacheUploadStartTime = performance.now();
-5
View File
@@ -18,11 +18,6 @@ import {
const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version";
export enum DisallowedAPIVersionReason {
ACTION_TOO_OLD,
ACTION_TOO_NEW,
}
export type GitHubApiCombinedDetails = GitHubApiDetails &
GitHubApiExternalRepoDetails;
+1 -4
View File
@@ -159,10 +159,7 @@ type CliErrorConfiguration = {
* All of our caught CLI error messages that we handle specially: ie. if we
* would like to categorize an error as a configuration error or not.
*/
export const cliErrorsConfig: Record<
CliConfigErrorCategory,
CliErrorConfiguration
> = {
const cliErrorsConfig: Record<CliConfigErrorCategory, CliErrorConfiguration> = {
[CliConfigErrorCategory.AutobuildError]: {
cliErrorMessageCandidates: [
new RegExp("We were unable to automatically build your code"),
+8 -5
View File
@@ -35,7 +35,7 @@ import { ToolsDownloadStatusReport } from "./tools-download";
import { ToolsFeature, isSupportedToolsFeature } from "./tools-features";
import { shouldEnableIndirectTracing } from "./tracer-config";
import * as util from "./util";
import { BuildMode, getErrorMessage } from "./util";
import { BuildMode, CleanupLevel, getErrorMessage } from "./util";
type Options = Array<string | number | boolean>;
@@ -141,7 +141,10 @@ export interface CodeQL {
/**
* Clean up all the databases within a database cluster.
*/
databaseCleanupCluster(config: Config, cleanupLevel: string): Promise<void>;
databaseCleanupCluster(
config: Config,
cleanupLevel: CleanupLevel,
): Promise<void>;
/**
* Run 'codeql database bundle'.
*/
@@ -513,7 +516,7 @@ export async function getCodeQLForTesting(
* version requirement. Must be set to true outside tests.
* @returns A new CodeQL object
*/
export async function getCodeQLForCmd(
async function getCodeQLForCmd(
cmd: string,
checkVersion: boolean,
): Promise<CodeQL> {
@@ -878,7 +881,7 @@ export async function getCodeQLForCmd(
},
async databaseCleanupCluster(
config: Config,
cleanupLevel: string,
cleanupLevel: CleanupLevel,
): Promise<void> {
const cacheCleanupFlag = (await util.codeQlVersionAtLeast(
this,
@@ -1222,7 +1225,7 @@ export async function getTrapCachingExtractorConfigArgsForLang(
*
* This will not exist if the configuration is being parsed in the Action.
*/
export function getGeneratedCodeScanningConfigPath(config: Config): string {
function getGeneratedCodeScanningConfigPath(config: Config): string {
return path.resolve(config.tempDir, "user-config.yaml");
}
+95 -53
View File
@@ -37,7 +37,9 @@ import {
ConfigurationError,
withTmpDir,
BuildMode,
DiskUsage,
} from "./util";
import * as util from "./util";
setupTests(test);
@@ -200,12 +202,9 @@ test("load code quality config", async (t) => {
);
// And the config we expect it to result in
const expectedConfig: configUtils.Config = {
version: actionsUtil.getActionVersion(),
const expectedConfig = createTestConfig({
analysisKinds: [AnalysisKind.CodeQuality],
languages: [KnownLanguage.actions],
buildMode: undefined,
originalUserInput: {},
// This gets set because we only have `AnalysisKind.CodeQuality`
computedConfig: {
"disable-default-queries": true,
@@ -219,14 +218,7 @@ test("load code quality config", async (t) => {
debugMode: false,
debugArtifactName: "",
debugDatabaseName: "",
trapCaches: {},
trapCacheDownloadTime: 0,
dependencyCachingEnabled: CachingKind.None,
extraQueryExclusions: [],
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
repositoryProperties: {},
};
});
t.deepEqual(config, expectedConfig);
});
@@ -507,9 +499,7 @@ test("load non-empty input", async (t) => {
};
// And the config we expect it to parse to
const expectedConfig: configUtils.Config = {
version: actionsUtil.getActionVersion(),
analysisKinds: [AnalysisKind.CodeScanning],
const expectedConfig = createTestConfig({
languages: [KnownLanguage.javascript],
buildMode: BuildMode.None,
originalUserInput: userConfig,
@@ -521,14 +511,7 @@ test("load non-empty input", async (t) => {
debugMode: false,
debugArtifactName: "my-artifact",
debugDatabaseName: "my-db",
trapCaches: {},
trapCacheDownloadTime: 0,
dependencyCachingEnabled: CachingKind.None,
extraQueryExclusions: [],
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
repositoryProperties: {},
};
});
const languagesInput = "javascript";
const configFilePath = createConfigFile(inputFileContents, tempDir);
@@ -990,12 +973,12 @@ interface OverlayDatabaseModeTestSetup {
features: Feature[];
isPullRequest: boolean;
isDefaultBranch: boolean;
repositoryOwner: string;
buildMode: BuildMode | undefined;
languages: Language[];
codeqlVersion: string;
gitRoot: string | undefined;
codeScanningConfig: configUtils.UserConfig;
diskUsage: DiskUsage | undefined;
}
const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
@@ -1003,12 +986,15 @@ const defaultOverlayDatabaseModeTestSetup: OverlayDatabaseModeTestSetup = {
features: [],
isPullRequest: false,
isDefaultBranch: false,
repositoryOwner: "github",
buildMode: BuildMode.None,
languages: [KnownLanguage.javascript],
codeqlVersion: CODEQL_OVERLAY_MINIMUM_VERSION,
gitRoot: "/some/git/root",
codeScanningConfig: {},
diskUsage: {
numAvailableBytes: 50_000_000_000,
numTotalBytes: 100_000_000_000,
},
};
const getOverlayDatabaseModeMacro = test.macro({
@@ -1041,6 +1027,8 @@ const getOverlayDatabaseModeMacro = test.macro({
setup.overlayDatabaseEnvVar;
}
sinon.stub(util, "checkDiskUsage").resolves(setup.diskUsage);
// Mock feature flags
const features = createFeatures(setup.features);
@@ -1049,12 +1037,6 @@ const getOverlayDatabaseModeMacro = test.macro({
.stub(actionsUtil, "isAnalyzingPullRequest")
.returns(setup.isPullRequest);
// Mock repository owner
const repository = {
owner: setup.repositoryOwner,
repo: "test-repo",
};
// Set up CodeQL mock
const codeql = mockCodeQLVersion(setup.codeqlVersion);
@@ -1077,7 +1059,6 @@ const getOverlayDatabaseModeMacro = test.macro({
const result = await configUtils.getOverlayDatabaseMode(
codeql,
repository,
features,
setup.languages,
tempDir, // sourceRoot
@@ -1205,6 +1186,45 @@ test(
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if runner disk space is too low",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
],
isDefaultBranch: true,
diskUsage: {
numAvailableBytes: 1_000_000_000,
numTotalBytes: 100_000_000_000,
},
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch if we can't determine runner disk space",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
],
isDefaultBranch: true,
diskUsage: undefined,
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay-base database on default branch when code-scanning feature enabled with disable-default-queries",
@@ -1375,6 +1395,45 @@ test(
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR if runner disk space is too low",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
],
isPullRequest: true,
diskUsage: {
numAvailableBytes: 1_000_000_000,
numTotalBytes: 100_000_000_000,
},
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR if we can't determine runner disk space",
{
languages: [KnownLanguage.javascript],
features: [
Feature.OverlayAnalysis,
Feature.OverlayAnalysisCodeScanningJavascript,
],
isPullRequest: true,
diskUsage: undefined,
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay analysis on PR when code-scanning feature enabled with disable-default-queries",
@@ -1499,10 +1558,9 @@ test(
test(
getOverlayDatabaseModeMacro,
"Overlay PR analysis by env for dsp-testing",
"Overlay PR analysis by env",
{
overlayDatabaseEnvVar: "overlay",
repositoryOwner: "dsp-testing",
},
{
overlayDatabaseMode: OverlayDatabaseMode.Overlay,
@@ -1512,10 +1570,10 @@ test(
test(
getOverlayDatabaseModeMacro,
"Overlay PR analysis by env for other-org",
"Overlay PR analysis by env on a runner with low disk space",
{
overlayDatabaseEnvVar: "overlay",
repositoryOwner: "other-org",
diskUsage: { numAvailableBytes: 0, numTotalBytes: 100_000_000_000 },
},
{
overlayDatabaseMode: OverlayDatabaseMode.Overlay,
@@ -1525,12 +1583,11 @@ test(
test(
getOverlayDatabaseModeMacro,
"Overlay PR analysis by feature flag for dsp-testing",
"Overlay PR analysis by feature flag",
{
languages: [KnownLanguage.javascript],
features: [Feature.OverlayAnalysis, Feature.OverlayAnalysisJavascript],
isPullRequest: true,
repositoryOwner: "dsp-testing",
},
{
overlayDatabaseMode: OverlayDatabaseMode.Overlay,
@@ -1538,21 +1595,6 @@ test(
},
);
test(
getOverlayDatabaseModeMacro,
"No overlay PR analysis by feature flag for other-org",
{
languages: [KnownLanguage.javascript],
features: [Feature.OverlayAnalysis, Feature.OverlayAnalysisJavascript],
isPullRequest: true,
repositoryOwner: "other-org",
},
{
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
},
);
test(
getOverlayDatabaseModeMacro,
"Fallback due to autobuild with traced language",
+48 -24
View File
@@ -43,10 +43,22 @@ import {
codeQlVersionAtLeast,
cloneObject,
isDefined,
checkDiskUsage,
} from "./util";
export * from "./config/db-config";
/**
* The minimum available disk space (in MB) required to perform overlay analysis.
* If the available disk space on the runner is below the threshold when deciding
* whether to perform overlay analysis, then the action will not perform overlay
* analysis unless overlay analysis has been explicitly enabled via environment
* variable.
*/
const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 20000;
const OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES =
OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB * 1_000_000;
export type RegistryConfigWithCredentials = RegistryConfigNoCredentials & {
// Token to use when downloading packs from this registry.
token: string;
@@ -148,6 +160,9 @@ export interface Config {
/** A value indicating how dependency caching should be used. */
dependencyCachingEnabled: CachingKind;
/** The keys of caches that we restored, if any. */
dependencyCachingRestoredKeys: string[];
/**
* Extra query exclusions to append to the config.
*/
@@ -176,7 +191,7 @@ export interface Config {
repositoryProperties: RepositoryProperties;
}
export async function getSupportedLanguageMap(
async function getSupportedLanguageMap(
codeql: CodeQL,
logger: Logger,
): Promise<Record<string, string>> {
@@ -239,7 +254,7 @@ export function hasActionsWorkflows(sourceRoot: string): boolean {
/**
* Gets the set of languages in the current repository.
*/
export async function getRawLanguagesInRepo(
async function getRawLanguagesInRepo(
repository: RepositoryNwo,
sourceRoot: string,
logger: Logger,
@@ -348,7 +363,7 @@ export function getRawLanguagesNoAutodetect(
* @returns A tuple containing a list of languages in this repository that might be
* analyzable and whether or not this list was determined automatically.
*/
export async function getRawLanguages(
async function getRawLanguages(
languagesInput: string | undefined,
repository: RepositoryNwo,
sourceRoot: string,
@@ -496,6 +511,7 @@ export async function initActionState(
trapCaches,
trapCacheDownloadTime,
dependencyCachingEnabled: getCachingKind(dependencyCachingEnabled),
dependencyCachingRestoredKeys: [],
extraQueryExclusions: [],
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
@@ -579,17 +595,11 @@ const OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES: Record<Language, Feature> = {
};
async function isOverlayAnalysisFeatureEnabled(
repository: RepositoryNwo,
features: FeatureEnablement,
codeql: CodeQL,
languages: Language[],
codeScanningConfig: UserConfig,
): Promise<boolean> {
// TODO: Remove the repository owner check once support for overlay analysis
// stabilizes, and no more backward-incompatible changes are expected.
if (!["github", "dsp-testing"].includes(repository.owner)) {
return false;
}
if (!(await features.getValue(Feature.OverlayAnalysis, codeql))) {
return false;
}
@@ -647,7 +657,6 @@ async function isOverlayAnalysisFeatureEnabled(
*/
export async function getOverlayDatabaseMode(
codeql: CodeQL,
repository: RepositoryNwo,
features: FeatureEnablement,
languages: Language[],
sourceRoot: string,
@@ -676,27 +685,43 @@ export async function getOverlayDatabaseMode(
);
} else if (
await isOverlayAnalysisFeatureEnabled(
repository,
features,
codeql,
languages,
codeScanningConfig,
)
) {
if (isAnalyzingPullRequest()) {
overlayDatabaseMode = OverlayDatabaseMode.Overlay;
useOverlayDatabaseCaching = true;
const diskUsage = await checkDiskUsage(logger);
if (
diskUsage === undefined ||
diskUsage.numAvailableBytes < OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_BYTES
) {
const diskSpaceMb =
diskUsage === undefined
? 0
: Math.round(diskUsage.numAvailableBytes / 1_000_000);
overlayDatabaseMode = OverlayDatabaseMode.None;
useOverlayDatabaseCaching = false;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing a pull request.",
);
} else if (await isAnalyzingDefaultBranch()) {
overlayDatabaseMode = OverlayDatabaseMode.OverlayBase;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing the default branch.",
`due to insufficient disk space (${diskSpaceMb} MB).`,
);
} else {
if (isAnalyzingPullRequest()) {
overlayDatabaseMode = OverlayDatabaseMode.Overlay;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing a pull request.",
);
} else if (await isAnalyzingDefaultBranch()) {
overlayDatabaseMode = OverlayDatabaseMode.OverlayBase;
useOverlayDatabaseCaching = true;
logger.info(
`Setting overlay database mode to ${overlayDatabaseMode} ` +
"with caching because we are analyzing the default branch.",
);
}
}
}
@@ -846,7 +871,6 @@ export async function initConfig(
const { overlayDatabaseMode, useOverlayDatabaseCaching } =
await getOverlayDatabaseMode(
inputs.codeql,
inputs.repository,
inputs.features,
config.languages,
inputs.sourceRoot,
@@ -1235,7 +1259,7 @@ export function isCodeQualityEnabled(config: Config): boolean {
* @returns Returns `AnalysisKind.CodeScanning` if `AnalysisKind.CodeScanning` is enabled;
* otherwise `AnalysisKind.CodeQuality`.
*/
export function getPrimaryAnalysisKind(config: Config): AnalysisKind {
function getPrimaryAnalysisKind(config: Config): AnalysisKind {
return isCodeScanningEnabled(config)
? AnalysisKind.CodeScanning
: AnalysisKind.CodeQuality;
+16 -8
View File
@@ -10,11 +10,12 @@ import { GitHubApiDetails } from "./api-client";
import * as apiClient from "./api-client";
import { createStubCodeQL } from "./codeql";
import { Config } from "./config-utils";
import { uploadDatabases } from "./database-upload";
import { cleanupAndUploadDatabases } from "./database-upload";
import * as gitUtils from "./git-utils";
import { KnownLanguage } from "./languages";
import { RepositoryNwo } from "./repository";
import {
createFeatures,
createTestConfig,
getRecordingLogger,
LoggedMessage,
@@ -91,11 +92,12 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
const loggedMessages = [];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
@@ -121,7 +123,7 @@ test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled",
await mockHttpRequests(201);
const loggedMessages = [];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
{
@@ -129,6 +131,7 @@ test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled",
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
@@ -155,11 +158,12 @@ test("Abort database upload if running against GHES", async (t) => {
config.gitHubVersion = { type: GitHubVariant.GHES, version: "3.0" };
const loggedMessages = [];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
config,
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
@@ -183,11 +187,12 @@ test("Abort database upload if not analyzing default branch", async (t) => {
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false);
const loggedMessages = [];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
@@ -212,11 +217,12 @@ test("Don't crash if uploading a database fails", async (t) => {
await mockHttpRequests(500);
const loggedMessages = [] as LoggedMessage[];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
@@ -243,11 +249,12 @@ test("Successfully uploading a database to github.com", async (t) => {
await mockHttpRequests(201);
const loggedMessages = [] as LoggedMessage[];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
testApiDetails,
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
@@ -272,7 +279,7 @@ test("Successfully uploading a database to GHEC-DR", async (t) => {
const databaseUploadSpy = await mockHttpRequests(201);
const loggedMessages = [] as LoggedMessage[];
await uploadDatabases(
await cleanupAndUploadDatabases(
testRepoName,
getCodeQL(),
getTestConfig(tmpDir),
@@ -281,6 +288,7 @@ test("Successfully uploading a database to GHEC-DR", async (t) => {
url: "https://tenant.ghe.com",
apiURL: undefined,
},
createFeatures([]),
getRecordingLogger(loggedMessages),
);
t.assert(
+12 -3
View File
@@ -5,17 +5,20 @@ import { AnalysisKind } from "./analyses";
import { getApiClient, GitHubApiDetails } from "./api-client";
import { type CodeQL } from "./codeql";
import { Config } from "./config-utils";
import { Feature, FeatureEnablement } from "./feature-flags";
import * as gitUtils from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay-database-utils";
import { RepositoryNwo } from "./repository";
import * as util from "./util";
import { bundleDb, parseGitHubUrl } from "./util";
import { bundleDb, CleanupLevel, parseGitHubUrl } from "./util";
export async function uploadDatabases(
export async function cleanupAndUploadDatabases(
repositoryNwo: RepositoryNwo,
codeql: CodeQL,
config: Config,
apiDetails: GitHubApiDetails,
features: FeatureEnablement,
logger: Logger,
): Promise<void> {
if (actionsUtil.getRequiredInput("upload-database") !== "true") {
@@ -50,10 +53,16 @@ export async function uploadDatabases(
return;
}
const cleanupLevel =
config.overlayDatabaseMode === OverlayDatabaseMode.OverlayBase &&
(await features.getValue(Feature.UploadOverlayDbToApi))
? CleanupLevel.Overlay
: CleanupLevel.Clear;
// Clean up the database, since intermediate results may still be written to the
// database if there is high RAM pressure.
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, "clear");
await codeql.databaseCleanupCluster(config, cleanupLevel);
});
const client = getApiClient();
+4 -4
View File
@@ -1,6 +1,6 @@
{
"bundleVersion": "codeql-bundle-v2.23.3",
"cliVersion": "2.23.3",
"priorBundleVersion": "codeql-bundle-v2.23.2",
"priorCliVersion": "2.23.2"
"bundleVersion": "codeql-bundle-v2.23.6",
"cliVersion": "2.23.6",
"priorBundleVersion": "codeql-bundle-v2.23.5",
"priorCliVersion": "2.23.5"
}
+252 -14
View File
@@ -7,6 +7,7 @@ import test from "ava";
import * as sinon from "sinon";
import { cacheKeyHashLength } from "./caching-utils";
import * as cachingUtils from "./caching-utils";
import { createStubCodeQL } from "./codeql";
import {
CacheConfig,
@@ -22,6 +23,8 @@ import {
cacheKey,
getCsharpDependencyDirs,
getCsharpTempDependencyDir,
uploadDependencyCaches,
CacheStoreResult,
} from "./dependency-caching";
import { Feature } from "./feature-flags";
import { KnownLanguage } from "./languages";
@@ -31,6 +34,7 @@ import {
getRecordingLogger,
checkExpectedLogMessages,
LoggedMessage,
createTestConfig,
} from "./testing-utils";
import { withTmpDir } from "./util";
@@ -261,15 +265,17 @@ test("downloadDependencyCaches - does not restore caches with feature keys if no
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const results = await downloadDependencyCaches(
const result = await downloadDependencyCaches(
codeql,
createFeatures([]),
[KnownLanguage.csharp],
logger,
);
t.is(results.length, 1);
t.is(results[0].language, KnownLanguage.csharp);
t.is(results[0].hit_kind, CacheHitKind.Miss);
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Miss);
t.deepEqual(result.restoredKeys, []);
t.assert(restoreCacheStub.calledOnce);
});
@@ -281,7 +287,8 @@ test("downloadDependencyCaches - restores caches with feature keys if features a
const logger = getRecordingLogger(messages);
const features = createFeatures([Feature.CsharpNewCacheKey]);
sinon.stub(glob, "hashFiles").resolves("abcdef");
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const keyWithFeature = await cacheKey(
codeql,
@@ -301,15 +308,28 @@ test("downloadDependencyCaches - restores caches with feature keys if features a
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const results = await downloadDependencyCaches(
const result = await downloadDependencyCaches(
codeql,
features,
[KnownLanguage.csharp],
logger,
);
t.is(results.length, 1);
t.is(results[0].language, KnownLanguage.csharp);
t.is(results[0].hit_kind, CacheHitKind.Exact);
// Check that the status report for telemetry indicates that one cache was restored with an exact match.
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Exact);
// Check that the restored key has been returned.
const restoredKeys = result.restoredKeys;
t.is(restoredKeys.length, 1);
t.assert(
restoredKeys[0].endsWith(mockHash),
"Expected restored key to end with hash returned by `hashFiles`",
);
// `restoreCache` should have been called exactly once.
t.assert(restoreCacheStub.calledOnce);
});
@@ -321,8 +341,14 @@ test("downloadDependencyCaches - restores caches with feature keys if features a
const logger = getRecordingLogger(messages);
const features = createFeatures([Feature.CsharpNewCacheKey]);
// We expect two calls to `hashFiles`: the first by the call to `cacheKey` below,
// and the second by `downloadDependencyCaches`. We use the result of the first
// call as part of the cache key that identifies a mock, existing cache. The result
// of the second call is for the primary restore key, which we don't want to match
// the first key so that we can test the restore keys logic.
const restoredHash = "abcdef";
const hashFilesStub = sinon.stub(glob, "hashFiles");
hashFilesStub.onFirstCall().resolves("abcdef");
hashFilesStub.onFirstCall().resolves(restoredHash);
hashFilesStub.onSecondCall().resolves("123456");
const keyWithFeature = await cacheKey(
@@ -343,18 +369,230 @@ test("downloadDependencyCaches - restores caches with feature keys if features a
.resolves(CSHARP_BASE_PATTERNS);
makePatternCheckStub.withArgs(CSHARP_EXTRA_PATTERNS).resolves(undefined);
const results = await downloadDependencyCaches(
const result = await downloadDependencyCaches(
codeql,
features,
[KnownLanguage.csharp],
logger,
);
t.is(results.length, 1);
t.is(results[0].language, KnownLanguage.csharp);
t.is(results[0].hit_kind, CacheHitKind.Partial);
// Check that the status report for telemetry indicates that one cache was restored with a partial match.
const statusReport = result.statusReport;
t.is(statusReport.length, 1);
t.is(statusReport[0].language, KnownLanguage.csharp);
t.is(statusReport[0].hit_kind, CacheHitKind.Partial);
// Check that the restored key has been returned.
const restoredKeys = result.restoredKeys;
t.is(restoredKeys.length, 1);
t.assert(
restoredKeys[0].endsWith(restoredHash),
"Expected restored key to end with hash returned by `hashFiles`",
);
t.assert(restoreCacheStub.calledOnce);
});
test("uploadDependencyCaches - skips upload for a language with no cache config", async (t) => {
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const config = createTestConfig({
languages: [KnownLanguage.actions],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 0);
checkExpectedLogMessages(t, messages, [
"Skipping upload of dependency cache for actions",
]);
});
test("uploadDependencyCaches - skips upload if no files for the hash exist", async (t) => {
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const config = createTestConfig({
languages: [KnownLanguage.go],
});
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub.resolves(undefined);
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.go);
t.is(result[0].result, CacheStoreResult.NoHash);
});
test("uploadDependencyCaches - skips upload if we know the cache already exists", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
const primaryCacheKey = await cacheKey(
codeql,
features,
KnownLanguage.csharp,
CSHARP_BASE_PATTERNS,
);
const config = createTestConfig({
languages: [KnownLanguage.csharp],
dependencyCachingRestoredKeys: [primaryCacheKey],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Duplicate);
});
test("uploadDependencyCaches - skips upload if cache size is 0", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(0);
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Empty);
checkExpectedLogMessages(t, messages, [
"Skipping upload of dependency cache",
]);
});
test("uploadDependencyCaches - uploads caches when all requirements are met", async (t) => {
process.env["RUNNER_OS"] = "Linux";
const codeql = createStubCodeQL({});
const messages: LoggedMessage[] = [];
const logger = getRecordingLogger(messages);
const features = createFeatures([]);
const mockHash = "abcdef";
sinon.stub(glob, "hashFiles").resolves(mockHash);
const makePatternCheckStub = sinon.stub(internal, "makePatternCheck");
makePatternCheckStub
.withArgs(CSHARP_BASE_PATTERNS)
.resolves(CSHARP_BASE_PATTERNS);
sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
sinon.stub(actionsCache, "saveCache").resolves();
const config = createTestConfig({
languages: [KnownLanguage.csharp],
});
const result = await uploadDependencyCaches(codeql, features, config, logger);
t.is(result.length, 1);
t.is(result[0].language, KnownLanguage.csharp);
t.is(result[0].result, CacheStoreResult.Stored);
t.is(result[0].upload_size_bytes, 1024);
checkExpectedLogMessages(t, messages, ["Uploading cache of size"]);
});
test("uploadDependencyCaches - catches `ReserveCacheError` exceptions", async (t) => {
  process.env["RUNNER_OS"] = "Linux";

  // Set up stubs as for a successful upload, except that saving to the
  // Actions cache fails with a `ReserveCacheError`, which signals that a
  // cache with the same key already exists.
  sinon.stub(glob, "hashFiles").resolves("abcdef");
  sinon
    .stub(internal, "makePatternCheck")
    .withArgs(CSHARP_BASE_PATTERNS)
    .resolves(CSHARP_BASE_PATTERNS);
  sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
  sinon
    .stub(actionsCache, "saveCache")
    .throws(new actionsCache.ReserveCacheError("Already in use"));

  const loggedMessages: LoggedMessage[] = [];
  const recordingLogger = getRecordingLogger(loggedMessages);
  const stubCodeQL = createStubCodeQL({});
  const testConfig = createTestConfig({ languages: [KnownLanguage.csharp] });

  // The error must be handled internally rather than propagated, and the
  // status report should record the cache as a duplicate.
  await t.notThrowsAsync(async () => {
    const statusReport = await uploadDependencyCaches(
      stubCodeQL,
      createFeatures([]),
      testConfig,
      recordingLogger,
    );
    t.is(statusReport.length, 1);
    t.is(statusReport[0].language, KnownLanguage.csharp);
    t.is(statusReport[0].result, CacheStoreResult.Duplicate);
    checkExpectedLogMessages(t, loggedMessages, ["Not uploading cache for"]);
  });
});
test("uploadDependencyCaches - throws other exceptions", async (t) => {
  process.env["RUNNER_OS"] = "Linux";

  // Set up stubs as for a successful upload, except that saving to the
  // Actions cache throws a generic error (not a `ReserveCacheError`).
  sinon.stub(glob, "hashFiles").resolves("abcdef");
  sinon
    .stub(internal, "makePatternCheck")
    .withArgs(CSHARP_BASE_PATTERNS)
    .resolves(CSHARP_BASE_PATTERNS);
  sinon.stub(cachingUtils, "getTotalCacheSize").resolves(1024);
  sinon.stub(actionsCache, "saveCache").throws();

  const loggedMessages: LoggedMessage[] = [];
  const recordingLogger = getRecordingLogger(loggedMessages);
  const stubCodeQL = createStubCodeQL({});
  const testConfig = createTestConfig({ languages: [KnownLanguage.csharp] });

  // Unlike `ReserveCacheError`, arbitrary errors must propagate to the caller.
  await t.throwsAsync(async () => {
    await uploadDependencyCaches(
      stubCodeQL,
      createFeatures([]),
      testConfig,
      recordingLogger,
    );
  });
});
test("getFeaturePrefix - returns empty string if no features are enabled", async (t) => {
const codeql = createStubCodeQL({});
const features = createFeatures([]);
+37 -7
View File
@@ -228,6 +228,14 @@ export interface DependencyCacheRestoreStatus {
/** An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration. */
export type DependencyCacheRestoreStatusReport = DependencyCacheRestoreStatus[];
/** Represents the results of `downloadDependencyCaches`. */
export interface DownloadDependencyCachesResult {
/** The status report for telemetry */
statusReport: DependencyCacheRestoreStatusReport;
/** An array of cache keys that we have restored and therefore know to exist. */
restoredKeys: string[];
}
/**
* A wrapper around `cacheConfig.getHashPatterns` which logs when there are no files to calculate
* a hash for the cache key from.
@@ -274,8 +282,9 @@ export async function downloadDependencyCaches(
features: FeatureEnablement,
languages: Language[],
logger: Logger,
): Promise<DependencyCacheRestoreStatusReport> {
): Promise<DownloadDependencyCachesResult> {
const status: DependencyCacheRestoreStatusReport = [];
const restoredKeys: string[] = [];
for (const language of languages) {
const cacheConfig = defaultCacheConfigs[language];
@@ -323,16 +332,27 @@ export async function downloadDependencyCaches(
if (hitKey !== undefined) {
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
const hit_kind =
hitKey === primaryKey ? CacheHitKind.Exact : CacheHitKind.Partial;
status.push({ language, hit_kind, download_duration_ms });
// We have a partial cache hit, unless the key of the restored cache matches the
// primary restore key.
let hit_kind = CacheHitKind.Partial;
if (hitKey === primaryKey) {
hit_kind = CacheHitKind.Exact;
}
status.push({
language,
hit_kind,
download_duration_ms,
});
restoredKeys.push(hitKey);
} else {
status.push({ language, hit_kind: CacheHitKind.Miss });
logger.info(`No suitable cache found for ${language}.`);
}
}
return status;
return { statusReport: status, restoredKeys };
}
/** Enumerates possible outcomes for storing caches. */
@@ -400,6 +420,18 @@ export async function uploadDependencyCaches(
continue;
}
// Now that we have verified that there are suitable files, compute the hash for the cache key.
const key = await cacheKey(codeql, features, language, patterns);
// Check that we haven't previously restored this exact key. If a cache with this key
// already exists in the Actions Cache, performing the next steps is pointless as the cache
// will not get overwritten. We can therefore skip the expensive work of measuring the size
// of the cache contents and attempting to upload it if we know that the cache already exists.
if (config.dependencyCachingRestoredKeys.includes(key)) {
status.push({ language, result: CacheStoreResult.Duplicate });
continue;
}
// Calculate the size of the files that we would store in the cache. We use this to determine whether the
// cache should be saved or not. For example, if there are no files to store, then we skip creating the
// cache. In the future, we could also:
@@ -425,8 +457,6 @@ export async function uploadDependencyCaches(
continue;
}
const key = await cacheKey(codeql, features, language, patterns);
logger.info(
`Uploading cache of size ${size} for ${language} with key ${key}...`,
);
-14
View File
@@ -20,12 +20,6 @@ export enum EnvVar {
/** Whether the CodeQL Action has invoked the Go autobuilder. */
DID_AUTOBUILD_GOLANG = "CODEQL_ACTION_DID_AUTOBUILD_GOLANG",
/**
* Whether to disable the SARIF post-processing in the Action that removes duplicate locations from
* notifications in the `run[].invocations[].toolExecutionNotifications` SARIF property.
*/
DISABLE_DUPLICATE_LOCATION_FIX = "CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX",
/**
* Whether the CodeQL Action is using its own deprecated and non-standard way of scanning for
* multiple languages.
@@ -56,20 +50,12 @@ export enum EnvVar {
/** Whether the error for a deprecated version of the CodeQL Action was logged. */
LOG_VERSION_DEPRECATION = "CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION",
/**
* For macOS. Result of `csrutil status` to determine whether System Integrity
* Protection is enabled.
*/
IS_SIP_ENABLED = "CODEQL_ACTION_IS_SIP_ENABLED",
/** UUID representing the current job run. */
JOB_RUN_UUID = "JOB_RUN_UUID",
/** Status for the entire job, submitted to the status report in `init-post` */
JOB_STATUS = "CODEQL_ACTION_JOB_STATUS",
ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION",
/** The value of the `output` input for the analyze action. */
SARIF_RESULTS_OUTPUT_DIR = "CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR",
+14 -8
View File
@@ -78,6 +78,7 @@ export enum Feature {
OverlayAnalysisSwift = "overlay_analysis_swift",
PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib",
QaTelemetryEnabled = "qa_telemetry_enabled",
UploadOverlayDbToApi = "upload_overlay_db_to_api",
UseRepositoryProperties = "use_repository_properties",
ValidateDbConfig = "validate_db_config",
}
@@ -172,6 +173,11 @@ export const featureConfig: Record<
legacyApi: true,
minimumVersion: undefined,
},
[Feature.JavaMinimizeDependencyJars]: {
defaultValue: false,
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0",
},
[Feature.OverlayAnalysis]: {
defaultValue: false,
envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS",
@@ -283,21 +289,21 @@ export const featureConfig: Record<
minimumVersion: undefined,
toolsFeature: ToolsFeature.PythonDefaultIsToNotExtractStdlib,
},
[Feature.UseRepositoryProperties]: {
defaultValue: false,
envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES",
minimumVersion: undefined,
},
[Feature.QaTelemetryEnabled]: {
defaultValue: false,
envVar: "CODEQL_ACTION_QA_TELEMETRY",
legacyApi: true,
minimumVersion: undefined,
},
[Feature.JavaMinimizeDependencyJars]: {
[Feature.UploadOverlayDbToApi]: {
defaultValue: false,
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0",
envVar: "CODEQL_ACTION_UPLOAD_OVERLAY_DB_TO_API",
minimumVersion: undefined,
},
[Feature.UseRepositoryProperties]: {
defaultValue: false,
envVar: "CODEQL_ACTION_USE_REPOSITORY_PROPERTIES",
minimumVersion: undefined,
},
[Feature.ValidateDbConfig]: {
defaultValue: false,
-61
View File
@@ -122,67 +122,6 @@ export const determineBaseBranchHeadCommitOid = async function (
}
};
/**
* Deepen the git history of HEAD by one level. Errors are logged.
*
* This function uses the `checkout_path` to determine the repository path and
* works only when called from `analyze` or `upload-sarif`.
*/
export const deepenGitHistory = async function () {
try {
await runGitCommand(
getOptionalInput("checkout_path"),
[
"fetch",
"origin",
"HEAD",
"--no-tags",
"--no-recurse-submodules",
"--deepen=1",
],
"Cannot deepen the shallow repository.",
);
} catch {
// Errors are already logged by runGitCommand()
}
};
/**
* Fetch the given remote branch. Errors are logged.
*
* This function uses the `checkout_path` to determine the repository path and
* works only when called from `analyze` or `upload-sarif`.
*/
export const gitFetch = async function (branch: string, extraFlags: string[]) {
try {
await runGitCommand(
getOptionalInput("checkout_path"),
["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`],
`Cannot fetch ${branch}.`,
);
} catch {
// Errors are already logged by runGitCommand()
}
};
/**
* Repack the git repository, using with the given flags. Errors are logged.
*
* This function uses the `checkout_path` to determine the repository path and
* works only when called from `analyze` or `upload-sarif`.
*/
export const gitRepack = async function (flags: string[]) {
try {
await runGitCommand(
getOptionalInput("checkout_path"),
["repack", ...flags],
"Cannot repack the repository.",
);
} catch {
// Errors are already logged by runGitCommand()
}
};
/**
* Decode, if necessary, a file path produced by Git. See
* https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
+7 -4
View File
@@ -371,7 +371,7 @@ async function run() {
}
let overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined;
let dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined;
let dependencyCachingStatus: DependencyCacheRestoreStatusReport | undefined;
try {
if (
config.overlayDatabaseMode === OverlayDatabaseMode.Overlay &&
@@ -579,12 +579,15 @@ async function run() {
// Restore dependency cache(s), if they exist.
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
dependencyCachingResults = await downloadDependencyCaches(
const dependencyCachingResult = await downloadDependencyCaches(
codeql,
features,
config.languages,
logger,
);
dependencyCachingStatus = dependencyCachingResult.statusReport;
config.dependencyCachingRestoredKeys =
dependencyCachingResult.restoredKeys;
}
// Suppress warnings about disabled Python library extraction.
@@ -732,7 +735,7 @@ async function run() {
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
dependencyCachingStatus,
logger,
error,
);
@@ -755,7 +758,7 @@ async function run() {
toolsSource,
toolsVersion,
overlayBaseDatabaseStats,
dependencyCachingResults,
dependencyCachingStatus,
logger,
);
}
+5 -4
View File
@@ -16,6 +16,7 @@ import { type Config } from "./config-utils";
import { getCommitOid, getFileOidsUnderPath } from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import {
CleanupLevel,
getErrorMessage,
isInTestMode,
tryGetFolderBytes,
@@ -28,7 +29,7 @@ export enum OverlayDatabaseMode {
None = "none",
}
export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.5";
/**
* The maximum (uncompressed) size of the overlay base database that we will
@@ -175,7 +176,7 @@ const MAX_CACHE_OPERATION_MS = 600_000;
* @param warningPrefix Prefix for the check failure warning message
* @returns True if the verification succeeded, false otherwise
*/
export function checkOverlayBaseDatabase(
function checkOverlayBaseDatabase(
config: Config,
logger: Logger,
warningPrefix: string,
@@ -204,7 +205,7 @@ export function checkOverlayBaseDatabase(
* @returns A promise that resolves to true if the upload was performed and
* successfully completed, or false otherwise
*/
export async function uploadOverlayBaseDatabaseToCache(
export async function cleanupAndUploadOverlayBaseDatabaseToCache(
codeql: CodeQL,
config: Config,
logger: Logger,
@@ -242,7 +243,7 @@ export async function uploadOverlayBaseDatabaseToCache(
// Clean up the database using the overlay cleanup level.
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, "overlay");
await codeql.databaseCleanupCluster(config, CleanupLevel.Overlay);
});
const dbLocation = config.dbLocation;
+4 -15
View File
@@ -34,7 +34,7 @@ export enum ToolsSource {
Download = "DOWNLOAD",
}
export const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
const CODEQL_DEFAULT_ACTION_REPOSITORY = "github/codeql-action";
const CODEQL_NIGHTLIES_REPOSITORY_OWNER = "dsp-testing";
const CODEQL_NIGHTLIES_REPOSITORY_NAME = "codeql-cli-nightlies";
@@ -180,17 +180,6 @@ export function tryGetTagNameFromUrl(
return match[1];
}
export function tryGetBundleVersionFromUrl(
url: string,
logger: Logger,
): string | undefined {
const tagName = tryGetTagNameFromUrl(url, logger);
if (tagName === undefined) {
return undefined;
}
return tryGetBundleVersionFromTagName(tagName, logger);
}
export function convertToSemVer(version: string, logger: Logger): string {
if (!semver.valid(version)) {
logger.debug(
@@ -580,7 +569,7 @@ export async function getCodeQLSource(
* Gets a fallback version number to use when looking for CodeQL in the toolcache if we didn't find
* the `x.y.z` version. This is to support old versions of the toolcache.
*/
export async function tryGetFallbackToolcacheVersion(
async function tryGetFallbackToolcacheVersion(
cliVersion: string | undefined,
tagName: string,
logger: Logger,
@@ -729,7 +718,7 @@ function getCanonicalToolcacheVersion(
return cliVersion;
}
export interface SetupCodeQLResult {
interface SetupCodeQLResult {
codeqlFolder: string;
toolsDownloadStatusReport?: ToolsDownloadStatusReport;
toolsSource: ToolsSource;
@@ -750,7 +739,7 @@ export async function setupCodeQLBundle(
defaultCliVersion: CodeQLDefaultVersionInfo,
features: FeatureEnablement,
logger: Logger,
) {
): Promise<SetupCodeQLResult> {
if (!(await util.isBinaryAccessible("tar", logger))) {
throw new util.ConfigurationError(
"Could not find tar in PATH, so unable to extract CodeQL bundle.",
+2 -2
View File
@@ -8,7 +8,7 @@ import { ConfigurationError, getErrorMessage, isDefined } from "./util";
export const UPDATEJOB_PROXY = "update-job-proxy";
export const UPDATEJOB_PROXY_VERSION = "v2.0.20250624110901";
export const UPDATEJOB_PROXY_URL_PREFIX =
const UPDATEJOB_PROXY_URL_PREFIX =
"https://github.com/github/codeql-action/releases/download/codeql-bundle-v2.22.0/";
export type Credential = {
@@ -202,7 +202,7 @@ export function getFallbackUrl(proxyPackage: string): string {
*
* @returns The response from the GitHub API.
*/
export async function getLinkedRelease() {
async function getLinkedRelease() {
return getApiClient().rest.repos.getReleaseByTag({
owner: "github",
repo: "codeql-action",
+1 -1
View File
@@ -54,7 +54,7 @@ export enum ActionName {
* considered to be a third party analysis and is treated differently when calculating SLOs. To ensure
* misconfigured workflows are not treated as third party, only the upload-sarif action can return false.
*/
export function isFirstPartyAnalysis(actionName: ActionName): boolean {
function isFirstPartyAnalysis(actionName: ActionName): boolean {
if (actionName !== ActionName.UploadSarif) {
return true;
}
+1
View File
@@ -392,6 +392,7 @@ export function createTestConfig(overrides: Partial<Config>): Config {
trapCaches: {},
trapCacheDownloadTime: 0,
dependencyCachingEnabled: CachingKind.None,
dependencyCachingRestoredKeys: [],
extraQueryExclusions: [],
overlayDatabaseMode: OverlayDatabaseMode.None,
useOverlayDatabaseCaching: false,
+1 -1
View File
@@ -17,7 +17,7 @@ import { cleanUpPath, getErrorMessage, getRequiredEnvParam } from "./util";
/**
* High watermark to use when streaming the download and extraction of the CodeQL tools.
*/
export const STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; // 4 MiB
const STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024; // 4 MiB
/**
* The name of the tool cache directory for the CodeQL tools.
+1 -1
View File
@@ -76,7 +76,7 @@ export async function endTracingForCluster(
}
}
export async function getTracerConfigForCluster(
async function getTracerConfigForCluster(
config: Config,
): Promise<TracerConfig> {
const tracingEnvVariables = JSON.parse(
+1 -1
View File
@@ -412,7 +412,7 @@ export function findSarifFilesInDir(
return sarifFiles;
}
export function getSarifFilePaths(
function getSarifFilePaths(
sarifPath: string,
isSarif: (name: string) => boolean,
) {
+1 -1
View File
@@ -476,7 +476,7 @@ for (const [
githubVersion,
)}`;
test(`checkActionVersion ${reportErrorDescription} for ${versionsDescription}`, async (t) => {
const warningSpy = sinon.spy(core, "error");
const warningSpy = sinon.spy(core, "warning");
const versionStub = sinon
.stub(api, "getGitHubVersion")
.resolves(githubVersion);
+6 -84
View File
@@ -4,7 +4,6 @@ import * as os from "os";
import * as path from "path";
import * as core from "@actions/core";
import * as exec from "@actions/exec/lib/exec";
import * as io from "@actions/io";
import getFolderSize from "get-folder-size";
import * as yaml from "js-yaml";
@@ -1026,34 +1025,6 @@ export function fixInvalidNotifications(
return newSarif;
}
/**
* Removes duplicates from the sarif file.
*
* When `CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX` is set to true, this will
* simply rename the input file to the output file. Otherwise, it will parse the
* input file as JSON, remove duplicate locations from the SARIF notification
* objects, and write the result to the output file.
*
* For context, see documentation of:
* `CODEQL_ACTION_DISABLE_DUPLICATE_LOCATION_FIX`. */
export function fixInvalidNotificationsInFile(
inputPath: string,
outputPath: string,
logger: Logger,
): void {
if (process.env[EnvVar.DISABLE_DUPLICATE_LOCATION_FIX] === "true") {
logger.info(
"SARIF notification object duplicate location fix disabled by the " +
`${EnvVar.DISABLE_DUPLICATE_LOCATION_FIX} environment variable.`,
);
fs.renameSync(inputPath, outputPath);
} else {
let sarif = JSON.parse(fs.readFileSync(inputPath, "utf8")) as SarifFile;
sarif = fixInvalidNotifications(sarif, logger);
fs.writeFileSync(outputPath, JSON.stringify(sarif));
}
}
export function wrapError(error: unknown): Error {
return error instanceof Error ? error : new Error(String(error));
}
@@ -1141,7 +1112,7 @@ export function checkActionVersion(
">=3.20",
))
) {
core.error(
core.warning(
"CodeQL Action v3 will be deprecated in December 2026. " +
"Please update all occurrences of the CodeQL Action in your workflow files to v4. " +
"For more information, see " +
@@ -1197,49 +1168,6 @@ export function cloneObject<T>(obj: T): T {
return JSON.parse(JSON.stringify(obj)) as T;
}
// The first time this function is called, it runs `csrutil status` to determine
// whether System Integrity Protection is enabled; and saves the result in an
// environment variable. Afterwards, simply return the value of the environment
// variable.
export async function checkSipEnablement(
logger: Logger,
): Promise<boolean | undefined> {
if (
process.env[EnvVar.IS_SIP_ENABLED] !== undefined &&
["true", "false"].includes(process.env[EnvVar.IS_SIP_ENABLED])
) {
return process.env[EnvVar.IS_SIP_ENABLED] === "true";
}
try {
const sipStatusOutput = await exec.getExecOutput("csrutil status");
if (sipStatusOutput.exitCode === 0) {
if (
sipStatusOutput.stdout.includes(
"System Integrity Protection status: enabled.",
)
) {
core.exportVariable(EnvVar.IS_SIP_ENABLED, "true");
return true;
}
if (
sipStatusOutput.stdout.includes(
"System Integrity Protection status: disabled.",
)
) {
core.exportVariable(EnvVar.IS_SIP_ENABLED, "false");
return false;
}
}
return undefined;
} catch (e) {
logger.warning(
`Failed to determine if System Integrity Protection was enabled: ${e}`,
);
return undefined;
}
}
export async function cleanUpPath(file: string, name: string, logger: Logger) {
logger.debug(`Cleaning up ${name}.`);
try {
@@ -1291,17 +1219,6 @@ export function isDefined<T>(value: T | null | undefined): value is T {
return value !== undefined && value !== null;
}
/** Like `Object.keys`, but typed so that the elements of the resulting array have the
* same type as the keys of the input object. Note that this may not be sound if the input
* object has been cast to `T` from a subtype of `T` and contains additional keys that
* are not represented by `keyof T`.
*/
export function unsafeKeysInvariant<T extends Record<string, any>>(
object: T,
): Array<keyof T> {
return Object.keys(object) as Array<keyof T>;
}
/** Like `Object.entries`, but typed so that the key elements of the result have the
* same type as the keys of the input object. Note that this may not be sound if the input
* object has been cast to `T` from a subtype of `T` and contains additional keys that
@@ -1314,3 +1231,8 @@ export function unsafeEntriesInvariant<T extends Record<string, any>>(
([_, val]) => val !== undefined,
) as Array<[keyof T, Exclude<T[keyof T], undefined>]>;
}
export enum CleanupLevel {
Clear = "clear",
Overlay = "overlay",
}