Match CLI version to cached overlay-base database

This commit is contained in:
Henry Mercer
2026-05-06 17:42:31 +01:00
parent b0942116d7
commit 55d6319f96
19 changed files with 2437 additions and 1101 deletions
+57 -51
View File
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare2 = require_compare();
var rcompare2 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare2 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare2,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -47732,16 +47732,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -48030,8 +48030,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -48044,8 +48044,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare2(b, a, loose);
}
exports2.sort = sort;
@@ -51958,7 +51958,7 @@ var require_cacheUtils = __commonJS({
var crypto2 = __importStar2(require("crypto"));
var fs9 = __importStar2(require("fs"));
var path9 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core15.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache5;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
var core15 = __importStar2(require_core());
var path9 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache5(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99573,7 +99573,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
@@ -99587,7 +99587,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -99596,7 +99596,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -99856,7 +99856,7 @@ var require_tool_cache = __commonJS({
var os2 = __importStar2(require("os"));
var path9 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -100129,7 +100129,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source dir: ${sourceDir}`);
@@ -100147,7 +100147,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source file: ${sourceFile}`);
@@ -100177,7 +100177,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path9.join(_getCacheDirectory(), toolName, versionSpec, arch);
core15.debug(`checking cache: ${cachePath}`);
if (fs9.existsSync(cachePath) && fs9.existsSync(`${cachePath}.complete`)) {
@@ -100257,7 +100257,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path9.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
core15.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io6.rmRF(folderPath);
@@ -100267,30 +100267,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path9.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
const markerPath = `${folderPath}.complete`;
fs9.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core15.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core15.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core15.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core15.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -108004,7 +108004,7 @@ var require_stream_writable = __commonJS({
pna.nextTick(cb, er);
}
function validChunk(stream, state, chunk, cb) {
var valid3 = true;
var valid4 = true;
var er = false;
if (chunk === null) {
er = new TypeError("May not write null values to stream");
@@ -108014,9 +108014,9 @@ var require_stream_writable = __commonJS({
if (er) {
stream.emit("error", er);
pna.nextTick(cb, er);
valid3 = false;
valid4 = false;
}
return valid3;
return valid4;
}
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
@@ -162985,20 +162985,26 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) {
// src/setup-codeql.ts
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tar.ts
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
// src/tools-download.ts
var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/tracer-config.ts
@@ -163595,7 +163601,7 @@ var core12 = __toESM(require_core());
// src/dependency-caching.ts
var import_path = require("path");
var actionsCache3 = __toESM(require_cache5());
var actionsCache4 = __toESM(require_cache5());
var glob = __toESM(require_glob());
function getJavaTempDependencyDir() {
return (0, import_path.join)(getTemporaryDirectory(), "codeql_java", "repository");
+339 -193
View File
@@ -107131,6 +107131,32 @@ var persistInputs = function() {
);
core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables));
};
function getPullRequestBranches() {
const pullRequest = github.context.payload.pull_request;
if (pullRequest) {
return {
base: pullRequest.base.ref,
// We use the head label instead of the head ref here, because the head
// ref lacks owner information and by itself does not uniquely identify
// the head branch (which may be in a forked repository).
head: pullRequest.head.label
};
}
const codeScanningRef = process.env.CODE_SCANNING_REF;
const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
if (codeScanningRef && codeScanningBaseBranch) {
return {
base: codeScanningBaseBranch,
// PR analysis under Default Setup analyzes the PR head commit instead of
// the merge commit, so we can use the provided ref directly.
head: codeScanningRef
};
}
return void 0;
}
function isAnalyzingPullRequest() {
return getPullRequestBranches() !== void 0;
}
var qualityCategoryMapping = {
"c#": "csharp",
cpp: "c-cpp",
@@ -107227,7 +107253,7 @@ var SarifScanOrder = [
];
// src/analyze.ts
var fs13 = __toESM(require("fs"));
var fs14 = __toESM(require("fs"));
var path12 = __toESM(require("path"));
var import_perf_hooks2 = require("perf_hooks");
var io5 = __toESM(require_io());
@@ -107511,7 +107537,7 @@ function wrapApiConfigurationError(e) {
}
// src/codeql.ts
var fs12 = __toESM(require("fs"));
var fs13 = __toESM(require("fs"));
var path11 = __toESM(require("path"));
var core11 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
@@ -108868,6 +108894,17 @@ var builtin_default = {
// src/languages/index.ts
var builtInLanguageSet = new Set(builtin_default.languages);
function isBuiltInLanguage(language) {
return builtInLanguageSet.has(language);
}
function parseBuiltInLanguage(language) {
language = language.trim().toLowerCase();
language = builtin_default.aliases[language] ?? language;
if (isBuiltInLanguage(language)) {
return language;
}
return void 0;
}
// src/overlay/status.ts
var actionsCache = __toESM(require_cache5());
@@ -109080,11 +109117,11 @@ function getPrimaryAnalysisConfig(config) {
}
// src/setup-codeql.ts
var fs10 = __toESM(require("fs"));
var fs11 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// node_modules/uuid/dist-node/stringify.js
var byteToHex = [];
@@ -109130,14 +109167,203 @@ function _v4(options, buf, offset) {
}
var v4_default = v4;
// src/overlay/caching.ts
var fs8 = __toESM(require("fs"));
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS2 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs8.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay-base" /* OverlayBase */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip uploading overlay-base database to cache.`
);
return false;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip uploading overlay-base database to cache."
);
return false;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip uploading overlay-base database to cache."
);
return false;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Abort uploading overlay-base database to cache"
);
if (!databaseIsValid) {
return false;
}
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, "overlay" /* Overlay */);
});
const dbLocation = config.dbLocation;
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.warning(
"Failed to determine database size. Skip uploading overlay-base database to cache."
);
return false;
}
if (databaseSizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
const databaseSizeMB = Math.round(databaseSizeBytes / 1e6);
logger.warning(
`Database size (${databaseSizeMB} MB) exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). Skip uploading overlay-base database to cache.`
);
return false;
}
const codeQlVersion = (await codeql.getVersion()).version;
const checkoutPath = getRequiredInput("checkout_path");
const cacheSaveKey = await getCacheSaveKey(
config,
codeQlVersion,
checkoutPath,
logger
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`
);
try {
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS2,
actionsCache3.saveCache([dbLocation], cacheSaveKey),
() => {
}
);
if (cacheId === void 0) {
logger.warning("Timed out while uploading overlay-base database");
return false;
}
} catch (error3) {
logger.warning(
`Failed to upload overlay-base database to cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return false;
}
logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
return true;
}
async function getCacheSaveKey(config, codeQlVersion, checkoutPath, logger) {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
return `${await getCacheKeyPrefixBase(config.languages)}${codeQlVersion}-`;
}
async function getCacheKeyPrefixBase(parsedLanguages) {
const languagesComponent = [...parsedLanguages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${languagesComponent}-`;
}
async function getCodeQlVersionsForOverlayBaseDatabases(rawLanguages, logger) {
const languages = rawLanguages.map(parseBuiltInLanguage);
if (languages.includes(void 0)) {
logger.warning(
"One or more provided languages are not recognized as built-in languages. Skipping searching for overlay-base databases in cache."
);
return void 0;
}
const cacheKeyPrefix = await getCacheKeyPrefixBase(
languages.filter((l) => l !== void 0)
);
logger.debug(
`Searching for overlay-base databases in Actions cache with prefix ${cacheKeyPrefix}`
);
const caches = await listActionsCaches(cacheKeyPrefix);
if (caches.length === 0) {
logger.info("No overlay-base databases found in Actions cache.");
return [];
}
logger.info(
`Found ${caches.length} overlay-base ${caches.length === 1 ? "database" : "databases"} in the Actions cache.`
);
const versionRegex = /^([\d.]+)-/;
const versionSet = /* @__PURE__ */ new Set();
for (const cache of caches) {
if (!cache.key) continue;
const suffix = cache.key.substring(cacheKeyPrefix.length);
const match = suffix.match(versionRegex);
if (match && semver6.valid(match[1])) {
versionSet.add(match[1]);
}
}
if (versionSet.size === 0) {
logger.info(
"Could not parse any CodeQL versions from overlay-base database cache keys."
);
return [];
}
const versions = [...versionSet].sort(semver6.rcompare);
logger.info(
`Found overlay databases for the following CodeQL versions in the Actions cache: ${versions.join(", ")}`
);
return versions;
}
// src/tar.ts
var import_child_process = require("child_process");
var fs8 = __toESM(require("fs"));
var fs9 = __toESM(require("fs"));
var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
var MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
async function getTarVersion() {
@@ -109179,9 +109405,9 @@ async function isZstdAvailable(logger) {
case "gnu":
return {
available: foundZstdBinary && // GNU tar only uses major and minor version numbers
semver6.gte(
semver6.coerce(version),
semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
semver7.gte(
semver7.coerce(version),
semver7.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
),
foundZstdBinary,
version: tarVersion
@@ -109190,7 +109416,7 @@ async function isZstdAvailable(logger) {
return {
available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain
// a patch version number.
semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
semver7.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
foundZstdBinary,
version: tarVersion
};
@@ -109205,7 +109431,7 @@ async function isZstdAvailable(logger) {
}
}
async function extract(tarPath, dest, compressionMethod, tarVersion, logger) {
fs8.mkdirSync(dest, { recursive: true });
fs9.mkdirSync(dest, { recursive: true });
switch (compressionMethod) {
case "gzip":
return await toolcache.extractTar(tarPath, dest);
@@ -109289,7 +109515,7 @@ function inferCompressionMethod(tarPath) {
}
// src/tools-download.ts
var fs9 = __toESM(require("fs"));
var fs10 = __toESM(require("fs"));
var os2 = __toESM(require("os"));
var path8 = __toESM(require("path"));
var import_perf_hooks = require("perf_hooks");
@@ -109297,7 +109523,7 @@ var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
var TOOLCACHE_TOOL_NAME = "CodeQL";
function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) {
@@ -109396,7 +109622,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat
};
}
async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) {
fs9.mkdirSync(dest, { recursive: true });
fs10.mkdirSync(dest, { recursive: true });
const agent = new import_http_client.HttpClient().getAgent(codeqlURL);
headers = Object.assign(
{ "User-Agent": "CodeQL Action" },
@@ -109427,13 +109653,13 @@ function getToolcacheDirectory(version) {
return path8.join(
getRequiredEnvParam("RUNNER_TOOL_CACHE"),
TOOLCACHE_TOOL_NAME,
semver7.clean(version) || version,
semver8.clean(version) || version,
os2.arch() || ""
);
}
function writeToolcacheMarkerFile(extractedPath, logger) {
const markerFilePath = `${extractedPath}.complete`;
fs9.writeFileSync(markerFilePath, "");
fs10.writeFileSync(markerFilePath, "");
logger.info(`Created toolcache marker file ${markerFilePath}`);
}
function sanitizeUrlForStatusReport(url2) {
@@ -109552,13 +109778,13 @@ function tryGetTagNameFromUrl(url2, logger) {
return match[1];
}
function convertToSemVer(version, logger) {
if (!semver8.valid(version)) {
if (!semver9.valid(version)) {
logger.debug(
`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`
);
version = `0.0.0-${version}`;
}
const s = semver8.clean(version);
const s = semver9.clean(version);
if (!s) {
throw new Error(`Bundle version ${version} is not in SemVer format.`);
}
@@ -109568,7 +109794,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({
folder: toolcache3.find("CodeQL", version),
version
})).filter(({ folder }) => fs10.existsSync(path9.join(folder, "pinned-version")));
})).filter(({ folder }) => fs11.existsSync(path9.join(folder, "pinned-version")));
if (candidates.length === 1) {
const candidate = candidates[0];
logger.debug(
@@ -109590,7 +109816,55 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
}
return void 0;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
async function getEnabledVersionsWithOverlayBaseDatabases(defaultCliVersion, rawLanguages, features, logger) {
if (rawLanguages === void 0 || rawLanguages.length === 0) {
return [];
}
if (!await features.getValue("overlay_analysis_match_codeql_version" /* OverlayAnalysisMatchCodeqlVersion */)) {
return [];
}
let cachedVersions;
try {
cachedVersions = await getCodeQlVersionsForOverlayBaseDatabases(
rawLanguages,
logger
);
} catch (e) {
logger.warning(
`While setting up CodeQL, was unable to list overlay-base databases in the Actions cache. Details: ${e}`
);
return [];
}
if (cachedVersions === void 0 || cachedVersions.length === 0) {
return [];
}
const cachedVersionsSet = new Set(cachedVersions);
return defaultCliVersion.enabledVersions.filter(
(v) => cachedVersionsSet.has(v.cliVersion)
);
}
async function resolveDefaultCliVersion(defaultCliVersion, rawLanguages, features, logger) {
if (!isAnalyzingPullRequest()) {
return defaultCliVersion.enabledVersions[0];
}
const overlayVersions = await getEnabledVersionsWithOverlayBaseDatabases(
defaultCliVersion,
rawLanguages,
features,
logger
);
if (overlayVersions.length > 0) {
logger.info(
`Using CodeQL version ${overlayVersions[0].cliVersion} since this is the highest enabled version that has a cached overlay-base database.`
);
return overlayVersions[0];
}
logger.info(
`Using CodeQL version ${defaultCliVersion.enabledVersions[0].cliVersion} since no enabled versions with cached overlay-base databases were found.`
);
return defaultCliVersion.enabledVersions[0];
}
async function getCodeQLSource(toolsInput, defaultCliVersion, rawLanguages, apiDetails, variant, tarSupportsZstd, features, logger) {
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
const compressionMethod2 = inferCompressionMethod(toolsInput);
@@ -109684,21 +109958,33 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
);
}
}
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
} else if (toolsInput !== void 0) {
tagName = tryGetTagNameFromUrl(toolsInput, logger);
url2 = toolsInput;
if (tagName) {
const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger);
if (bundleVersion3 && semver8.valid(bundleVersion3)) {
if (bundleVersion3 && semver9.valid(bundleVersion3)) {
cliVersion2 = convertToSemVer(bundleVersion3, logger);
}
}
} else {
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger);
const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown";
@@ -109895,7 +110181,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
}
return cliVersion2;
}
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
if (!await isBinaryAccessible("tar", logger)) {
throw new ConfigurationError(
"Could not find tar in PATH, so unable to extract CodeQL bundle."
@@ -109905,6 +110191,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
const source = await getCodeQLSource(
toolsInput,
defaultCliVersion,
rawLanguages,
apiDetails,
variant,
zstdAvailability.available,
@@ -109963,7 +110250,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
async function useZstdBundle(cliVersion2, tarSupportsZstd) {
return (
// In testing, gzip performs better than zstd on Windows.
process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
process.platform !== "win32" && tarSupportsZstd && semver9.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
);
}
function getTempExtractionDir(tempDir) {
@@ -109995,7 +110282,7 @@ async function getNightlyToolsUrl(logger) {
}
}
function getLatestToolcacheVersion(logger) {
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a));
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver9.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions
@@ -110015,7 +110302,7 @@ function isReservedToolsValue(tools) {
}
// src/tracer-config.ts
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var path10 = __toESM(require("path"));
async function shouldEnableIndirectTracing(codeql, config) {
if (config.buildMode === "none" /* None */) {
@@ -110035,14 +110322,14 @@ async function endTracingForCluster(codeql, config, logger) {
config.dbLocation,
"temp/tracingEnvironment/end-tracing.json"
);
if (!fs11.existsSync(envVariablesFile)) {
if (!fs12.existsSync(envVariablesFile)) {
throw new Error(
`Environment file for ending tracing not found: ${envVariablesFile}`
);
}
try {
const endTracingEnvVariables = JSON.parse(
fs11.readFileSync(envVariablesFile, "utf8")
fs12.readFileSync(envVariablesFile, "utf8")
);
for (const [key, value] of Object.entries(endTracingEnvVariables)) {
if (value !== null) {
@@ -110065,7 +110352,7 @@ var CODEQL_NEXT_MINIMUM_VERSION = "2.19.4";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.15";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2026-04-09";
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger, checkVersion) {
try {
const {
codeqlFolder,
@@ -110079,6 +110366,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger
);
@@ -110154,7 +110442,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"tools",
"tracing-config.lua"
);
return fs12.existsSync(tracingConfigPath);
return fs13.existsSync(tracingConfigPath);
},
async isScannedLanguage(language) {
return !await this.isTracedLanguage(language);
@@ -110630,7 +110918,7 @@ async function writeCodeScanningConfigFile(config, logger) {
logger.startGroup("Augmented user configuration file contents");
logger.info(dump(augmentedConfig));
logger.endGroup();
fs12.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
fs13.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
return codeScanningConfigFile;
}
var TRAP_CACHE_SIZE_MB = 1024;
@@ -110722,7 +111010,7 @@ async function runAutobuild(config, language, logger) {
// src/dependency-caching.ts
var os3 = __toESM(require("os"));
var import_path2 = require("path");
var actionsCache3 = __toESM(require_cache5());
var actionsCache4 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
var CODEQL_DEPENDENCY_CACHE_VERSION = 1;
@@ -110860,7 +111148,7 @@ async function uploadDependencyCaches(codeql, features, config, logger) {
);
try {
const start = performance.now();
await actionsCache3.saveCache(
await actionsCache4.saveCache(
await cacheConfig.getDependencyPaths(codeql, features),
key
);
@@ -110872,7 +111160,7 @@ async function uploadDependencyCaches(codeql, features, config, logger) {
upload_duration_ms
});
} catch (error3) {
if (error3 instanceof actionsCache3.ReserveCacheError) {
if (error3 instanceof actionsCache4.ReserveCacheError) {
logger.info(
`Not uploading cache for ${language}, because ${key} is already in use.`
);
@@ -110980,7 +111268,7 @@ function dbIsFinalized(config, language, logger) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const dbInfo = load(
fs13.readFileSync(path12.resolve(dbPath, "codeql-database.yml"), "utf8")
fs14.readFileSync(path12.resolve(dbPath, "codeql-database.yml"), "utf8")
);
return !("inProgress" in dbInfo);
} catch {
@@ -111065,8 +111353,8 @@ function writeDiffRangeDataExtensionPack(logger, ranges, checkoutPath) {
ranges = [{ path: "", startLine: 0, endLine: 0 }];
}
const diffRangeDir = path12.join(getTemporaryDirectory(), "pr-diff-range");
fs13.mkdirSync(diffRangeDir, { recursive: true });
fs13.writeFileSync(
fs14.mkdirSync(diffRangeDir, { recursive: true });
fs14.writeFileSync(
path12.join(diffRangeDir, "qlpack.yml"),
`
name: codeql-action/pr-diff-range
@@ -111083,7 +111371,7 @@ dataExtensions:
checkoutPath
);
const extensionFilePath = path12.join(diffRangeDir, "pr-diff-range.yml");
fs13.writeFileSync(extensionFilePath, extensionContents);
fs14.writeFileSync(extensionFilePath, extensionContents);
logger.debug(
`Wrote pr-diff-range extension pack to ${extensionFilePath}:
${extensionContents}`
@@ -111235,7 +111523,7 @@ async function runQueries(sarifFolder, memoryFlag, threadsFlag, diffRangePackDir
}
function getPerQueryAlertCounts(sarifPath) {
const sarifObject = JSON.parse(
fs13.readFileSync(sarifPath, "utf8")
fs14.readFileSync(sarifPath, "utf8")
);
const perQueryAlertCounts = {};
for (const sarifRun of sarifObject.runs) {
@@ -111253,13 +111541,13 @@ async function runQueries(sarifFolder, memoryFlag, threadsFlag, diffRangePackDir
}
async function runFinalize(features, outputDir, threadsFlag, memoryFlag, codeql, config, logger) {
try {
await fs13.promises.rm(outputDir, { force: true, recursive: true });
await fs14.promises.rm(outputDir, { force: true, recursive: true });
} catch (error3) {
if (error3?.code !== "ENOENT") {
throw error3;
}
}
await fs13.promises.mkdir(outputDir, { recursive: true });
await fs14.promises.mkdir(outputDir, { recursive: true });
const timings = await finalizeDatabaseCreation(
codeql,
features,
@@ -111303,7 +111591,7 @@ async function warnIfGoInstalledAfterInit(config, logger) {
}
// src/database-upload.ts
var fs14 = __toESM(require("fs"));
var fs15 = __toESM(require("fs"));
async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetails, features, logger) {
if (getRequiredInput("upload-database") !== "true") {
logger.debug("Database upload disabled in workflow. Skipping upload.");
@@ -111339,7 +111627,7 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai
const bundledDb = await bundleDb(config, language, codeql, language, {
includeDiagnostics: false
});
bundledDbSize = fs14.statSync(bundledDb).size;
bundledDbSize = fs15.statSync(bundledDb).size;
const commitOid = await getCommitOid(
getRequiredInput("checkout_path")
);
@@ -111402,7 +111690,7 @@ async function uploadBundledDatabase(repositoryNwo, language, commitOid, bundled
if (uploadsBaseUrl.endsWith("/")) {
uploadsBaseUrl = uploadsBaseUrl.slice(0, -1);
}
const bundledDbReadStream = fs14.createReadStream(bundledDb);
const bundledDbReadStream = fs15.createReadStream(bundledDb);
try {
const startTime = performance.now();
await client.request(
@@ -111432,151 +111720,6 @@ async function uploadBundledDatabase(repositoryNwo, language, commitOid, bundled
}
}
// src/overlay/caching.ts
var fs15 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var semver9 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS2 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs15.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay-base" /* OverlayBase */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip uploading overlay-base database to cache.`
);
return false;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip uploading overlay-base database to cache."
);
return false;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip uploading overlay-base database to cache."
);
return false;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Abort uploading overlay-base database to cache"
);
if (!databaseIsValid) {
return false;
}
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, "overlay" /* Overlay */);
});
const dbLocation = config.dbLocation;
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.warning(
"Failed to determine database size. Skip uploading overlay-base database to cache."
);
return false;
}
if (databaseSizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
const databaseSizeMB = Math.round(databaseSizeBytes / 1e6);
logger.warning(
`Database size (${databaseSizeMB} MB) exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). Skip uploading overlay-base database to cache.`
);
return false;
}
const codeQlVersion = (await codeql.getVersion()).version;
const checkoutPath = getRequiredInput("checkout_path");
const cacheSaveKey = await getCacheSaveKey(
config,
codeQlVersion,
checkoutPath,
logger
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`
);
try {
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS2,
actionsCache4.saveCache([dbLocation], cacheSaveKey),
() => {
}
);
if (cacheId === void 0) {
logger.warning("Timed out while uploading overlay-base database");
return false;
}
} catch (error3) {
logger.warning(
`Failed to upload overlay-base database to cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return false;
}
logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
return true;
}
async function getCacheSaveKey(config, codeQlVersion, checkoutPath, logger) {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
return `${await getCacheKeyPrefixBase(config.languages)}${codeQlVersion}-`;
}
async function getCacheKeyPrefixBase(parsedLanguages) {
const languagesComponent = [...parsedLanguages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${languagesComponent}-`;
}
// src/status-report.ts
var os4 = __toESM(require("os"));
var core13 = __toESM(require_core());
@@ -112919,7 +113062,7 @@ var core14 = __toESM(require_core());
var toolrunner4 = __toESM(require_toolrunner());
var github2 = __toESM(require_github());
var io6 = __toESM(require_io());
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
logger.startGroup("Setup CodeQL tools");
const {
codeql,
@@ -112933,6 +113076,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
true
@@ -113089,6 +113233,8 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
tempDir,
gitHubVersion.type,
codeQLDefaultVersionInfo,
void 0,
// rawLanguages: upload-lib does not run analysis
features,
logger
);
+53 -47
View File
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare2 = require_compare();
var rcompare2 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare2 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare2,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -47732,16 +47732,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -48030,8 +48030,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -48044,8 +48044,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare2(b, a, loose);
}
exports2.sort = sort;
@@ -51958,7 +51958,7 @@ var require_cacheUtils = __commonJS({
var crypto2 = __importStar2(require("crypto"));
var fs8 = __importStar2(require("fs"));
var path9 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core15.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache3;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path9 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache3(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99573,7 +99573,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
@@ -99587,7 +99587,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -99596,7 +99596,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -99856,7 +99856,7 @@ var require_tool_cache = __commonJS({
var os2 = __importStar2(require("os"));
var path9 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -100129,7 +100129,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source dir: ${sourceDir}`);
@@ -100147,7 +100147,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source file: ${sourceFile}`);
@@ -100177,7 +100177,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path9.join(_getCacheDirectory(), toolName, versionSpec, arch);
core15.debug(`checking cache: ${cachePath}`);
if (fs8.existsSync(cachePath) && fs8.existsSync(`${cachePath}.complete`)) {
@@ -100257,7 +100257,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path9.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
core15.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io5.rmRF(folderPath);
@@ -100267,30 +100267,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path9.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
const markerPath = `${folderPath}.complete`;
fs8.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core15.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core15.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core15.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core15.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -105375,20 +105375,26 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) {
// src/setup-codeql.ts
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tar.ts
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
// src/tools-download.ts
var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/tracer-config.ts
+309 -135
View File
File diff suppressed because it is too large Load Diff
+332 -198
View File
@@ -104817,6 +104817,18 @@ function computeAutomationID(analysis_key, environment) {
}
return automationID;
}
async function listActionsCaches(keyPrefix, ref) {
const repositoryNwo = getRepositoryNwo();
return await getApiClient().paginate(
"GET /repos/{owner}/{repo}/actions/caches",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
key: keyPrefix,
ref
}
);
}
async function getRepositoryProperties(repositoryNwo) {
return getApiClient().request("GET /repos/:owner/:repo/properties/values", {
owner: repositoryNwo.owner,
@@ -106556,6 +106568,17 @@ var BuiltInLanguage = /* @__PURE__ */ ((BuiltInLanguage2) => {
return BuiltInLanguage2;
})(BuiltInLanguage || {});
var builtInLanguageSet = new Set(builtin_default.languages);
function isBuiltInLanguage(language) {
return builtInLanguageSet.has(language);
}
function parseBuiltInLanguage(language) {
language = language.trim().toLowerCase();
language = builtin_default.aliases[language] ?? language;
if (isBuiltInLanguage(language)) {
return language;
}
return void 0;
}
// src/overlay/diagnostics.ts
async function addOverlayDisablementDiagnostics(config, codeql, overlayDisabledReason) {
@@ -107803,7 +107826,7 @@ var internal = {
};
// src/init.ts
var fs15 = __toESM(require("fs"));
var fs16 = __toESM(require("fs"));
var path15 = __toESM(require("path"));
var core12 = __toESM(require_core());
var toolrunner4 = __toESM(require_toolrunner());
@@ -107811,7 +107834,7 @@ var github2 = __toESM(require_github());
var io5 = __toESM(require_io());
// src/codeql.ts
var fs14 = __toESM(require("fs"));
var fs15 = __toESM(require("fs"));
var path14 = __toESM(require("path"));
var core11 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
@@ -108065,20 +108088,221 @@ function wrapCliConfigurationError(cliError) {
}
// src/setup-codeql.ts
var fs12 = __toESM(require("fs"));
var fs13 = __toESM(require("fs"));
var path12 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var fs10 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS3 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs10.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay" /* Overlay */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip downloading overlay-base database from cache.`
);
return void 0;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip downloading overlay-base database from cache."
);
return void 0;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip downloading overlay-base database from cache."
);
return void 0;
}
const dbLocation = config.dbLocation;
const codeQlVersion = (await codeql.getVersion()).version;
const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
logger.info(
`Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}`
);
let databaseDownloadDurationMs = 0;
try {
const databaseDownloadStart = performance.now();
const foundKey = await waitForResultWithTimeLimit(
// This ten-minute limit for the cache restore operation is mainly to
// guard against the possibility that the cache service is unresponsive
// and hangs outside the data download.
//
// Data download (which is normally the most time-consuming part of the
// restore operation) should not run long enough to hit this limit. Even
// for an extremely large 10GB database, at a download speed of 40MB/s
// (see below), the download should complete within five minutes. If we
// do hit this limit, there are likely more serious problems other than
// mere slow download speed.
//
// This is important because we don't want any ongoing file operations
// on the database directory when we do hit this limit. Hitting this
// time limit takes us to a fallback path where we re-initialize the
// database from scratch at dbLocation, and having the cache restore
// operation continue to write into dbLocation in the background would
// really mess things up. We want to hit this limit only in the case
// of a hung cache service, not just slow download speed.
MAX_CACHE_OPERATION_MS3,
actionsCache4.restoreCache(
[dbLocation],
cacheRestoreKeyPrefix,
void 0,
{
// Azure SDK download (which is the default) uses 128MB segments; see
// https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
// Setting segmentTimeoutInMs to 3000 translates to segment download
// speed of about 40 MB/s, which should be achievable unless the
// download is unreliable (in which case we do want to abort).
segmentTimeoutInMs: 3e3
}
),
() => {
logger.info("Timed out downloading overlay-base database from cache");
}
);
databaseDownloadDurationMs = Math.round(
performance.now() - databaseDownloadStart
);
if (foundKey === void 0) {
logger.info("No overlay-base database found in Actions cache");
return void 0;
}
logger.info(
`Downloaded overlay-base database in cache with key ${foundKey}`
);
} catch (error3) {
logger.warning(
`Failed to download overlay-base database from cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return void 0;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Downloaded overlay-base database is invalid"
);
if (!databaseIsValid) {
logger.warning("Downloaded overlay-base database failed validation");
return void 0;
}
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.info(
"Filesystem error while accessing downloaded overlay-base database"
);
return void 0;
}
logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
return {
databaseSizeBytes: Math.round(databaseSizeBytes),
databaseDownloadDurationMs
};
}
// Builds the Actions cache restore key prefix for an overlay-base database:
// the languages/automation-scoped base prefix followed by the CodeQL CLI
// version and a trailing dash (callers match keys by this prefix).
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
  const basePrefix = await getCacheKeyPrefixBase(config.languages);
  return `${basePrefix}${codeQlVersion}-`;
}
// Computes the shared cache key prefix: the constant cache prefix, the cache
// format version, a hash of the key components (currently just the automation
// ID), and the sorted languages joined by underscores. Ends with a dash so
// callers can append further components.
async function getCacheKeyPrefixBase(parsedLanguages) {
  const languagesComponent = Array.from(parsedLanguages).sort().join("_");
  const componentsHash = createCacheKeyHash({
    automationID: await getAutomationID()
    // Add more components here as needed in the future
  });
  return [CACHE_PREFIX, CACHE_VERSION2, componentsHash, languagesComponent, ""].join("-");
}
// Lists the distinct CodeQL CLI versions for which an overlay-base database
// exists in the Actions cache for the given languages, sorted newest first.
// Returns undefined when any provided language is not a recognized built-in
// language, and an empty array when no usable cache entries are found.
async function getCodeQlVersionsForOverlayBaseDatabases(rawLanguages, logger) {
  const parsed = rawLanguages.map(parseBuiltInLanguage);
  const recognized = parsed.filter((l) => l !== void 0);
  if (recognized.length !== parsed.length) {
    logger.warning(
      "One or more provided languages are not recognized as built-in languages. Skipping searching for overlay-base databases in cache."
    );
    return void 0;
  }
  const cacheKeyPrefix = await getCacheKeyPrefixBase(recognized);
  logger.debug(
    `Searching for overlay-base databases in Actions cache with prefix ${cacheKeyPrefix}`
  );
  const caches = await listActionsCaches(cacheKeyPrefix);
  if (caches.length === 0) {
    logger.info("No overlay-base databases found in Actions cache.");
    return [];
  }
  logger.info(
    `Found ${caches.length} overlay-base ${caches.length === 1 ? "database" : "databases"} in the Actions cache.`
  );
  // Cache keys look like `<prefix><cli-version>-...`; extract the version
  // component that immediately follows the prefix.
  const versionRegex = /^([\d.]+)-/;
  const versionSet = new Set();
  for (const cache of caches) {
    if (!cache.key) {
      continue;
    }
    const match = cache.key.substring(cacheKeyPrefix.length).match(versionRegex);
    if (match && semver6.valid(match[1])) {
      versionSet.add(match[1]);
    }
  }
  if (versionSet.size === 0) {
    logger.info(
      "Could not parse any CodeQL versions from overlay-base database cache keys."
    );
    return [];
  }
  // rcompare sorts in descending order, so the newest version comes first.
  const versions = Array.from(versionSet).sort(semver6.rcompare);
  logger.info(
    `Found overlay databases for the following CodeQL versions in the Actions cache: ${versions.join(", ")}`
  );
  return versions;
}
// src/tar.ts
var import_child_process = require("child_process");
var fs10 = __toESM(require("fs"));
var fs11 = __toESM(require("fs"));
var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
var MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
async function getTarVersion() {
@@ -108120,9 +108344,9 @@ async function isZstdAvailable(logger) {
case "gnu":
return {
available: foundZstdBinary && // GNU tar only uses major and minor version numbers
semver6.gte(
semver6.coerce(version),
semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
semver7.gte(
semver7.coerce(version),
semver7.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
),
foundZstdBinary,
version: tarVersion
@@ -108131,7 +108355,7 @@ async function isZstdAvailable(logger) {
return {
available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain
// a patch version number.
semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
semver7.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
foundZstdBinary,
version: tarVersion
};
@@ -108146,7 +108370,7 @@ async function isZstdAvailable(logger) {
}
}
async function extract(tarPath, dest, compressionMethod, tarVersion, logger) {
fs10.mkdirSync(dest, { recursive: true });
fs11.mkdirSync(dest, { recursive: true });
switch (compressionMethod) {
case "gzip":
return await toolcache.extractTar(tarPath, dest);
@@ -108230,7 +108454,7 @@ function inferCompressionMethod(tarPath) {
}
// src/tools-download.ts
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var os4 = __toESM(require("os"));
var path11 = __toESM(require("path"));
var import_perf_hooks2 = require("perf_hooks");
@@ -108238,7 +108462,7 @@ var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
var TOOLCACHE_TOOL_NAME = "CodeQL";
function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) {
@@ -108337,7 +108561,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat
};
}
async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) {
fs11.mkdirSync(dest, { recursive: true });
fs12.mkdirSync(dest, { recursive: true });
const agent = new import_http_client.HttpClient().getAgent(codeqlURL);
headers = Object.assign(
{ "User-Agent": "CodeQL Action" },
@@ -108368,13 +108592,13 @@ function getToolcacheDirectory(version) {
return path11.join(
getRequiredEnvParam("RUNNER_TOOL_CACHE"),
TOOLCACHE_TOOL_NAME,
semver7.clean(version) || version,
semver8.clean(version) || version,
os4.arch() || ""
);
}
// Writes the `.complete` marker file that tells the Actions toolcache that
// the extracted tool directory at `extractedPath` is fully populated.
function writeToolcacheMarkerFile(extractedPath, logger) {
  const markerFilePath = `${extractedPath}.complete`;
  // Fix: this block contained the same write twice, via both the old `fs11`
  // and new `fs12` aliases of the "fs" module (an un-stripped diff pair).
  // A single write through the current alias, `fs12`, is sufficient.
  fs12.writeFileSync(markerFilePath, "");
  logger.info(`Created toolcache marker file ${markerFilePath}`);
}
function sanitizeUrlForStatusReport(url) {
@@ -108493,13 +108717,13 @@ function tryGetTagNameFromUrl(url, logger) {
return match[1];
}
function convertToSemVer(version, logger) {
if (!semver8.valid(version)) {
if (!semver9.valid(version)) {
logger.debug(
`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`
);
version = `0.0.0-${version}`;
}
const s = semver8.clean(version);
const s = semver9.clean(version);
if (!s) {
throw new Error(`Bundle version ${version} is not in SemVer format.`);
}
@@ -108509,7 +108733,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({
folder: toolcache3.find("CodeQL", version),
version
})).filter(({ folder }) => fs12.existsSync(path12.join(folder, "pinned-version")));
})).filter(({ folder }) => fs13.existsSync(path12.join(folder, "pinned-version")));
if (candidates.length === 1) {
const candidate = candidates[0];
logger.debug(
@@ -108531,7 +108755,55 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
}
return void 0;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
// Filters the enabled default CLI versions down to those for which a cached
// overlay-base database exists for the given languages. Returns an empty
// array when no languages were provided, when overlay version matching is
// disabled, or when listing the cache fails or finds nothing usable.
async function getEnabledVersionsWithOverlayBaseDatabases(defaultCliVersion, rawLanguages, features, logger) {
  if (rawLanguages === void 0 || rawLanguages.length === 0) {
    return [];
  }
  const matchVersionEnabled = await features.getValue(
    "overlay_analysis_match_codeql_version" /* OverlayAnalysisMatchCodeqlVersion */
  );
  if (!matchVersionEnabled) {
    return [];
  }
  let cachedVersions;
  try {
    cachedVersions = await getCodeQlVersionsForOverlayBaseDatabases(
      rawLanguages,
      logger
    );
  } catch (e) {
    // Cache listing is best-effort: fall back to "no matching versions".
    logger.warning(
      `While setting up CodeQL, was unable to list overlay-base databases in the Actions cache. Details: ${e}`
    );
    return [];
  }
  if (cachedVersions === void 0 || cachedVersions.length === 0) {
    return [];
  }
  const available = new Set(cachedVersions);
  return defaultCliVersion.enabledVersions.filter((v) =>
    available.has(v.cliVersion)
  );
}
// Picks which of the enabled default CLI versions to use. When analyzing a
// pull request, prefers the highest enabled version that has a cached
// overlay-base database; otherwise (and as a fallback) uses the first entry
// of enabledVersions, which is the default highest enabled version.
async function resolveDefaultCliVersion(defaultCliVersion, rawLanguages, features, logger) {
  const fallback = defaultCliVersion.enabledVersions[0];
  if (!isAnalyzingPullRequest()) {
    return fallback;
  }
  const overlayVersions = await getEnabledVersionsWithOverlayBaseDatabases(
    defaultCliVersion,
    rawLanguages,
    features,
    logger
  );
  if (overlayVersions.length > 0) {
    const best = overlayVersions[0];
    logger.info(
      `Using CodeQL version ${best.cliVersion} since this is the highest enabled version that has a cached overlay-base database.`
    );
    return best;
  }
  logger.info(
    `Using CodeQL version ${fallback.cliVersion} since no enabled versions with cached overlay-base databases were found.`
  );
  return fallback;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, rawLanguages, apiDetails, variant, tarSupportsZstd, features, logger) {
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
const compressionMethod2 = inferCompressionMethod(toolsInput);
@@ -108625,21 +108897,33 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
);
}
}
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
} else if (toolsInput !== void 0) {
tagName = tryGetTagNameFromUrl(toolsInput, logger);
url = toolsInput;
if (tagName) {
const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger);
if (bundleVersion3 && semver8.valid(bundleVersion3)) {
if (bundleVersion3 && semver9.valid(bundleVersion3)) {
cliVersion2 = convertToSemVer(bundleVersion3, logger);
}
}
} else {
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger);
const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url ?? "unknown";
@@ -108836,7 +109120,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
}
return cliVersion2;
}
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
if (!await isBinaryAccessible("tar", logger)) {
throw new ConfigurationError(
"Could not find tar in PATH, so unable to extract CodeQL bundle."
@@ -108846,6 +109130,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
const source = await getCodeQLSource(
toolsInput,
defaultCliVersion,
rawLanguages,
apiDetails,
variant,
zstdAvailability.available,
@@ -108904,7 +109189,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
// Whether to download the Zstandard-compressed CodeQL bundle instead of the
// gzip one: only off Windows, when the available tar supports zstd, and when
// the CLI version is recent enough to ship a zstd bundle.
async function useZstdBundle(cliVersion2, tarSupportsZstd) {
  // Fix: the condition appeared twice, via both the old `semver8` and new
  // `semver9` aliases of the semver module (an un-stripped diff pair), which
  // made the expression invalid. Keep a single check via `semver9`.
  return (
    // In testing, gzip performs better than zstd on Windows.
    process.platform !== "win32" && tarSupportsZstd && semver9.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
  );
}
function getTempExtractionDir(tempDir) {
@@ -108936,7 +109221,7 @@ async function getNightlyToolsUrl(logger) {
}
}
function getLatestToolcacheVersion(logger) {
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a));
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver9.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions
@@ -108956,7 +109241,7 @@ function isReservedToolsValue(tools) {
}
// src/tracer-config.ts
var fs13 = __toESM(require("fs"));
var fs14 = __toESM(require("fs"));
var path13 = __toESM(require("path"));
async function shouldEnableIndirectTracing(codeql, config) {
if (config.buildMode === "none" /* None */) {
@@ -108969,7 +109254,7 @@ async function shouldEnableIndirectTracing(codeql, config) {
}
async function getTracerConfigForCluster(config) {
const tracingEnvVariables = JSON.parse(
fs13.readFileSync(
fs14.readFileSync(
path13.resolve(
config.dbLocation,
"temp/tracingEnvironment/start-tracing.json"
@@ -108995,7 +109280,7 @@ var CODEQL_NEXT_MINIMUM_VERSION = "2.19.4";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.15";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2026-04-09";
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger, checkVersion) {
try {
const {
codeqlFolder,
@@ -109009,6 +109294,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger
);
@@ -109078,7 +109364,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"tools",
"tracing-config.lua"
);
return fs14.existsSync(tracingConfigPath);
return fs15.existsSync(tracingConfigPath);
},
async isScannedLanguage(language) {
return !await this.isTracedLanguage(language);
@@ -109554,7 +109840,7 @@ async function writeCodeScanningConfigFile(config, logger) {
logger.startGroup("Augmented user configuration file contents");
logger.info(dump(augmentedConfig));
logger.endGroup();
fs14.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
fs15.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
return codeScanningConfigFile;
}
var TRAP_CACHE_SIZE_MB = 1024;
@@ -109598,7 +109884,7 @@ async function getJobRunUuidSarifOptions(codeql) {
}
// src/init.ts
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
logger.startGroup("Setup CodeQL tools");
const {
codeql,
@@ -109612,6 +109898,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
true
@@ -109632,7 +109919,7 @@ async function initConfig2(features, inputs) {
});
}
async function runDatabaseInitCluster(databaseInitEnvironment, codeql, config, sourceRoot, processName, qlconfigFile, logger) {
fs15.mkdirSync(config.dbLocation, { recursive: true });
fs16.mkdirSync(config.dbLocation, { recursive: true });
await wrapEnvironment(
databaseInitEnvironment,
async () => await codeql.databaseInitCluster(
@@ -109668,24 +109955,24 @@ async function checkPacksForOverlayCompatibility(codeql, config, logger) {
function checkPackForOverlayCompatibility(packDir, codeQlOverlayVersion, logger) {
try {
let qlpackPath = path15.join(packDir, "qlpack.yml");
if (!fs15.existsSync(qlpackPath)) {
if (!fs16.existsSync(qlpackPath)) {
qlpackPath = path15.join(packDir, "codeql-pack.yml");
}
const qlpackContents = load(
fs15.readFileSync(qlpackPath, "utf8")
fs16.readFileSync(qlpackPath, "utf8")
);
if (!qlpackContents.buildMetadata) {
return true;
}
const packInfoPath = path15.join(packDir, ".packinfo");
if (!fs15.existsSync(packInfoPath)) {
if (!fs16.existsSync(packInfoPath)) {
logger.warning(
`The query pack at ${packDir} does not have a .packinfo file, so it cannot support overlay analysis. Recompiling the query pack with the latest CodeQL CLI should solve this problem.`
);
return false;
}
const packInfoFileContents = JSON.parse(
fs15.readFileSync(packInfoPath, "utf8")
fs16.readFileSync(packInfoPath, "utf8")
);
const packOverlayVersion = packInfoFileContents.overlayVersion;
if (typeof packOverlayVersion !== "number") {
@@ -109720,8 +110007,8 @@ async function checkInstallPython311(languages, codeql) {
]).exec();
}
}
function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs15.rmSync) {
if (fs15.existsSync(config.dbLocation) && (fs15.statSync(config.dbLocation).isFile() || fs15.readdirSync(config.dbLocation).length > 0)) {
function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = fs16.rmSync) {
if (fs16.existsSync(config.dbLocation) && (fs16.statSync(config.dbLocation).isFile() || fs16.readdirSync(config.dbLocation).length > 0)) {
if (!options.disableExistingDirectoryWarning) {
logger.warning(
`The database cluster directory ${config.dbLocation} must be empty. Attempting to clean it up.`
@@ -109825,163 +110112,6 @@ To opt out of this change, ${envVarOptOut}`;
core12.exportVariable("CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, "true");
}
// src/overlay/caching.ts
var fs16 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var semver9 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS3 = 6e5;
// Validates an overlay-base database cluster: the base database OIDs file
// must exist, and `codeql resolve database` must report an
// overlayBaseSpecifier for every language's database. Logs a message using
// `warningPrefix` and returns false on the first failed check; returns true
// only when every check passes.
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
  const oidsFilePath = getBaseDatabaseOidsFilePath(config);
  if (!fs16.existsSync(oidsFilePath)) {
    logger.warning(`${warningPrefix}: ${oidsFilePath} does not exist`);
    return false;
  }
  for (const language of config.languages) {
    const dbPath = getCodeQLDatabasePath(config, language);
    try {
      const output = await codeql.resolveDatabase(dbPath);
      if (output !== void 0 && "overlayBaseSpecifier" in output) {
        logger.debug(
          `Overlay base specifier for ${language} overlay-base database found: ${output.overlayBaseSpecifier}`
        );
        continue;
      }
      logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
      return false;
    } catch (e) {
      logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
      return false;
    }
  }
  return true;
}
/**
 * Attempts to restore an overlay-base database into `config.dbLocation`
 * from the Actions cache.
 *
 * Returns download statistics ({ databaseSizeBytes,
 * databaseDownloadDurationMs }) on success. Returns `undefined` whenever the
 * download is skipped (wrong overlay mode, caching disabled, test mode),
 * times out, fails, or produces a database that fails validation — callers
 * are expected to fall back to building the database from scratch.
 */
async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) {
  // Guard clauses: only restore from cache when overlay analysis with
  // caching is actually in effect for this run.
  const overlayDatabaseMode = config.overlayDatabaseMode;
  if (overlayDatabaseMode !== "overlay" /* Overlay */) {
    logger.debug(
      `Overlay database mode is ${overlayDatabaseMode}. Skip downloading overlay-base database from cache.`
    );
    return void 0;
  }
  if (!config.useOverlayDatabaseCaching) {
    logger.debug(
      "Overlay database caching is disabled. Skip downloading overlay-base database from cache."
    );
    return void 0;
  }
  if (isInTestMode()) {
    logger.debug(
      "In test mode. Skip downloading overlay-base database from cache."
    );
    return void 0;
  }
  const dbLocation = config.dbLocation;
  // The restore key includes the CLI version so that only databases built by
  // a matching CodeQL version are restored.
  const codeQlVersion = (await codeql.getVersion()).version;
  const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
    config,
    codeQlVersion
  );
  logger.info(
    `Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}`
  );
  let databaseDownloadDurationMs = 0;
  try {
    const databaseDownloadStart = performance.now();
    const foundKey = await waitForResultWithTimeLimit(
      // This ten-minute limit for the cache restore operation is mainly to
      // guard against the possibility that the cache service is unresponsive
      // and hangs outside the data download.
      //
      // Data download (which is normally the most time-consuming part of the
      // restore operation) should not run long enough to hit this limit. Even
      // for an extremely large 10GB database, at a download speed of 40MB/s
      // (see below), the download should complete within five minutes. If we
      // do hit this limit, there are likely more serious problems other than
      // mere slow download speed.
      //
      // This is important because we don't want any ongoing file operations
      // on the database directory when we do hit this limit. Hitting this
      // time limit takes us to a fallback path where we re-initialize the
      // database from scratch at dbLocation, and having the cache restore
      // operation continue to write into dbLocation in the background would
      // really mess things up. We want to hit this limit only in the case
      // of a hung cache service, not just slow download speed.
      MAX_CACHE_OPERATION_MS3,
      actionsCache4.restoreCache(
        [dbLocation],
        cacheRestoreKeyPrefix,
        void 0,
        {
          // Azure SDK download (which is the default) uses 128MB segments; see
          // https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
          // Setting segmentTimeoutInMs to 3000 translates to segment download
          // speed of about 40 MB/s, which should be achievable unless the
          // download is unreliable (in which case we do want to abort).
          segmentTimeoutInMs: 3e3
        }
      ),
      () => {
        logger.info("Timed out downloading overlay-base database from cache");
      }
    );
    // Record the elapsed time even when no key was found or the operation
    // timed out, so the duration reflects the full wait.
    databaseDownloadDurationMs = Math.round(
      performance.now() - databaseDownloadStart
    );
    if (foundKey === void 0) {
      logger.info("No overlay-base database found in Actions cache");
      return void 0;
    }
    logger.info(
      `Downloaded overlay-base database in cache with key ${foundKey}`
    );
  } catch (error3) {
    logger.warning(
      `Failed to download overlay-base database from cache: ${error3 instanceof Error ? error3.message : String(error3)}`
    );
    return void 0;
  }
  // A restored database is not trusted blindly: verify it resolves as a
  // usable overlay base before reporting success.
  const databaseIsValid = await checkOverlayBaseDatabase(
    codeql,
    config,
    logger,
    "Downloaded overlay-base database is invalid"
  );
  if (!databaseIsValid) {
    logger.warning("Downloaded overlay-base database failed validation");
    return void 0;
  }
  const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
  if (databaseSizeBytes === void 0) {
    logger.info(
      "Filesystem error while accessing downloaded overlay-base database"
    );
    return void 0;
  }
  logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
  return {
    databaseSizeBytes: Math.round(databaseSizeBytes),
    databaseDownloadDurationMs
  };
}
// Restore key prefix for an overlay-base database cache entry: the base
// prefix (scoped by languages and automation ID) with the CodeQL CLI
// version and a trailing dash appended.
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
  const base = await getCacheKeyPrefixBase(config.languages);
  return `${base}${codeQlVersion}-`;
}
// Shared cache key prefix: constant prefix, cache format version, a hash of
// the key components (currently just the automation ID), and the sorted
// languages joined by underscores, ending with a dash for further suffixes.
async function getCacheKeyPrefixBase(parsedLanguages) {
  const sortedLanguages = Array.from(parsedLanguages).sort();
  const componentsHash = createCacheKeyHash({
    automationID: await getAutomationID()
    // Add more components here as needed in the future
  });
  return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${sortedLanguages.join("_")}-`;
}
// src/status-report.ts
var os5 = __toESM(require("os"));
var core13 = __toESM(require_core());
@@ -110551,12 +110681,16 @@ async function run(startedAt) {
}
const codeQLDefaultVersionInfo = await features.getEnabledDefaultCliVersions(gitHubVersion.type);
toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
const rawLanguages = getRawLanguagesNoAutodetect(
getOptionalInput("languages")
);
const initCodeQLResult = await initCodeQL(
getOptionalInput("tools"),
apiDetails,
getTemporaryDirectory(),
gitHubVersion.type,
codeQLDefaultVersionInfo,
rawLanguages,
features,
logger
);
+53 -47
View File
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare2 = require_compare();
var rcompare2 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare2 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare2,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -47732,16 +47732,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -48030,8 +48030,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -48044,8 +48044,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare2(b, a, loose);
}
exports2.sort = sort;
@@ -51958,7 +51958,7 @@ var require_cacheUtils = __commonJS({
var crypto2 = __importStar2(require("crypto"));
var fs6 = __importStar2(require("fs"));
var path7 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core14.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache3;
exports2.saveCache = saveCache4;
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core14 = __importStar2(require_core());
var path7 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache3(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99573,7 +99573,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
@@ -99587,7 +99587,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -99596,7 +99596,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -99856,7 +99856,7 @@ var require_tool_cache = __commonJS({
var os2 = __importStar2(require("os"));
var path7 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -100129,7 +100129,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch}`);
core14.debug(`source dir: ${sourceDir}`);
@@ -100147,7 +100147,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch}`);
core14.debug(`source file: ${sourceFile}`);
@@ -100177,7 +100177,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path7.join(_getCacheDirectory(), toolName, versionSpec, arch);
core14.debug(`checking cache: ${cachePath}`);
if (fs6.existsSync(cachePath) && fs6.existsSync(`${cachePath}.complete`)) {
@@ -100257,7 +100257,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path7.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path7.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
core14.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io5.rmRF(folderPath);
@@ -100267,30 +100267,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = path7.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path7.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
const markerPath = `${folderPath}.complete`;
fs6.writeFileSync(markerPath, "");
core14.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core14.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core14.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core14.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core14.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -105034,20 +105034,26 @@ var toolrunner3 = __toESM(require_toolrunner());
// src/setup-codeql.ts
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tar.ts
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
// src/tools-download.ts
var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/tracer-config.ts
+312 -106
View File
@@ -203,7 +203,7 @@ var require_file_command = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs10 = __importStar2(require("fs"));
var os3 = __importStar2(require("os"));
var utils_1 = require_utils();
@@ -220,7 +220,7 @@ var require_file_command = __commonJS({
});
}
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${crypto2.randomUUID()}`;
const delimiter = `ghadelimiter_${crypto3.randomUUID()}`;
const convertedValue = (0, utils_1.toCommandValue)(value);
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
@@ -4262,11 +4262,11 @@ var require_util2 = __commonJS({
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl();
var supportedHashes = [];
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
supportedHashes = crypto3.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
} catch {
}
function responseURL(response) {
@@ -4539,7 +4539,7 @@ var require_util2 = __commonJS({
}
}
function bytesMatch(bytes, metadataList) {
if (crypto2 === void 0) {
if (crypto3 === void 0) {
return true;
}
const parsedMetadata = parseMetadata(metadataList);
@@ -4554,7 +4554,7 @@ var require_util2 = __commonJS({
for (const item of metadata) {
const algorithm = item.algo;
const expectedValue = item.hash;
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
let actualValue = crypto3.createHash(algorithm).update(bytes).digest("base64");
if (actualValue[actualValue.length - 1] === "=") {
if (actualValue[actualValue.length - 2] === "=") {
actualValue = actualValue.slice(0, -2);
@@ -5618,8 +5618,8 @@ var require_body = __commonJS({
var { multipartFormDataParser } = require_formdata_parser();
var random;
try {
const crypto2 = require("node:crypto");
random = (max) => crypto2.randomInt(0, max);
const crypto3 = require("node:crypto");
random = (max) => crypto3.randomInt(0, max);
} catch {
random = (max) => Math.floor(Math.random(max));
}
@@ -17023,13 +17023,13 @@ var require_frame = __commonJS({
"use strict";
var { maxUnsigned16Bit } = require_constants5();
var BUFFER_SIZE = 16386;
var crypto2;
var crypto3;
var buffer = null;
var bufIdx = BUFFER_SIZE;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
crypto2 = {
crypto3 = {
// not full compatibility, but minimum.
randomFillSync: function randomFillSync(buffer2, _offset, _size) {
for (let i = 0; i < buffer2.length; ++i) {
@@ -17042,7 +17042,7 @@ var require_frame = __commonJS({
function generateMask() {
if (bufIdx === BUFFER_SIZE) {
bufIdx = 0;
crypto2.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
crypto3.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
}
return [buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++]];
}
@@ -17114,9 +17114,9 @@ var require_connection = __commonJS({
var { Headers, getHeadersList } = require_headers();
var { getDecodeSplit } = require_util2();
var { WebsocketFrameSend } = require_frame();
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
}
function establishWebSocketConnection(url, protocols, client, ws, onEstablish, options) {
@@ -17136,7 +17136,7 @@ var require_connection = __commonJS({
const headersList = getHeadersList(new Headers(options.headers));
request2.headersList = headersList;
}
const keyValue = crypto2.randomBytes(16).toString("base64");
const keyValue = crypto3.randomBytes(16).toString("base64");
request2.headersList.append("sec-websocket-key", keyValue);
request2.headersList.append("sec-websocket-version", "13");
for (const protocol of protocols) {
@@ -17166,7 +17166,7 @@ var require_connection = __commonJS({
return;
}
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
const digest = crypto3.createHash("sha1").update(keyValue + uid).digest("base64");
if (secWSAccept !== digest) {
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
return;
@@ -25260,11 +25260,11 @@ var require_util10 = __commonJS({
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
var supportedHashes = [];
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
supportedHashes = crypto2.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
supportedHashes = crypto3.getHashes().filter((hash) => possibleRelevantHashes.includes(hash));
} catch {
}
function responseURL(response) {
@@ -25537,7 +25537,7 @@ var require_util10 = __commonJS({
}
}
function bytesMatch(bytes, metadataList) {
if (crypto2 === void 0) {
if (crypto3 === void 0) {
return true;
}
const parsedMetadata = parseMetadata(metadataList);
@@ -25552,7 +25552,7 @@ var require_util10 = __commonJS({
for (const item of metadata) {
const algorithm = item.algo;
const expectedValue = item.hash;
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
let actualValue = crypto3.createHash(algorithm).update(bytes).digest("base64");
if (actualValue[actualValue.length - 1] === "=") {
if (actualValue[actualValue.length - 2] === "=") {
actualValue = actualValue.slice(0, -2);
@@ -26616,8 +26616,8 @@ var require_body2 = __commonJS({
var { multipartFormDataParser } = require_formdata_parser2();
var random;
try {
const crypto2 = require("node:crypto");
random = (max) => crypto2.randomInt(0, max);
const crypto3 = require("node:crypto");
random = (max) => crypto3.randomInt(0, max);
} catch {
random = (max) => Math.floor(Math.random(max));
}
@@ -38021,13 +38021,13 @@ var require_frame2 = __commonJS({
"use strict";
var { maxUnsigned16Bit } = require_constants10();
var BUFFER_SIZE = 16386;
var crypto2;
var crypto3;
var buffer = null;
var bufIdx = BUFFER_SIZE;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
crypto2 = {
crypto3 = {
// not full compatibility, but minimum.
randomFillSync: function randomFillSync(buffer2, _offset, _size) {
for (let i = 0; i < buffer2.length; ++i) {
@@ -38040,7 +38040,7 @@ var require_frame2 = __commonJS({
function generateMask() {
if (bufIdx === BUFFER_SIZE) {
bufIdx = 0;
crypto2.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
crypto3.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
}
return [buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++]];
}
@@ -38112,9 +38112,9 @@ var require_connection2 = __commonJS({
var { Headers, getHeadersList } = require_headers2();
var { getDecodeSplit } = require_util10();
var { WebsocketFrameSend } = require_frame2();
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
}
function establishWebSocketConnection(url, protocols, client, ws, onEstablish, options) {
@@ -38134,7 +38134,7 @@ var require_connection2 = __commonJS({
const headersList = getHeadersList(new Headers(options.headers));
request2.headersList = headersList;
}
const keyValue = crypto2.randomBytes(16).toString("base64");
const keyValue = crypto3.randomBytes(16).toString("base64");
request2.headersList.append("sec-websocket-key", keyValue);
request2.headersList.append("sec-websocket-version", "13");
for (const protocol of protocols) {
@@ -38164,7 +38164,7 @@ var require_connection2 = __commonJS({
return;
}
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
const digest = crypto3.createHash("sha1").update(keyValue + uid).digest("base64");
if (secWSAccept !== digest) {
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
return;
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare2 = require_compare();
var rcompare2 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare2 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare2,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -47732,16 +47732,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -48030,8 +48030,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -48044,8 +48044,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -50550,7 +50550,7 @@ var require_internal_hash_files = __commonJS({
};
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var core15 = __importStar2(require_core());
var fs10 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
@@ -50563,7 +50563,7 @@ var require_internal_hash_files = __commonJS({
const writeDelegate = verbose ? core15.info : core15.debug;
let hasMatch = false;
const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd();
const result = crypto2.createHash("sha256");
const result = crypto3.createHash("sha256");
let count = 0;
try {
for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
@@ -50579,7 +50579,7 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto2.createHash("sha256");
const hash = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs10.createReadStream(file), hash);
result.write(hash.digest());
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare2(b, a, loose);
}
exports2.sort = sort;
@@ -51955,10 +51955,10 @@ var require_cacheUtils = __commonJS({
var exec = __importStar2(require_exec());
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs10 = __importStar2(require("fs"));
var path10 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -51979,7 +51979,7 @@ var require_cacheUtils = __commonJS({
}
tempDirectory = path10.join(baseLocation, "actions", "temp");
}
const dest = path10.join(tempDirectory, crypto2.randomUUID());
const dest = path10.join(tempDirectory, crypto3.randomUUID());
yield io6.mkdirP(dest);
return dest;
});
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core15.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -52087,7 +52087,7 @@ var require_cacheUtils = __commonJS({
components.push("windows-only");
}
components.push(versionSalt);
return crypto2.createHash("sha256").update(components.join("|")).digest("hex");
return crypto3.createHash("sha256").update(components.join("|")).digest("hex");
}
function getRuntimeToken() {
const token = process.env["ACTIONS_RUNTIME_TOKEN"];
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache3;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path10 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache3(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99573,7 +99573,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os3 = require("os");
var cp = require("child_process");
@@ -99587,7 +99587,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -99596,7 +99596,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -99850,13 +99850,13 @@ var require_tool_cache = __commonJS({
exports2.evaluateVersions = evaluateVersions;
var core15 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs10 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os3 = __importStar2(require("os"));
var path10 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -99875,7 +99875,7 @@ var require_tool_cache = __commonJS({
var userAgent2 = "actions/tool-cache";
function downloadTool2(url, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
dest = dest || path10.join(_getTempDirectory(), crypto2.randomUUID());
dest = dest || path10.join(_getTempDirectory(), crypto3.randomUUID());
yield io6.mkdirP(path10.dirname(dest));
core15.debug(`Downloading ${url}`);
core15.debug(`Destination ${dest}`);
@@ -100129,7 +100129,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch2 = arch2 || os3.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source dir: ${sourceDir}`);
@@ -100147,7 +100147,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch2 = arch2 || os3.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source file: ${sourceFile}`);
@@ -100177,7 +100177,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path10.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core15.debug(`checking cache: ${cachePath}`);
if (fs10.existsSync(cachePath) && fs10.existsSync(`${cachePath}.complete`)) {
@@ -100249,7 +100249,7 @@ var require_tool_cache = __commonJS({
function _createExtractFolder(dest) {
return __awaiter2(this, void 0, void 0, function* () {
if (!dest) {
dest = path10.join(_getTempDirectory(), crypto2.randomUUID());
dest = path10.join(_getTempDirectory(), crypto3.randomUUID());
}
yield io6.mkdirP(dest);
return dest;
@@ -100257,7 +100257,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path10.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const folderPath = path10.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
core15.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io6.rmRF(folderPath);
@@ -100267,30 +100267,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch2) {
const folderPath = path10.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const folderPath = path10.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs10.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core15.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core15.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core15.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core15.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -103784,6 +103784,12 @@ async function checkForTimeout() {
process.exit();
}
}
function parseMatrixInput(matrixInput) {
if (matrixInput === void 0 || matrixInput === "null") {
return void 0;
}
return JSON.parse(matrixInput);
}
function wrapError(error3) {
return error3 instanceof Error ? error3 : new Error(String(error3));
}
@@ -104003,6 +104009,32 @@ async function runTool(cmd, args = [], opts = {}) {
}
return stdout;
}
function getPullRequestBranches() {
const pullRequest = github.context.payload.pull_request;
if (pullRequest) {
return {
base: pullRequest.base.ref,
// We use the head label instead of the head ref here, because the head
// ref lacks owner information and by itself does not uniquely identify
// the head branch (which may be in a forked repository).
head: pullRequest.head.label
};
}
const codeScanningRef = process.env.CODE_SCANNING_REF;
const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
if (codeScanningRef && codeScanningBaseBranch) {
return {
base: codeScanningBaseBranch,
// PR analysis under Default Setup analyzes the PR head commit instead of
// the merge commit, so we can use the provided ref directly.
head: codeScanningRef
};
}
return void 0;
}
function isAnalyzingPullRequest() {
return getPullRequestBranches() !== void 0;
}
// src/api-client.ts
var core5 = __toESM(require_core());
@@ -104202,6 +104234,37 @@ async function getAnalysisKey() {
core5.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey);
return analysisKey;
}
async function getAutomationID() {
const analysis_key = await getAnalysisKey();
const environment = getRequiredInput("matrix");
return computeAutomationID(analysis_key, environment);
}
function computeAutomationID(analysis_key, environment) {
let automationID = `${analysis_key}/`;
const matrix = parseMatrixInput(environment);
if (matrix !== void 0) {
for (const entry of Object.entries(matrix).sort()) {
if (typeof entry[1] === "string") {
automationID += `${entry[0]}:${entry[1]}/`;
} else {
automationID += `${entry[0]}:/`;
}
}
}
return automationID;
}
async function listActionsCaches(keyPrefix, ref) {
const repositoryNwo = getRepositoryNwo();
return await getApiClient().paginate(
"GET /repos/{owner}/{repo}/actions/caches",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
key: keyPrefix,
ref
}
);
}
function isEnablementError(msg) {
return [
/Code Security must be enabled/i,
@@ -105384,7 +105447,13 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
// src/caching-utils.ts
var crypto2 = __toESM(require("crypto"));
var core7 = __toESM(require_core());
var cacheKeyHashLength = 16;
function createCacheKeyHash(components) {
const componentsJson = JSON.stringify(components);
return crypto2.createHash("sha256").update(componentsJson).digest("hex").substring(0, cacheKeyHashLength);
}
// src/config/db-config.ts
var jsonschema = __toESM(require_lib2());
@@ -105529,6 +105598,17 @@ var builtin_default = {
// src/languages/index.ts
var builtInLanguageSet = new Set(builtin_default.languages);
function isBuiltInLanguage(language) {
return builtInLanguageSet.has(language);
}
function parseBuiltInLanguage(language) {
language = language.trim().toLowerCase();
language = builtin_default.aliases[language] ?? language;
if (isBuiltInLanguage(language)) {
return language;
}
return void 0;
}
// src/overlay/status.ts
var actionsCache = __toESM(require_cache5());
@@ -105584,7 +105664,68 @@ var fs8 = __toESM(require("fs"));
var path8 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
async function getCacheKeyPrefixBase(parsedLanguages) {
const languagesComponent = [...parsedLanguages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languagesComponent}-`;
}
// Lists the distinct CodeQL CLI versions for which overlay-base databases
// exist in the Actions cache for the given languages.
//
// Returns:
// - `undefined` if any provided language cannot be parsed as a built-in
//   language (overlay-base database caching only covers built-in languages);
// - `[]` if no matching caches exist or no versions could be parsed from
//   their keys;
// - otherwise the versions sorted from highest to lowest.
//
// May throw if listing the Actions cache fails; callers handle that.
async function getCodeQlVersionsForOverlayBaseDatabases(rawLanguages, logger) {
  const languages = rawLanguages.map(parseBuiltInLanguage);
  if (languages.includes(void 0)) {
    logger.warning(
      "One or more provided languages are not recognized as built-in languages. Skipping searching for overlay-base databases in cache."
    );
    return void 0;
  }
  // The filter is a no-op at runtime (the includes() check above already
  // ruled out undefined); it re-narrows the element type for the key builder.
  const cacheKeyPrefix = await getCacheKeyPrefixBase(
    languages.filter((l) => l !== void 0)
  );
  logger.debug(
    `Searching for overlay-base databases in Actions cache with prefix ${cacheKeyPrefix}`
  );
  const caches = await listActionsCaches(cacheKeyPrefix);
  if (caches.length === 0) {
    logger.info("No overlay-base databases found in Actions cache.");
    return [];
  }
  logger.info(
    `Found ${caches.length} overlay-base ${caches.length === 1 ? "database" : "databases"} in the Actions cache.`
  );
  // Cache keys continue after the prefix with a dotted CLI version followed
  // by a dash; extract that leading version component from each key.
  const versionRegex = /^([\d.]+)-/;
  const versionSet = /* @__PURE__ */ new Set();
  for (const cache of caches) {
    if (!cache.key) continue;
    const suffix = cache.key.substring(cacheKeyPrefix.length);
    const match = suffix.match(versionRegex);
    // Only keep strings that are well-formed semver versions; the loose
    // regex alone would also accept strings like "1.2.3.4".
    if (match && semver6.valid(match[1])) {
      versionSet.add(match[1]);
    }
  }
  if (versionSet.size === 0) {
    logger.info(
      "Could not parse any CodeQL versions from overlay-base database cache keys."
    );
    return [];
  }
  // rcompare sorts in descending order, so the highest version comes first.
  const versions = [...versionSet].sort(semver6.rcompare);
  logger.info(
    `Found overlay databases for the following CodeQL versions in the Actions cache: ${versions.join(", ")}`
  );
  return versions;
}
// src/tar.ts
var import_child_process = require("child_process");
@@ -105593,7 +105734,7 @@ var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
var MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
async function getTarVersion() {
@@ -105635,9 +105776,9 @@ async function isZstdAvailable(logger) {
case "gnu":
return {
available: foundZstdBinary && // GNU tar only uses major and minor version numbers
semver6.gte(
semver6.coerce(version),
semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
semver7.gte(
semver7.coerce(version),
semver7.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
),
foundZstdBinary,
version: tarVersion
@@ -105646,7 +105787,7 @@ async function isZstdAvailable(logger) {
return {
available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain
// a patch version number.
semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
semver7.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
foundZstdBinary,
version: tarVersion
};
@@ -105753,7 +105894,7 @@ var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
var TOOLCACHE_TOOL_NAME = "CodeQL";
function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) {
@@ -105883,7 +106024,7 @@ function getToolcacheDirectory(version) {
return path7.join(
getRequiredEnvParam("RUNNER_TOOL_CACHE"),
TOOLCACHE_TOOL_NAME,
semver7.clean(version) || version,
semver8.clean(version) || version,
os.arch() || ""
);
}
@@ -106008,13 +106149,13 @@ function tryGetTagNameFromUrl(url, logger) {
return match[1];
}
function convertToSemVer(version, logger) {
if (!semver8.valid(version)) {
if (!semver9.valid(version)) {
logger.debug(
`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`
);
version = `0.0.0-${version}`;
}
const s = semver8.clean(version);
const s = semver9.clean(version);
if (!s) {
throw new Error(`Bundle version ${version} is not in SemVer format.`);
}
@@ -106046,7 +106187,55 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
}
return void 0;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
// Returns the subset of `defaultCliVersion.enabledVersions` (preserving
// their order) that have a cached overlay-base database in the Actions
// cache for the given languages.
//
// Returns an empty array when:
// - no languages were provided;
// - the OverlayAnalysisMatchCodeqlVersion feature is disabled;
// - listing the Actions cache failed (logged as a warning — cache lookup
//   problems must never fail tool setup); or
// - no cached versions matched any enabled version.
async function getEnabledVersionsWithOverlayBaseDatabases(defaultCliVersion, rawLanguages, features, logger) {
  if (rawLanguages === void 0 || rawLanguages.length === 0) {
    return [];
  }
  if (!await features.getValue("overlay_analysis_match_codeql_version" /* OverlayAnalysisMatchCodeqlVersion */)) {
    return [];
  }
  let cachedVersions;
  try {
    cachedVersions = await getCodeQlVersionsForOverlayBaseDatabases(
      rawLanguages,
      logger
    );
  } catch (e) {
    // Best-effort: fall back to default version selection on any cache error.
    logger.warning(
      `While setting up CodeQL, was unable to list overlay-base databases in the Actions cache. Details: ${e}`
    );
    return [];
  }
  // `undefined` means the languages could not all be parsed as built-in
  // languages; treat that the same as finding no cached versions.
  if (cachedVersions === void 0 || cachedVersions.length === 0) {
    return [];
  }
  const cachedVersionsSet = new Set(cachedVersions);
  return defaultCliVersion.enabledVersions.filter(
    (v) => cachedVersionsSet.has(v.cliVersion)
  );
}
// Picks which default CodeQL CLI version to install. Outside of pull
// request analysis this is simply the first enabled version. For pull
// requests, prefer the highest enabled version that already has a cached
// overlay-base database, so that cached database can be reused.
async function resolveDefaultCliVersion(defaultCliVersion, rawLanguages, features, logger) {
  const fallback = defaultCliVersion.enabledVersions[0];
  if (!isAnalyzingPullRequest()) {
    return fallback;
  }
  const overlayVersions = await getEnabledVersionsWithOverlayBaseDatabases(
    defaultCliVersion,
    rawLanguages,
    features,
    logger
  );
  const [best] = overlayVersions;
  if (best !== void 0) {
    logger.info(
      `Using CodeQL version ${best.cliVersion} since this is the highest enabled version that has a cached overlay-base database.`
    );
    return best;
  }
  logger.info(
    `Using CodeQL version ${fallback.cliVersion} since no enabled versions with cached overlay-base databases were found.`
  );
  return fallback;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, rawLanguages, apiDetails, variant, tarSupportsZstd, features, logger) {
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
const compressionMethod2 = inferCompressionMethod(toolsInput);
@@ -106140,21 +106329,33 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
);
}
}
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
} else if (toolsInput !== void 0) {
tagName = tryGetTagNameFromUrl(toolsInput, logger);
url = toolsInput;
if (tagName) {
const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger);
if (bundleVersion3 && semver8.valid(bundleVersion3)) {
if (bundleVersion3 && semver9.valid(bundleVersion3)) {
cliVersion2 = convertToSemVer(bundleVersion3, logger);
}
}
} else {
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger);
const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url ?? "unknown";
@@ -106351,7 +106552,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
}
return cliVersion2;
}
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
if (!await isBinaryAccessible("tar", logger)) {
throw new ConfigurationError(
"Could not find tar in PATH, so unable to extract CodeQL bundle."
@@ -106361,6 +106562,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
const source = await getCodeQLSource(
toolsInput,
defaultCliVersion,
rawLanguages,
apiDetails,
variant,
zstdAvailability.available,
@@ -106419,7 +106621,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
async function useZstdBundle(cliVersion2, tarSupportsZstd) {
return (
// In testing, gzip performs better than zstd on Windows.
process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
process.platform !== "win32" && tarSupportsZstd && semver9.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
);
}
function getTempExtractionDir(tempDir) {
@@ -106451,7 +106653,7 @@ async function getNightlyToolsUrl(logger) {
}
}
function getLatestToolcacheVersion(logger) {
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a));
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver9.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions
@@ -106488,7 +106690,7 @@ var CODEQL_NEXT_MINIMUM_VERSION = "2.19.4";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.15";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2026-04-09";
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger, checkVersion) {
try {
const {
codeqlFolder,
@@ -106502,6 +106704,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger
);
@@ -107091,7 +107294,7 @@ async function getJobRunUuidSarifOptions(codeql) {
}
// src/init.ts
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
logger.startGroup("Setup CodeQL tools");
const {
codeql,
@@ -107105,6 +107308,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
true
@@ -107405,6 +107609,8 @@ async function run(startedAt) {
getTemporaryDirectory(),
gitHubVersion.type,
codeQLDefaultVersionInfo,
void 0,
// rawLanguages: currently, setup-codeql is not language aware
features,
logger
);
+57 -51
View File
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare2 = require_compare();
var rcompare2 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare2 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare2,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -47732,16 +47732,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -48030,8 +48030,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -48044,8 +48044,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare2(b, a, loose);
}
exports2.sort = sort;
@@ -51958,7 +51958,7 @@ var require_cacheUtils = __commonJS({
var crypto2 = __importStar2(require("crypto"));
var fs3 = __importStar2(require("fs"));
var path4 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core15.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache5;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
var core15 = __importStar2(require_core());
var path4 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache5(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -106616,7 +106616,7 @@ var require_stream_writable = __commonJS({
pna.nextTick(cb, er);
}
function validChunk(stream, state, chunk, cb) {
var valid3 = true;
var valid4 = true;
var er = false;
if (chunk === null) {
er = new TypeError("May not write null values to stream");
@@ -106626,9 +106626,9 @@ var require_stream_writable = __commonJS({
if (er) {
stream.emit("error", er);
pna.nextTick(cb, er);
valid3 = false;
valid4 = false;
}
return valid3;
return valid4;
}
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
@@ -157681,7 +157681,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
@@ -157695,7 +157695,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -157704,7 +157704,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -157964,7 +157964,7 @@ var require_tool_cache = __commonJS({
var os2 = __importStar2(require("os"));
var path4 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -158237,7 +158237,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source dir: ${sourceDir}`);
@@ -158255,7 +158255,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source file: ${sourceFile}`);
@@ -158285,7 +158285,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path4.join(_getCacheDirectory(), toolName, versionSpec, arch);
core15.debug(`checking cache: ${cachePath}`);
if (fs3.existsSync(cachePath) && fs3.existsSync(`${cachePath}.complete`)) {
@@ -158365,7 +158365,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path4.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path4.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
core15.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io6.rmRF(folderPath);
@@ -158375,30 +158375,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = path4.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path4.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
const markerPath = `${folderPath}.complete`;
fs3.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core15.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core15.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core15.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core15.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -162446,24 +162446,30 @@ var cliErrorsConfig = {
// src/setup-codeql.ts
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tar.ts
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
// src/tools-download.ts
var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/dependency-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache4 = __toESM(require_cache5());
var glob = __toESM(require_glob());
// src/artifact-scanner.ts
+294 -108
View File
@@ -203,7 +203,7 @@ var require_file_command = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs14 = __importStar2(require("fs"));
var os2 = __importStar2(require("os"));
var utils_1 = require_utils();
@@ -220,7 +220,7 @@ var require_file_command = __commonJS({
});
}
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${crypto2.randomUUID()}`;
const delimiter = `ghadelimiter_${crypto3.randomUUID()}`;
const convertedValue = (0, utils_1.toCommandValue)(value);
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
@@ -4262,11 +4262,11 @@ var require_util2 = __commonJS({
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl();
var supportedHashes = [];
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
supportedHashes = crypto2.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
supportedHashes = crypto3.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
} catch {
}
function responseURL(response) {
@@ -4539,7 +4539,7 @@ var require_util2 = __commonJS({
}
}
function bytesMatch(bytes, metadataList) {
if (crypto2 === void 0) {
if (crypto3 === void 0) {
return true;
}
const parsedMetadata = parseMetadata(metadataList);
@@ -4554,7 +4554,7 @@ var require_util2 = __commonJS({
for (const item of metadata) {
const algorithm = item.algo;
const expectedValue = item.hash;
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
let actualValue = crypto3.createHash(algorithm).update(bytes).digest("base64");
if (actualValue[actualValue.length - 1] === "=") {
if (actualValue[actualValue.length - 2] === "=") {
actualValue = actualValue.slice(0, -2);
@@ -5618,8 +5618,8 @@ var require_body = __commonJS({
var { multipartFormDataParser } = require_formdata_parser();
var random;
try {
const crypto2 = require("node:crypto");
random = (max) => crypto2.randomInt(0, max);
const crypto3 = require("node:crypto");
random = (max) => crypto3.randomInt(0, max);
} catch {
random = (max) => Math.floor(Math.random(max));
}
@@ -17023,13 +17023,13 @@ var require_frame = __commonJS({
"use strict";
var { maxUnsigned16Bit } = require_constants5();
var BUFFER_SIZE = 16386;
var crypto2;
var crypto3;
var buffer = null;
var bufIdx = BUFFER_SIZE;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
crypto2 = {
crypto3 = {
// not full compatibility, but minimum.
randomFillSync: function randomFillSync(buffer2, _offset, _size) {
for (let i = 0; i < buffer2.length; ++i) {
@@ -17042,7 +17042,7 @@ var require_frame = __commonJS({
function generateMask() {
if (bufIdx === BUFFER_SIZE) {
bufIdx = 0;
crypto2.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
crypto3.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
}
return [buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++]];
}
@@ -17114,9 +17114,9 @@ var require_connection = __commonJS({
var { Headers, getHeadersList } = require_headers();
var { getDecodeSplit } = require_util2();
var { WebsocketFrameSend } = require_frame();
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
}
function establishWebSocketConnection(url2, protocols, client, ws, onEstablish, options) {
@@ -17136,7 +17136,7 @@ var require_connection = __commonJS({
const headersList = getHeadersList(new Headers(options.headers));
request2.headersList = headersList;
}
const keyValue = crypto2.randomBytes(16).toString("base64");
const keyValue = crypto3.randomBytes(16).toString("base64");
request2.headersList.append("sec-websocket-key", keyValue);
request2.headersList.append("sec-websocket-version", "13");
for (const protocol of protocols) {
@@ -17166,7 +17166,7 @@ var require_connection = __commonJS({
return;
}
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
const digest = crypto3.createHash("sha1").update(keyValue + uid).digest("base64");
if (secWSAccept !== digest) {
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
return;
@@ -21922,16 +21922,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -22220,8 +22220,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -22234,8 +22234,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -26565,11 +26565,11 @@ var require_util10 = __commonJS({
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
var supportedHashes = [];
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
supportedHashes = crypto2.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
supportedHashes = crypto3.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
} catch {
}
function responseURL(response) {
@@ -26842,7 +26842,7 @@ var require_util10 = __commonJS({
}
}
function bytesMatch(bytes, metadataList) {
if (crypto2 === void 0) {
if (crypto3 === void 0) {
return true;
}
const parsedMetadata = parseMetadata(metadataList);
@@ -26857,7 +26857,7 @@ var require_util10 = __commonJS({
for (const item of metadata) {
const algorithm = item.algo;
const expectedValue = item.hash;
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
let actualValue = crypto3.createHash(algorithm).update(bytes).digest("base64");
if (actualValue[actualValue.length - 1] === "=") {
if (actualValue[actualValue.length - 2] === "=") {
actualValue = actualValue.slice(0, -2);
@@ -27921,8 +27921,8 @@ var require_body2 = __commonJS({
var { multipartFormDataParser } = require_formdata_parser2();
var random;
try {
const crypto2 = require("node:crypto");
random = (max) => crypto2.randomInt(0, max);
const crypto3 = require("node:crypto");
random = (max) => crypto3.randomInt(0, max);
} catch {
random = (max) => Math.floor(Math.random(max));
}
@@ -39326,13 +39326,13 @@ var require_frame2 = __commonJS({
"use strict";
var { maxUnsigned16Bit } = require_constants10();
var BUFFER_SIZE = 16386;
var crypto2;
var crypto3;
var buffer = null;
var bufIdx = BUFFER_SIZE;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
crypto2 = {
crypto3 = {
// not full compatibility, but minimum.
randomFillSync: function randomFillSync(buffer2, _offset, _size) {
for (let i = 0; i < buffer2.length; ++i) {
@@ -39345,7 +39345,7 @@ var require_frame2 = __commonJS({
function generateMask() {
if (bufIdx === BUFFER_SIZE) {
bufIdx = 0;
crypto2.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
crypto3.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
}
return [buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++]];
}
@@ -39417,9 +39417,9 @@ var require_connection2 = __commonJS({
var { Headers, getHeadersList } = require_headers2();
var { getDecodeSplit } = require_util10();
var { WebsocketFrameSend } = require_frame2();
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
}
function establishWebSocketConnection(url2, protocols, client, ws, onEstablish, options) {
@@ -39439,7 +39439,7 @@ var require_connection2 = __commonJS({
const headersList = getHeadersList(new Headers(options.headers));
request2.headersList = headersList;
}
const keyValue = crypto2.randomBytes(16).toString("base64");
const keyValue = crypto3.randomBytes(16).toString("base64");
request2.headersList.append("sec-websocket-key", keyValue);
request2.headersList.append("sec-websocket-version", "13");
for (const protocol of protocols) {
@@ -39469,7 +39469,7 @@ var require_connection2 = __commonJS({
return;
}
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
const digest = crypto3.createHash("sha1").update(keyValue + uid).digest("base64");
if (secWSAccept !== digest) {
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
return;
@@ -45836,11 +45836,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -45983,8 +45983,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare3 = require_compare();
var rcompare2 = (a, b, loose) => compare3(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare3(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -47200,7 +47200,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -47209,7 +47209,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare3 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -47238,7 +47238,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -47247,7 +47247,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare3,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -50550,7 +50550,7 @@ var require_internal_hash_files = __commonJS({
};
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var core14 = __importStar2(require_core());
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
@@ -50563,7 +50563,7 @@ var require_internal_hash_files = __commonJS({
const writeDelegate = verbose ? core14.info : core14.debug;
let hasMatch = false;
const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd();
const result = crypto2.createHash("sha256");
const result = crypto3.createHash("sha256");
let count = 0;
try {
for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
@@ -50579,7 +50579,7 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs14.createReadStream(file), hash2);
result.write(hash2.digest());
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare3(b, a, loose);
}
exports2.sort = sort;
@@ -51955,10 +51955,10 @@ var require_cacheUtils = __commonJS({
var exec = __importStar2(require_exec());
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs14 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -51979,7 +51979,7 @@ var require_cacheUtils = __commonJS({
}
tempDirectory = path12.join(baseLocation, "actions", "temp");
}
const dest = path12.join(tempDirectory, crypto2.randomUUID());
const dest = path12.join(tempDirectory, crypto3.randomUUID());
yield io6.mkdirP(dest);
return dest;
});
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core14.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -52087,7 +52087,7 @@ var require_cacheUtils = __commonJS({
components.push("windows-only");
}
components.push(versionSalt);
return crypto2.createHash("sha256").update(components.join("|")).digest("hex");
return crypto3.createHash("sha256").update(components.join("|")).digest("hex");
}
function getRuntimeToken() {
const token = process.env["ACTIONS_RUNTIME_TOKEN"];
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache3;
exports2.saveCache = saveCache4;
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core14 = __importStar2(require_core());
var path12 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache3(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99573,7 +99573,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
@@ -99587,7 +99587,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -99596,7 +99596,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -99850,13 +99850,13 @@ var require_tool_cache = __commonJS({
exports2.evaluateVersions = evaluateVersions;
var core14 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs14 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os2 = __importStar2(require("os"));
var path12 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -99875,7 +99875,7 @@ var require_tool_cache = __commonJS({
var userAgent2 = "actions/tool-cache";
function downloadTool2(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
dest = dest || path12.join(_getTempDirectory(), crypto2.randomUUID());
dest = dest || path12.join(_getTempDirectory(), crypto3.randomUUID());
yield io6.mkdirP(path12.dirname(dest));
core14.debug(`Downloading ${url2}`);
core14.debug(`Destination ${dest}`);
@@ -100129,7 +100129,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch2 = arch2 || os2.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source dir: ${sourceDir}`);
@@ -100147,7 +100147,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch2 = arch2 || os2.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source file: ${sourceFile}`);
@@ -100177,7 +100177,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core14.debug(`checking cache: ${cachePath}`);
if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) {
@@ -100249,7 +100249,7 @@ var require_tool_cache = __commonJS({
function _createExtractFolder(dest) {
return __awaiter2(this, void 0, void 0, function* () {
if (!dest) {
dest = path12.join(_getTempDirectory(), crypto2.randomUUID());
dest = path12.join(_getTempDirectory(), crypto3.randomUUID());
}
yield io6.mkdirP(dest);
return dest;
@@ -100257,7 +100257,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const folderPath = path12.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
core14.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io6.rmRF(folderPath);
@@ -100267,30 +100267,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch2) {
const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const folderPath = path12.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs14.writeFileSync(markerPath, "");
core14.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core14.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core14.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core14.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core14.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -106809,6 +106809,32 @@ async function runTool(cmd, args = [], opts = {}) {
}
return stdout;
}
// Determines the base/head branch pair for the current pull request analysis,
// or undefined when this run is not analyzing a pull request.
function getPullRequestBranches() {
  const pr = github.context.payload.pull_request;
  if (pr) {
    // The head label (unlike the head ref) carries owner information, which is
    // required to uniquely identify a head branch in a forked repository.
    return { base: pr.base.ref, head: pr.head.label };
  }
  // Default Setup exposes the branches via environment variables instead. PR
  // analysis under Default Setup analyzes the PR head commit rather than the
  // merge commit, so the provided ref can be used for the head directly.
  const head = process.env.CODE_SCANNING_REF;
  const base = process.env.CODE_SCANNING_BASE_BRANCH;
  if (head && base) {
    return { base, head };
  }
  return void 0;
}
// Reports whether the current workflow run is analyzing a pull request.
function isAnalyzingPullRequest() {
  const branches = getPullRequestBranches();
  return branches !== void 0;
}
var qualityCategoryMapping = {
"c#": "csharp",
cpp: "c-cpp",
@@ -107091,6 +107117,11 @@ async function getAnalysisKey() {
core5.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey);
return analysisKey;
}
// Computes the automation ID for this analysis from the analysis key and the
// "matrix" input, which together identify the workflow job configuration.
async function getAutomationID() {
  // Arguments evaluate left-to-right, preserving the original order of the
  // analysis-key lookup and the input read.
  return computeAutomationID(await getAnalysisKey(), getRequiredInput("matrix"));
}
function computeAutomationID(analysis_key, environment) {
let automationID = `${analysis_key}/`;
const matrix = parseMatrixInput(environment);
@@ -107105,6 +107136,18 @@ function computeAutomationID(analysis_key, environment) {
}
return automationID;
}
// Lists all Actions cache entries in the current repository whose keys start
// with `keyPrefix`, optionally restricted to caches created for `ref`.
// Paginates through the full result set before returning.
async function listActionsCaches(keyPrefix, ref) {
  const { owner, repo } = getRepositoryNwo();
  const params = { owner, repo, key: keyPrefix, ref };
  return await getApiClient().paginate("GET /repos/{owner}/{repo}/actions/caches", params);
}
function isEnablementError(msg) {
return [
/Code Security must be enabled/i,
@@ -107403,7 +107446,13 @@ var path6 = __toESM(require("path"));
var core9 = __toESM(require_core());
// src/caching-utils.ts
var crypto2 = __toESM(require("crypto"));
var core6 = __toESM(require_core());
var cacheKeyHashLength = 16;
// Produces a short, stable hex digest of the given cache key components so
// they can be embedded in an Actions cache key.
function createCacheKeyHash(components) {
  const digest = crypto2
    .createHash("sha256")
    .update(JSON.stringify(components))
    .digest("hex");
  return digest.substring(0, cacheKeyHashLength);
}
// src/config/db-config.ts
var jsonschema = __toESM(require_lib2());
@@ -108136,6 +108185,17 @@ var builtin_default = {
// src/languages/index.ts
var builtInLanguageSet = new Set(builtin_default.languages);
// Reports whether `language` is one of the built-in languages.
function isBuiltInLanguage(language) {
  const isBuiltIn = builtInLanguageSet.has(language);
  return isBuiltIn;
}
// Normalizes a raw language string (trimming, lowercasing, and resolving
// aliases) and returns the canonical built-in language name, or undefined if
// the input does not correspond to a built-in language.
function parseBuiltInLanguage(language) {
  const normalized = language.trim().toLowerCase();
  const canonical = builtin_default.aliases[normalized] ?? normalized;
  return isBuiltInLanguage(canonical) ? canonical : void 0;
}
// src/overlay/status.ts
var actionsCache = __toESM(require_cache5());
@@ -108215,7 +108275,7 @@ var fs9 = __toESM(require("fs"));
var path8 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// node_modules/uuid/dist-node/stringify.js
var byteToHex = [];
@@ -108261,6 +108321,67 @@ function _v4(options, buf, offset) {
}
var v4_default = v4;
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
// Builds the Actions cache key prefix under which overlay-base databases for
// the given set of languages are stored. The prefix embeds a hash of
// job-identifying components so that distinct jobs do not share caches.
async function getCacheKeyPrefixBase(parsedLanguages) {
  const sortedLanguages = [...parsedLanguages].sort().join("_");
  const componentsHash = createCacheKeyHash({
    automationID: await getAutomationID()
    // Add more components here as needed in the future
  });
  // Trailing "" yields the trailing "-" separator before the version suffix.
  return [CACHE_PREFIX, CACHE_VERSION, componentsHash, sortedLanguages, ""].join("-");
}
// Searches the Actions cache for overlay-base databases covering exactly the
// given languages and returns the distinct CodeQL CLI versions that produced
// them, sorted newest-first.
//
// Returns undefined when any provided language cannot be mapped to a built-in
// language (no cache search is attempted in that case), and an empty array
// when no cache entries with parseable versions are found.
async function getCodeQlVersionsForOverlayBaseDatabases(rawLanguages, logger) {
  const languages = rawLanguages.map(parseBuiltInLanguage);
  // An unrecognized language would make the language component of the cache
  // key prefix ambiguous, so bail out entirely rather than search with a
  // partial language list.
  if (languages.includes(void 0)) {
    logger.warning(
      "One or more provided languages are not recognized as built-in languages. Skipping searching for overlay-base databases in cache."
    );
    return void 0;
  }
  // The filter only narrows the type: the includes(void 0) check above
  // guarantees no entries are actually removed here.
  const cacheKeyPrefix = await getCacheKeyPrefixBase(
    languages.filter((l) => l !== void 0)
  );
  logger.debug(
    `Searching for overlay-base databases in Actions cache with prefix ${cacheKeyPrefix}`
  );
  const caches = await listActionsCaches(cacheKeyPrefix);
  if (caches.length === 0) {
    logger.info("No overlay-base databases found in Actions cache.");
    return [];
  }
  logger.info(
    `Found ${caches.length} overlay-base ${caches.length === 1 ? "database" : "databases"} in the Actions cache.`
  );
  // Cache keys continue after the prefix with "<cliVersion>-"; capture the
  // leading dotted version number from that suffix.
  const versionRegex = /^([\d.]+)-/;
  // Deduplicate versions: multiple cache entries may share a CLI version.
  const versionSet = /* @__PURE__ */ new Set();
  for (const cache of caches) {
    if (!cache.key) continue;
    const suffix = cache.key.substring(cacheKeyPrefix.length);
    const match = suffix.match(versionRegex);
    // Only keep strings that semver accepts as valid versions.
    if (match && semver6.valid(match[1])) {
      versionSet.add(match[1]);
    }
  }
  if (versionSet.size === 0) {
    logger.info(
      "Could not parse any CodeQL versions from overlay-base database cache keys."
    );
    return [];
  }
  // rcompare sorts in descending semver order, i.e. newest version first.
  const versions = [...versionSet].sort(semver6.rcompare);
  logger.info(
    `Found overlay databases for the following CodeQL versions in the Actions cache: ${versions.join(", ")}`
  );
  return versions;
}
// src/tar.ts
var import_child_process = require("child_process");
var fs7 = __toESM(require("fs"));
@@ -108268,7 +108389,7 @@ var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
var MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
async function getTarVersion() {
@@ -108310,9 +108431,9 @@ async function isZstdAvailable(logger) {
case "gnu":
return {
available: foundZstdBinary && // GNU tar only uses major and minor version numbers
semver6.gte(
semver6.coerce(version),
semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
semver7.gte(
semver7.coerce(version),
semver7.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
),
foundZstdBinary,
version: tarVersion
@@ -108321,7 +108442,7 @@ async function isZstdAvailable(logger) {
return {
available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain
// a patch version number.
semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
semver7.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
foundZstdBinary,
version: tarVersion
};
@@ -108428,7 +108549,7 @@ var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
var TOOLCACHE_TOOL_NAME = "CodeQL";
function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) {
@@ -108558,7 +108679,7 @@ function getToolcacheDirectory(version) {
return path7.join(
getRequiredEnvParam("RUNNER_TOOL_CACHE"),
TOOLCACHE_TOOL_NAME,
semver7.clean(version) || version,
semver8.clean(version) || version,
os.arch() || ""
);
}
@@ -108683,13 +108804,13 @@ function tryGetTagNameFromUrl(url2, logger) {
return match[1];
}
function convertToSemVer(version, logger) {
if (!semver8.valid(version)) {
if (!semver9.valid(version)) {
logger.debug(
`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`
);
version = `0.0.0-${version}`;
}
const s = semver8.clean(version);
const s = semver9.clean(version);
if (!s) {
throw new Error(`Bundle version ${version} is not in SemVer format.`);
}
@@ -108721,7 +108842,55 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
}
return void 0;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
// Returns the subset of enabled CLI versions for which a cached overlay-base
// database exists, preserving the order of `defaultCliVersion.enabledVersions`.
// Returns an empty array when no languages were provided, the version-matching
// feature is disabled, or the cache lookup fails or comes back empty.
async function getEnabledVersionsWithOverlayBaseDatabases(defaultCliVersion, rawLanguages, features, logger) {
  const haveLanguages = rawLanguages !== void 0 && rawLanguages.length > 0;
  if (!haveLanguages) {
    return [];
  }
  const matchingEnabled = await features.getValue("overlay_analysis_match_codeql_version" /* OverlayAnalysisMatchCodeqlVersion */);
  if (!matchingEnabled) {
    return [];
  }
  let versionsInCache;
  try {
    versionsInCache = await getCodeQlVersionsForOverlayBaseDatabases(rawLanguages, logger);
  } catch (e) {
    // A cache-listing failure must not break tool setup; fall back to the
    // default version selection.
    logger.warning(
      `While setting up CodeQL, was unable to list overlay-base databases in the Actions cache. Details: ${e}`
    );
    return [];
  }
  if (versionsInCache === void 0 || versionsInCache.length === 0) {
    return [];
  }
  const available = new Set(versionsInCache);
  return defaultCliVersion.enabledVersions.filter((v) => available.has(v.cliVersion));
}
// Chooses which enabled CLI version to use by default. For pull request
// analyses, prefers the highest enabled version that has a cached
// overlay-base database; otherwise falls back to the first enabled version.
async function resolveDefaultCliVersion(defaultCliVersion, rawLanguages, features, logger) {
  const fallback = defaultCliVersion.enabledVersions[0];
  if (!isAnalyzingPullRequest()) {
    return fallback;
  }
  const withOverlayBases = await getEnabledVersionsWithOverlayBaseDatabases(
    defaultCliVersion,
    rawLanguages,
    features,
    logger
  );
  // enabledVersions is ordered with the preferred version first, so the first
  // match is the best one.
  const [best] = withOverlayBases;
  if (best !== void 0) {
    logger.info(
      `Using CodeQL version ${best.cliVersion} since this is the highest enabled version that has a cached overlay-base database.`
    );
    return best;
  }
  logger.info(
    `Using CodeQL version ${fallback.cliVersion} since no enabled versions with cached overlay-base databases were found.`
  );
  return fallback;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, rawLanguages, apiDetails, variant, tarSupportsZstd, features, logger) {
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
const compressionMethod2 = inferCompressionMethod(toolsInput);
@@ -108815,21 +108984,33 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
);
}
}
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
} else if (toolsInput !== void 0) {
tagName = tryGetTagNameFromUrl(toolsInput, logger);
url2 = toolsInput;
if (tagName) {
const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger);
if (bundleVersion3 && semver8.valid(bundleVersion3)) {
if (bundleVersion3 && semver9.valid(bundleVersion3)) {
cliVersion2 = convertToSemVer(bundleVersion3, logger);
}
}
} else {
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger);
const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown";
@@ -109026,7 +109207,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
}
return cliVersion2;
}
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
if (!await isBinaryAccessible("tar", logger)) {
throw new ConfigurationError(
"Could not find tar in PATH, so unable to extract CodeQL bundle."
@@ -109036,6 +109217,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
const source = await getCodeQLSource(
toolsInput,
defaultCliVersion,
rawLanguages,
apiDetails,
variant,
zstdAvailability.available,
@@ -109094,7 +109276,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
async function useZstdBundle(cliVersion2, tarSupportsZstd) {
return (
// In testing, gzip performs better than zstd on Windows.
process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
process.platform !== "win32" && tarSupportsZstd && semver9.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
);
}
function getTempExtractionDir(tempDir) {
@@ -109126,7 +109308,7 @@ async function getNightlyToolsUrl(logger) {
}
}
function getLatestToolcacheVersion(logger) {
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a));
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver9.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions
@@ -109163,7 +109345,7 @@ var CODEQL_NEXT_MINIMUM_VERSION = "2.19.4";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.15";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2026-04-09";
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger, checkVersion) {
try {
const {
codeqlFolder,
@@ -109177,6 +109359,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger
);
@@ -110898,7 +111081,7 @@ var core12 = __toESM(require_core());
var toolrunner4 = __toESM(require_toolrunner());
var github2 = __toESM(require_github());
var io5 = __toESM(require_io());
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
logger.startGroup("Setup CodeQL tools");
const {
codeql,
@@ -110912,6 +111095,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
true
@@ -111068,6 +111252,8 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
tempDir,
gitHubVersion.type,
codeQLDefaultVersionInfo,
void 0,
// rawLanguages: upload-lib does not run analysis
features,
logger
);
@@ -111083,7 +111269,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID(category, analysis_key, environment);
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === void 0) {
@@ -111096,7 +111282,7 @@ function populateRunAutomationDetails(sarifFile, category, analysis_key, environ
}
return sarifFile;
}
function getAutomationID(category, analysis_key, environment) {
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
let automationID = category;
if (!automationID.endsWith("/")) {
+57 -51
View File
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare2 = require_compare();
var rcompare2 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare2(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare2 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare2,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -98792,7 +98792,7 @@ var require_stream_writable = __commonJS({
pna.nextTick(cb, er);
}
function validChunk(stream, state, chunk, cb) {
var valid3 = true;
var valid4 = true;
var er = false;
if (chunk === null) {
er = new TypeError("May not write null values to stream");
@@ -98802,9 +98802,9 @@ var require_stream_writable = __commonJS({
if (er) {
stream.emit("error", er);
pna.nextTick(cb, er);
valid3 = false;
valid4 = false;
}
return valid3;
return valid4;
}
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
@@ -150217,16 +150217,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -150515,8 +150515,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -150529,8 +150529,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -153258,8 +153258,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -153559,8 +153559,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare2(b, a, loose);
}
exports2.sort = sort;
@@ -154388,7 +154388,7 @@ var require_cacheUtils = __commonJS({
var crypto2 = __importStar2(require("crypto"));
var fs3 = __importStar2(require("fs"));
var path3 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants24();
var versionSalt = "1.0";
@@ -154481,7 +154481,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core15.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -155791,7 +155791,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache5;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -155968,7 +155968,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -157242,8 +157242,8 @@ var require_cache6 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
var core15 = __importStar2(require_core());
var path3 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -157300,7 +157300,7 @@ var require_cache6 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -157444,7 +157444,7 @@ var require_cache6 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache5(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -157681,7 +157681,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
@@ -157695,7 +157695,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -157704,7 +157704,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -157964,7 +157964,7 @@ var require_tool_cache = __commonJS({
var os2 = __importStar2(require("os"));
var path3 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -158237,7 +158237,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source dir: ${sourceDir}`);
@@ -158255,7 +158255,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch = arch || os2.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch}`);
core15.debug(`source file: ${sourceFile}`);
@@ -158285,7 +158285,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path3.join(_getCacheDirectory(), toolName, versionSpec, arch);
core15.debug(`checking cache: ${cachePath}`);
if (fs3.existsSync(cachePath) && fs3.existsSync(`${cachePath}.complete`)) {
@@ -158365,7 +158365,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path3.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path3.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
core15.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io6.rmRF(folderPath);
@@ -158375,30 +158375,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch) {
const folderPath = path3.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch || "");
const folderPath = path3.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch || "");
const markerPath = `${folderPath}.complete`;
fs3.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core15.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core15.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core15.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core15.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -162433,24 +162433,30 @@ var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = {
// src/setup-codeql.ts
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache6());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tar.ts
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
// src/tools-download.ts
var core10 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/dependency-caching.ts
var actionsCache3 = __toESM(require_cache6());
var actionsCache4 = __toESM(require_cache6());
var glob = __toESM(require_glob2());
// src/artifact-scanner.ts
+294 -108
View File
@@ -203,7 +203,7 @@ var require_file_command = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs15 = __importStar2(require("fs"));
var os3 = __importStar2(require("os"));
var utils_1 = require_utils();
@@ -220,7 +220,7 @@ var require_file_command = __commonJS({
});
}
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${crypto2.randomUUID()}`;
const delimiter = `ghadelimiter_${crypto3.randomUUID()}`;
const convertedValue = (0, utils_1.toCommandValue)(value);
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
@@ -4262,11 +4262,11 @@ var require_util2 = __commonJS({
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl();
var supportedHashes = [];
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
supportedHashes = crypto2.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
supportedHashes = crypto3.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
} catch {
}
function responseURL(response) {
@@ -4539,7 +4539,7 @@ var require_util2 = __commonJS({
}
}
function bytesMatch(bytes, metadataList) {
if (crypto2 === void 0) {
if (crypto3 === void 0) {
return true;
}
const parsedMetadata = parseMetadata(metadataList);
@@ -4554,7 +4554,7 @@ var require_util2 = __commonJS({
for (const item of metadata) {
const algorithm = item.algo;
const expectedValue = item.hash;
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
let actualValue = crypto3.createHash(algorithm).update(bytes).digest("base64");
if (actualValue[actualValue.length - 1] === "=") {
if (actualValue[actualValue.length - 2] === "=") {
actualValue = actualValue.slice(0, -2);
@@ -5618,8 +5618,8 @@ var require_body = __commonJS({
var { multipartFormDataParser } = require_formdata_parser();
var random;
try {
const crypto2 = require("node:crypto");
random = (max) => crypto2.randomInt(0, max);
const crypto3 = require("node:crypto");
random = (max) => crypto3.randomInt(0, max);
} catch {
random = (max) => Math.floor(Math.random(max));
}
@@ -17023,13 +17023,13 @@ var require_frame = __commonJS({
"use strict";
var { maxUnsigned16Bit } = require_constants5();
var BUFFER_SIZE = 16386;
var crypto2;
var crypto3;
var buffer = null;
var bufIdx = BUFFER_SIZE;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
crypto2 = {
crypto3 = {
// not full compatibility, but minimum.
randomFillSync: function randomFillSync(buffer2, _offset, _size) {
for (let i = 0; i < buffer2.length; ++i) {
@@ -17042,7 +17042,7 @@ var require_frame = __commonJS({
function generateMask() {
if (bufIdx === BUFFER_SIZE) {
bufIdx = 0;
crypto2.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
crypto3.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
}
return [buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++]];
}
@@ -17114,9 +17114,9 @@ var require_connection = __commonJS({
var { Headers, getHeadersList } = require_headers();
var { getDecodeSplit } = require_util2();
var { WebsocketFrameSend } = require_frame();
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
}
function establishWebSocketConnection(url2, protocols, client, ws, onEstablish, options) {
@@ -17136,7 +17136,7 @@ var require_connection = __commonJS({
const headersList = getHeadersList(new Headers(options.headers));
request2.headersList = headersList;
}
const keyValue = crypto2.randomBytes(16).toString("base64");
const keyValue = crypto3.randomBytes(16).toString("base64");
request2.headersList.append("sec-websocket-key", keyValue);
request2.headersList.append("sec-websocket-version", "13");
for (const protocol of protocols) {
@@ -17166,7 +17166,7 @@ var require_connection = __commonJS({
return;
}
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
const digest = crypto3.createHash("sha1").update(keyValue + uid).digest("base64");
if (secWSAccept !== digest) {
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
return;
@@ -25260,11 +25260,11 @@ var require_util10 = __commonJS({
var { isUint8Array } = require("node:util/types");
var { webidl } = require_webidl2();
var supportedHashes = [];
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
const possibleRelevantHashes = ["sha256", "sha384", "sha512"];
supportedHashes = crypto2.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
supportedHashes = crypto3.getHashes().filter((hash2) => possibleRelevantHashes.includes(hash2));
} catch {
}
function responseURL(response) {
@@ -25537,7 +25537,7 @@ var require_util10 = __commonJS({
}
}
function bytesMatch(bytes, metadataList) {
if (crypto2 === void 0) {
if (crypto3 === void 0) {
return true;
}
const parsedMetadata = parseMetadata(metadataList);
@@ -25552,7 +25552,7 @@ var require_util10 = __commonJS({
for (const item of metadata) {
const algorithm = item.algo;
const expectedValue = item.hash;
let actualValue = crypto2.createHash(algorithm).update(bytes).digest("base64");
let actualValue = crypto3.createHash(algorithm).update(bytes).digest("base64");
if (actualValue[actualValue.length - 1] === "=") {
if (actualValue[actualValue.length - 2] === "=") {
actualValue = actualValue.slice(0, -2);
@@ -26616,8 +26616,8 @@ var require_body2 = __commonJS({
var { multipartFormDataParser } = require_formdata_parser2();
var random;
try {
const crypto2 = require("node:crypto");
random = (max) => crypto2.randomInt(0, max);
const crypto3 = require("node:crypto");
random = (max) => crypto3.randomInt(0, max);
} catch {
random = (max) => Math.floor(Math.random(max));
}
@@ -38021,13 +38021,13 @@ var require_frame2 = __commonJS({
"use strict";
var { maxUnsigned16Bit } = require_constants10();
var BUFFER_SIZE = 16386;
var crypto2;
var crypto3;
var buffer = null;
var bufIdx = BUFFER_SIZE;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
crypto2 = {
crypto3 = {
// not full compatibility, but minimum.
randomFillSync: function randomFillSync(buffer2, _offset, _size) {
for (let i = 0; i < buffer2.length; ++i) {
@@ -38040,7 +38040,7 @@ var require_frame2 = __commonJS({
function generateMask() {
if (bufIdx === BUFFER_SIZE) {
bufIdx = 0;
crypto2.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
crypto3.randomFillSync(buffer ??= Buffer.allocUnsafe(BUFFER_SIZE), 0, BUFFER_SIZE);
}
return [buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++], buffer[bufIdx++]];
}
@@ -38112,9 +38112,9 @@ var require_connection2 = __commonJS({
var { Headers, getHeadersList } = require_headers2();
var { getDecodeSplit } = require_util10();
var { WebsocketFrameSend } = require_frame2();
var crypto2;
var crypto3;
try {
crypto2 = require("node:crypto");
crypto3 = require("node:crypto");
} catch {
}
function establishWebSocketConnection(url2, protocols, client, ws, onEstablish, options) {
@@ -38134,7 +38134,7 @@ var require_connection2 = __commonJS({
const headersList = getHeadersList(new Headers(options.headers));
request2.headersList = headersList;
}
const keyValue = crypto2.randomBytes(16).toString("base64");
const keyValue = crypto3.randomBytes(16).toString("base64");
request2.headersList.append("sec-websocket-key", keyValue);
request2.headersList.append("sec-websocket-version", "13");
for (const protocol of protocols) {
@@ -38164,7 +38164,7 @@ var require_connection2 = __commonJS({
return;
}
const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
const digest = crypto2.createHash("sha1").update(keyValue + uid).digest("base64");
const digest = crypto3.createHash("sha1").update(keyValue + uid).digest("base64");
if (secWSAccept !== digest) {
failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
return;
@@ -44531,11 +44531,11 @@ var require_valid = __commonJS({
"node_modules/semver/functions/valid.js"(exports2, module2) {
"use strict";
var parse2 = require_parse3();
var valid3 = (version, options) => {
var valid4 = (version, options) => {
const v = parse2(version, options);
return v ? v.version : null;
};
module2.exports = valid3;
module2.exports = valid4;
}
});
@@ -44678,8 +44678,8 @@ var require_rcompare = __commonJS({
"node_modules/semver/functions/rcompare.js"(exports2, module2) {
"use strict";
var compare3 = require_compare();
var rcompare2 = (a, b, loose) => compare3(b, a, loose);
module2.exports = rcompare2;
var rcompare3 = (a, b, loose) => compare3(b, a, loose);
module2.exports = rcompare3;
}
});
@@ -45895,7 +45895,7 @@ var require_semver2 = __commonJS({
var SemVer = require_semver();
var identifiers = require_identifiers();
var parse2 = require_parse3();
var valid3 = require_valid();
var valid4 = require_valid();
var clean3 = require_clean();
var inc = require_inc();
var diff = require_diff();
@@ -45904,7 +45904,7 @@ var require_semver2 = __commonJS({
var patch = require_patch();
var prerelease = require_prerelease();
var compare3 = require_compare();
var rcompare2 = require_rcompare();
var rcompare3 = require_rcompare();
var compareLoose = require_compare_loose();
var compareBuild = require_compare_build();
var sort = require_sort();
@@ -45933,7 +45933,7 @@ var require_semver2 = __commonJS({
var subset = require_subset();
module2.exports = {
parse: parse2,
valid: valid3,
valid: valid4,
clean: clean3,
inc,
diff,
@@ -45942,7 +45942,7 @@ var require_semver2 = __commonJS({
patch,
prerelease,
compare: compare3,
rcompare: rcompare2,
rcompare: rcompare3,
compareLoose,
compareBuild,
sort,
@@ -47732,16 +47732,16 @@ var require_attribute = __commonJS({
var result = new ValidatorResult(instance, schema2, options, ctx);
var self2 = this;
schema2.allOf.forEach(function(v, i) {
var valid3 = self2.validateSchema(instance, v, options, ctx);
if (!valid3.valid) {
var valid4 = self2.validateSchema(instance, v, options, ctx);
if (!valid4.valid) {
var id = v.$id || v.id;
var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]";
result.addError({
name: "allOf",
argument: { id: msg, length: valid3.errors.length, valid: valid3 },
message: "does not match allOf schema " + msg + " with " + valid3.errors.length + " error[s]:"
argument: { id: msg, length: valid4.errors.length, valid: valid4 },
message: "does not match allOf schema " + msg + " with " + valid4.errors.length + " error[s]:"
});
result.importErrors(valid3);
result.importErrors(valid4);
}
});
return result;
@@ -48030,8 +48030,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMinimum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance > schema2.exclusiveMinimum;
if (!valid3) {
var valid4 = instance > schema2.exclusiveMinimum;
if (!valid4) {
result.addError({
name: "exclusiveMinimum",
argument: schema2.exclusiveMinimum,
@@ -48044,8 +48044,8 @@ var require_attribute = __commonJS({
if (typeof schema2.exclusiveMaximum === "boolean") return;
if (!this.types.number(instance)) return;
var result = new ValidatorResult(instance, schema2, options, ctx);
var valid3 = instance < schema2.exclusiveMaximum;
if (!valid3) {
var valid4 = instance < schema2.exclusiveMaximum;
if (!valid4) {
result.addError({
name: "exclusiveMaximum",
argument: schema2.exclusiveMaximum,
@@ -50550,7 +50550,7 @@ var require_internal_hash_files = __commonJS({
};
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var core16 = __importStar2(require_core());
var fs15 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
@@ -50563,7 +50563,7 @@ var require_internal_hash_files = __commonJS({
const writeDelegate = verbose ? core16.info : core16.debug;
let hasMatch = false;
const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd();
const result = crypto2.createHash("sha256");
const result = crypto3.createHash("sha256");
let count = 0;
try {
for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
@@ -50579,7 +50579,7 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs15.createReadStream(file), hash2);
result.write(hash2.digest());
@@ -50828,8 +50828,8 @@ var require_semver3 = __commonJS({
return null;
}
}
exports2.valid = valid3;
function valid3(version, options) {
exports2.valid = valid4;
function valid4(version, options) {
var v = parse2(version, options);
return v ? v.version : null;
}
@@ -51129,8 +51129,8 @@ var require_semver3 = __commonJS({
var versionB = new SemVer(b, loose);
return versionA.compare(versionB) || versionA.compareBuild(versionB);
}
exports2.rcompare = rcompare2;
function rcompare2(a, b, loose) {
exports2.rcompare = rcompare3;
function rcompare3(a, b, loose) {
return compare3(b, a, loose);
}
exports2.sort = sort;
@@ -51955,10 +51955,10 @@ var require_cacheUtils = __commonJS({
var exec = __importStar2(require_exec());
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs15 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
var constants_1 = require_constants12();
var versionSalt = "1.0";
@@ -51979,7 +51979,7 @@ var require_cacheUtils = __commonJS({
}
tempDirectory = path13.join(baseLocation, "actions", "temp");
}
const dest = path13.join(tempDirectory, crypto2.randomUUID());
const dest = path13.join(tempDirectory, crypto3.randomUUID());
yield io6.mkdirP(dest);
return dest;
});
@@ -52051,7 +52051,7 @@ var require_cacheUtils = __commonJS({
function getCompressionMethod() {
return __awaiter2(this, void 0, void 0, function* () {
const versionOutput = yield getVersion("zstd", ["--quiet"]);
const version = semver9.clean(versionOutput);
const version = semver10.clean(versionOutput);
core16.debug(`zstd version: ${version}`);
if (versionOutput === "") {
return constants_1.CompressionMethod.Gzip;
@@ -52087,7 +52087,7 @@ var require_cacheUtils = __commonJS({
components.push("windows-only");
}
components.push(versionSalt);
return crypto2.createHash("sha256").update(components.join("|")).digest("hex");
return crypto3.createHash("sha256").update(components.join("|")).digest("hex");
}
function getRuntimeToken() {
const token = process.env["ACTIONS_RUNTIME_TOKEN"];
@@ -93457,7 +93457,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache3;
exports2.saveCache = saveCache4;
var core16 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93634,7 +93634,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -99134,8 +99134,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core16 = __importStar2(require_core());
var path13 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -99192,7 +99192,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core16.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99336,7 +99336,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache3(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core16.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99573,7 +99573,7 @@ var require_manifest = __commonJS({
exports2._findMatch = _findMatch;
exports2._getOsVersion = _getOsVersion;
exports2._readLinuxVersionFile = _readLinuxVersionFile;
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var core_1 = require_core();
var os3 = require("os");
var cp = require("child_process");
@@ -99587,7 +99587,7 @@ var require_manifest = __commonJS({
for (const candidate of candidates) {
const version = candidate.version;
(0, core_1.debug)(`check ${version} satisfies ${versionSpec}`);
if (semver9.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
if (semver10.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) {
file = candidate.files.find((item) => {
(0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`);
let chk = item.arch === archFilter && item.platform === platFilter;
@@ -99596,7 +99596,7 @@ var require_manifest = __commonJS({
if (osVersion === item.platform_version) {
chk = true;
} else {
chk = semver9.satisfies(osVersion, item.platform_version);
chk = semver10.satisfies(osVersion, item.platform_version);
}
}
return chk;
@@ -99850,13 +99850,13 @@ var require_tool_cache = __commonJS({
exports2.evaluateVersions = evaluateVersions;
var core16 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var crypto3 = __importStar2(require("crypto"));
var fs15 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os3 = __importStar2(require("os"));
var path13 = __importStar2(require("path"));
var httpm = __importStar2(require_lib());
var semver9 = __importStar2(require_semver2());
var semver10 = __importStar2(require_semver2());
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var assert_1 = require("assert");
@@ -99875,7 +99875,7 @@ var require_tool_cache = __commonJS({
var userAgent2 = "actions/tool-cache";
function downloadTool2(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
dest = dest || path13.join(_getTempDirectory(), crypto2.randomUUID());
dest = dest || path13.join(_getTempDirectory(), crypto3.randomUUID());
yield io6.mkdirP(path13.dirname(dest));
core16.debug(`Downloading ${url2}`);
core16.debug(`Destination ${dest}`);
@@ -100129,7 +100129,7 @@ var require_tool_cache = __commonJS({
}
function cacheDir(sourceDir, tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch2 = arch2 || os3.arch();
core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
core16.debug(`source dir: ${sourceDir}`);
@@ -100147,7 +100147,7 @@ var require_tool_cache = __commonJS({
}
function cacheFile(sourceFile, targetFile, tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
version = semver9.clean(version) || version;
version = semver10.clean(version) || version;
arch2 = arch2 || os3.arch();
core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
core16.debug(`source file: ${sourceFile}`);
@@ -100177,7 +100177,7 @@ var require_tool_cache = __commonJS({
}
let toolPath = "";
if (versionSpec) {
versionSpec = semver9.clean(versionSpec) || "";
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core16.debug(`checking cache: ${cachePath}`);
if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) {
@@ -100249,7 +100249,7 @@ var require_tool_cache = __commonJS({
function _createExtractFolder(dest) {
return __awaiter2(this, void 0, void 0, function* () {
if (!dest) {
dest = path13.join(_getTempDirectory(), crypto2.randomUUID());
dest = path13.join(_getTempDirectory(), crypto3.randomUUID());
}
yield io6.mkdirP(dest);
return dest;
@@ -100257,7 +100257,7 @@ var require_tool_cache = __commonJS({
}
function _createToolPath(tool, version, arch2) {
return __awaiter2(this, void 0, void 0, function* () {
const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const folderPath = path13.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
core16.debug(`destination ${folderPath}`);
const markerPath = `${folderPath}.complete`;
yield io6.rmRF(folderPath);
@@ -100267,30 +100267,30 @@ var require_tool_cache = __commonJS({
});
}
function _completeToolPath(tool, version, arch2) {
const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const folderPath = path13.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs15.writeFileSync(markerPath, "");
core16.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
const c = semver9.clean(versionSpec) || "";
const c = semver10.clean(versionSpec) || "";
core16.debug(`isExplicit: ${c}`);
const valid3 = semver9.valid(c) != null;
core16.debug(`explicit? ${valid3}`);
return valid3;
const valid4 = semver10.valid(c) != null;
core16.debug(`explicit? ${valid4}`);
return valid4;
}
function evaluateVersions(versions, versionSpec) {
let version = "";
core16.debug(`evaluating ${versions.length} versions`);
versions = versions.sort((a, b) => {
if (semver9.gt(a, b)) {
if (semver10.gt(a, b)) {
return 1;
}
return -1;
});
for (let i = versions.length - 1; i >= 0; i--) {
const potential = versions[i];
const satisfied = semver9.satisfies(potential, versionSpec);
const satisfied = semver10.satisfies(potential, versionSpec);
if (satisfied) {
version = potential;
break;
@@ -106847,6 +106847,32 @@ var persistInputs = function() {
);
core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables));
};
function getPullRequestBranches() {
const pullRequest = github.context.payload.pull_request;
if (pullRequest) {
return {
base: pullRequest.base.ref,
// We use the head label instead of the head ref here, because the head
// ref lacks owner information and by itself does not uniquely identify
// the head branch (which may be in a forked repository).
head: pullRequest.head.label
};
}
const codeScanningRef = process.env.CODE_SCANNING_REF;
const codeScanningBaseBranch = process.env.CODE_SCANNING_BASE_BRANCH;
if (codeScanningRef && codeScanningBaseBranch) {
return {
base: codeScanningBaseBranch,
// PR analysis under Default Setup analyzes the PR head commit instead of
// the merge commit, so we can use the provided ref directly.
head: codeScanningRef
};
}
return void 0;
}
function isAnalyzingPullRequest() {
return getPullRequestBranches() !== void 0;
}
var qualityCategoryMapping = {
"c#": "csharp",
cpp: "c-cpp",
@@ -107139,6 +107165,11 @@ async function getAnalysisKey() {
core5.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey);
return analysisKey;
}
async function getAutomationID() {
const analysis_key = await getAnalysisKey();
const environment = getRequiredInput("matrix");
return computeAutomationID(analysis_key, environment);
}
function computeAutomationID(analysis_key, environment) {
let automationID = `${analysis_key}/`;
const matrix = parseMatrixInput(environment);
@@ -107153,6 +107184,18 @@ function computeAutomationID(analysis_key, environment) {
}
return automationID;
}
async function listActionsCaches(keyPrefix, ref) {
const repositoryNwo = getRepositoryNwo();
return await getApiClient().paginate(
"GET /repos/{owner}/{repo}/actions/caches",
{
owner: repositoryNwo.owner,
repo: repositoryNwo.repo,
key: keyPrefix,
ref
}
);
}
function isEnablementError(msg) {
return [
/Code Security must be enabled/i,
@@ -108202,7 +108245,13 @@ var path7 = __toESM(require("path"));
var core9 = __toESM(require_core());
// src/caching-utils.ts
var crypto2 = __toESM(require("crypto"));
var core8 = __toESM(require_core());
var cacheKeyHashLength = 16;
function createCacheKeyHash(components) {
const componentsJson = JSON.stringify(components);
return crypto2.createHash("sha256").update(componentsJson).digest("hex").substring(0, cacheKeyHashLength);
}
// src/config/db-config.ts
var jsonschema = __toESM(require_lib2());
@@ -108343,6 +108392,17 @@ var builtin_default = {
// src/languages/index.ts
var builtInLanguageSet = new Set(builtin_default.languages);
function isBuiltInLanguage(language) {
return builtInLanguageSet.has(language);
}
function parseBuiltInLanguage(language) {
language = language.trim().toLowerCase();
language = builtin_default.aliases[language] ?? language;
if (isBuiltInLanguage(language)) {
return language;
}
return void 0;
}
// src/overlay/status.ts
var actionsCache = __toESM(require_cache5());
@@ -108892,7 +108952,7 @@ var fs11 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
var semver8 = __toESM(require_semver2());
var semver9 = __toESM(require_semver2());
// node_modules/uuid/dist-node/stringify.js
var byteToHex = [];
@@ -108938,6 +108998,67 @@ function _v4(options, buf, offset) {
}
var v4_default = v4;
// src/overlay/caching.ts
var actionsCache3 = __toESM(require_cache5());
var semver6 = __toESM(require_semver2());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
async function getCacheKeyPrefixBase(parsedLanguages) {
const languagesComponent = [...parsedLanguages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languagesComponent}-`;
}
async function getCodeQlVersionsForOverlayBaseDatabases(rawLanguages, logger) {
const languages = rawLanguages.map(parseBuiltInLanguage);
if (languages.includes(void 0)) {
logger.warning(
"One or more provided languages are not recognized as built-in languages. Skipping searching for overlay-base databases in cache."
);
return void 0;
}
const cacheKeyPrefix = await getCacheKeyPrefixBase(
languages.filter((l) => l !== void 0)
);
logger.debug(
`Searching for overlay-base databases in Actions cache with prefix ${cacheKeyPrefix}`
);
const caches = await listActionsCaches(cacheKeyPrefix);
if (caches.length === 0) {
logger.info("No overlay-base databases found in Actions cache.");
return [];
}
logger.info(
`Found ${caches.length} overlay-base ${caches.length === 1 ? "database" : "databases"} in the Actions cache.`
);
const versionRegex = /^([\d.]+)-/;
const versionSet = /* @__PURE__ */ new Set();
for (const cache of caches) {
if (!cache.key) continue;
const suffix = cache.key.substring(cacheKeyPrefix.length);
const match = suffix.match(versionRegex);
if (match && semver6.valid(match[1])) {
versionSet.add(match[1]);
}
}
if (versionSet.size === 0) {
logger.info(
"Could not parse any CodeQL versions from overlay-base database cache keys."
);
return [];
}
const versions = [...versionSet].sort(semver6.rcompare);
logger.info(
`Found overlay databases for the following CodeQL versions in the Actions cache: ${versions.join(", ")}`
);
return versions;
}
// src/tar.ts
var import_child_process = require("child_process");
var fs9 = __toESM(require("fs"));
@@ -108945,7 +109066,7 @@ var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
var toolcache = __toESM(require_tool_cache());
var semver6 = __toESM(require_semver2());
var semver7 = __toESM(require_semver2());
var MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
var MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
async function getTarVersion() {
@@ -108987,9 +109108,9 @@ async function isZstdAvailable(logger) {
case "gnu":
return {
available: foundZstdBinary && // GNU tar only uses major and minor version numbers
semver6.gte(
semver6.coerce(version),
semver6.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
semver7.gte(
semver7.coerce(version),
semver7.coerce(MIN_REQUIRED_GNU_TAR_VERSION)
),
foundZstdBinary,
version: tarVersion
@@ -108998,7 +109119,7 @@ async function isZstdAvailable(logger) {
return {
available: foundZstdBinary && // Do a loose comparison since these version numbers don't contain
// a patch version number.
semver6.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
semver7.gte(version, MIN_REQUIRED_BSD_TAR_VERSION),
foundZstdBinary,
version: tarVersion
};
@@ -109105,7 +109226,7 @@ var core11 = __toESM(require_core());
var import_http_client = __toESM(require_lib());
var toolcache2 = __toESM(require_tool_cache());
var import_follow_redirects = __toESM(require_follow_redirects());
var semver7 = __toESM(require_semver2());
var semver8 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
var TOOLCACHE_TOOL_NAME = "CodeQL";
function makeDownloadFirstToolsDownloadDurations(downloadDurationMs, extractionDurationMs) {
@@ -109235,7 +109356,7 @@ function getToolcacheDirectory(version) {
return path8.join(
getRequiredEnvParam("RUNNER_TOOL_CACHE"),
TOOLCACHE_TOOL_NAME,
semver7.clean(version) || version,
semver8.clean(version) || version,
os2.arch() || ""
);
}
@@ -109360,13 +109481,13 @@ function tryGetTagNameFromUrl(url2, logger) {
return match[1];
}
function convertToSemVer(version, logger) {
if (!semver8.valid(version)) {
if (!semver9.valid(version)) {
logger.debug(
`Bundle version ${version} is not in SemVer format. Will treat it as pre-release 0.0.0-${version}.`
);
version = `0.0.0-${version}`;
}
const s = semver8.clean(version);
const s = semver9.clean(version);
if (!s) {
throw new Error(`Bundle version ${version} is not in SemVer format.`);
}
@@ -109398,7 +109519,55 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
}
return void 0;
}
async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, variant, tarSupportsZstd, features, logger) {
async function getEnabledVersionsWithOverlayBaseDatabases(defaultCliVersion, rawLanguages, features, logger) {
if (rawLanguages === void 0 || rawLanguages.length === 0) {
return [];
}
if (!await features.getValue("overlay_analysis_match_codeql_version" /* OverlayAnalysisMatchCodeqlVersion */)) {
return [];
}
let cachedVersions;
try {
cachedVersions = await getCodeQlVersionsForOverlayBaseDatabases(
rawLanguages,
logger
);
} catch (e) {
logger.warning(
`While setting up CodeQL, was unable to list overlay-base databases in the Actions cache. Details: ${e}`
);
return [];
}
if (cachedVersions === void 0 || cachedVersions.length === 0) {
return [];
}
const cachedVersionsSet = new Set(cachedVersions);
return defaultCliVersion.enabledVersions.filter(
(v) => cachedVersionsSet.has(v.cliVersion)
);
}
async function resolveDefaultCliVersion(defaultCliVersion, rawLanguages, features, logger) {
if (!isAnalyzingPullRequest()) {
return defaultCliVersion.enabledVersions[0];
}
const overlayVersions = await getEnabledVersionsWithOverlayBaseDatabases(
defaultCliVersion,
rawLanguages,
features,
logger
);
if (overlayVersions.length > 0) {
logger.info(
`Using CodeQL version ${overlayVersions[0].cliVersion} since this is the highest enabled version that has a cached overlay-base database.`
);
return overlayVersions[0];
}
logger.info(
`Using CodeQL version ${defaultCliVersion.enabledVersions[0].cliVersion} since no enabled versions with cached overlay-base databases were found.`
);
return defaultCliVersion.enabledVersions[0];
}
async function getCodeQLSource(toolsInput, defaultCliVersion, rawLanguages, apiDetails, variant, tarSupportsZstd, features, logger) {
if (toolsInput && !isReservedToolsValue(toolsInput) && !toolsInput.startsWith("http")) {
logger.info(`Using CodeQL CLI from local path ${toolsInput}`);
const compressionMethod2 = inferCompressionMethod(toolsInput);
@@ -109492,21 +109661,33 @@ async function getCodeQLSource(toolsInput, defaultCliVersion, apiDetails, varian
);
}
}
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
} else if (toolsInput !== void 0) {
tagName = tryGetTagNameFromUrl(toolsInput, logger);
url2 = toolsInput;
if (tagName) {
const bundleVersion3 = tryGetBundleVersionFromTagName(tagName, logger);
if (bundleVersion3 && semver8.valid(bundleVersion3)) {
if (bundleVersion3 && semver9.valid(bundleVersion3)) {
cliVersion2 = convertToSemVer(bundleVersion3, logger);
}
}
} else {
cliVersion2 = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger
);
cliVersion2 = version.cliVersion;
tagName = version.tagName;
}
const bundleVersion2 = tagName && tryGetBundleVersionFromTagName(tagName, logger);
const humanReadableVersion = cliVersion2 ?? (bundleVersion2 && convertToSemVer(bundleVersion2, logger)) ?? tagName ?? url2 ?? "unknown";
@@ -109703,7 +109884,7 @@ function getCanonicalToolcacheVersion(cliVersion2, bundleVersion2, logger) {
}
return cliVersion2;
}
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
if (!await isBinaryAccessible("tar", logger)) {
throw new ConfigurationError(
"Could not find tar in PATH, so unable to extract CodeQL bundle."
@@ -109713,6 +109894,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
const source = await getCodeQLSource(
toolsInput,
defaultCliVersion,
rawLanguages,
apiDetails,
variant,
zstdAvailability.available,
@@ -109771,7 +109953,7 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
async function useZstdBundle(cliVersion2, tarSupportsZstd) {
return (
// In testing, gzip performs better than zstd on Windows.
process.platform !== "win32" && tarSupportsZstd && semver8.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
process.platform !== "win32" && tarSupportsZstd && semver9.gte(cliVersion2, CODEQL_VERSION_ZSTD_BUNDLE)
);
}
function getTempExtractionDir(tempDir) {
@@ -109803,7 +109985,7 @@ async function getNightlyToolsUrl(logger) {
}
}
function getLatestToolcacheVersion(logger) {
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver8.compare(b, a));
const allVersions = toolcache3.findAllVersions("CodeQL").sort((a, b) => semver9.compare(b, a));
logger.debug(
`Found the following versions of the CodeQL tools in the toolcache: ${JSON.stringify(
allVersions
@@ -109840,7 +110022,7 @@ var CODEQL_NEXT_MINIMUM_VERSION = "2.19.4";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.15";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2026-04-09";
var EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger, checkVersion) {
async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger, checkVersion) {
try {
const {
codeqlFolder,
@@ -109854,6 +110036,7 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger
);
@@ -111575,7 +111758,7 @@ var core13 = __toESM(require_core());
var toolrunner4 = __toESM(require_toolrunner());
var github2 = __toESM(require_github());
var io5 = __toESM(require_io());
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) {
async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, rawLanguages, features, logger) {
logger.startGroup("Setup CodeQL tools");
const {
codeql,
@@ -111589,6 +111772,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
true
@@ -111674,6 +111858,8 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
tempDir,
gitHubVersion.type,
codeQLDefaultVersionInfo,
void 0,
// rawLanguages: upload-lib does not run analysis
features,
logger
);
@@ -111689,7 +111875,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID(category, analysis_key, environment);
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
@@ -111702,7 +111888,7 @@ function populateRunAutomationDetails(sarifFile, category, analysis_key, environ
}
return sarifFile;
}
function getAutomationID(category, analysis_key, environment) {
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
let automationID = category;
if (!automationID.endsWith("/")) {
+10
View File
@@ -72,6 +72,7 @@ async function installIntoToolcache({
cliVersion !== undefined
? { enabledVersions: [{ cliVersion, tagName }] }
: SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
createFeatures([]),
getRunnerLogger(true),
false,
@@ -143,6 +144,7 @@ test.serial(
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -175,6 +177,7 @@ test.serial(
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -214,6 +217,7 @@ test.serial(
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -264,6 +268,7 @@ for (const {
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -308,6 +313,7 @@ for (const toolcacheVersion of [
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -352,6 +358,7 @@ test.serial(
},
],
},
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -398,6 +405,7 @@ test.serial(
},
],
},
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -437,6 +445,7 @@ test.serial(
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
@@ -478,6 +487,7 @@ test.serial(
tmpDir,
util.GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
getRunnerLogger(true),
false,
+3
View File
@@ -305,6 +305,7 @@ const EXTRACTION_DEBUG_MODE_VERBOSITY = "progress++";
* @param tempDir
* @param variant
* @param defaultCliVersion
* @param rawLanguages Raw set of languages.
* @param features Information about the features that are enabled.
* @param logger
* @param checkVersion Whether to check that CodeQL CLI meets the minimum
@@ -317,6 +318,7 @@ export async function setupCodeQL(
tempDir: string,
variant: util.GitHubVariant,
defaultCliVersion: CodeQLDefaultVersionInfo,
rawLanguages: string[] | undefined,
features: FeatureEnablement,
logger: Logger,
checkVersion: boolean,
@@ -340,6 +342,7 @@ export async function setupCodeQL(
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
);
+4
View File
@@ -301,12 +301,16 @@ async function run(startedAt: Date) {
const codeQLDefaultVersionInfo =
await features.getEnabledDefaultCliVersions(gitHubVersion.type);
toolsFeatureFlagsValid = codeQLDefaultVersionInfo.toolsFeatureFlagsValid;
const rawLanguages = configUtils.getRawLanguagesNoAutodetect(
getOptionalInput("languages"),
);
const initCodeQLResult = await initCodeQL(
getOptionalInput("tools"),
apiDetails,
getTemporaryDirectory(),
gitHubVersion.type,
codeQLDefaultVersionInfo,
rawLanguages,
features,
logger,
);
+2
View File
@@ -39,6 +39,7 @@ export async function initCodeQL(
tempDir: string,
variant: util.GitHubVariant,
defaultCliVersion: CodeQLDefaultVersionInfo,
rawLanguages: string[] | undefined,
features: FeatureEnablement,
logger: Logger,
): Promise<{
@@ -61,6 +62,7 @@ export async function initCodeQL(
tempDir,
variant,
defaultCliVersion,
rawLanguages,
features,
logger,
true,
+1
View File
@@ -145,6 +145,7 @@ async function run(startedAt: Date): Promise<void> {
getTemporaryDirectory(),
gitHubVersion.type,
codeQLDefaultVersionInfo,
undefined, // rawLanguages: currently, setup-codeql is not language aware
features,
logger,
);
+161
View File
@@ -107,6 +107,7 @@ test.serial(
const source = await setupCodeql.getCodeQLSource(
`https://github.com/github/codeql-action/releases/download/${tagName}/codeql-bundle-linux64.tar.gz`,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -130,6 +131,7 @@ test.serial(
const source = await setupCodeql.getCodeQLSource(
"linked",
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -155,6 +157,7 @@ test.serial(
const source = await setupCodeql.getCodeQLSource(
"latest",
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -211,6 +214,7 @@ test.serial(
"tmp/codeql_action_test/",
GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
logger,
);
@@ -266,6 +270,7 @@ test.serial(
"tmp/codeql_action_test/",
GitHubVariant.DOTCOM,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
features,
logger,
);
@@ -317,6 +322,7 @@ test.serial(
const source = await setupCodeql.getCodeQLSource(
"nightly",
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -378,6 +384,7 @@ test.serial(
const source = await setupCodeql.getCodeQLSource(
undefined,
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -432,6 +439,7 @@ test.serial(
const source = await setupCodeql.getCodeQLSource(
"toolcache",
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -499,6 +507,7 @@ const toolcacheInputFallbackMacro = test.macro({
const source = await setupCodeql.getCodeQLSource(
"toolcache",
SAMPLE_DEFAULT_CLI_VERSION,
undefined, // rawLanguages
SAMPLE_DOTCOM_API_DETAILS,
GitHubVariant.DOTCOM,
false,
@@ -601,3 +610,155 @@ test.serial(
t.is(setupCodeql.getLatestToolcacheVersion(getRunnerLogger(true)), "3.2.1");
},
);
function makeOverlayMatchFeatures(
matchFlagEnabled: boolean,
): FeatureEnablement {
return {
getEnabledDefaultCliVersions: async () => {
throw new Error("not implemented");
},
getValue: async (feature) => {
if (feature === Feature.OverlayAnalysisMatchCodeqlVersion) {
return matchFlagEnabled;
}
return false;
},
};
}
const overlayMatchEnabledVersions = {
enabledVersions: [
{ cliVersion: "2.20.2", tagName: "codeql-bundle-v2.20.2" },
{ cliVersion: "2.20.1", tagName: "codeql-bundle-v2.20.1" },
{ cliVersion: "2.20.0", tagName: "codeql-bundle-v2.20.0" },
],
toolsFeatureFlagsValid: true,
};
test.serial(
"getEnabledVersionsWithOverlayBaseDatabases returns flag-enabled versions present in cache, sorted desc",
async (t) => {
sinon.stub(api, "getAutomationID").resolves("test/");
sinon.stub(api, "listActionsCaches").resolves([
// Newer than any flag-enabled version: should be filtered out.
{
key: "codeql-overlay-base-database-1-aaaaaaaaaaaaaaaa-javascript-2.21.0-abc-1-1",
},
// Flag-enabled versions present in the cache.
{
key: "codeql-overlay-base-database-1-aaaaaaaaaaaaaaaa-javascript-2.20.1-def-2-1",
},
{
key: "codeql-overlay-base-database-1-aaaaaaaaaaaaaaaa-javascript-2.20.0-ghi-3-1",
},
]);
const result = await setupCodeql.getEnabledVersionsWithOverlayBaseDatabases(
overlayMatchEnabledVersions,
["javascript"],
makeOverlayMatchFeatures(true),
getRunnerLogger(true),
);
t.deepEqual(result, [
{ cliVersion: "2.20.1", tagName: "codeql-bundle-v2.20.1" },
{ cliVersion: "2.20.0", tagName: "codeql-bundle-v2.20.0" },
]);
},
);
test.serial(
"getEnabledVersionsWithOverlayBaseDatabases returns empty when no cached version is flag-enabled",
async (t) => {
sinon.stub(api, "getAutomationID").resolves("test/");
sinon.stub(api, "listActionsCaches").resolves([
{
key: "codeql-overlay-base-database-1-aaaaaaaaaaaaaaaa-javascript-2.19.0-abc-1-1",
},
]);
const result = await setupCodeql.getEnabledVersionsWithOverlayBaseDatabases(
overlayMatchEnabledVersions,
["javascript"],
makeOverlayMatchFeatures(true),
getRunnerLogger(true),
);
t.deepEqual(result, []);
},
);
test.serial(
"getEnabledVersionsWithOverlayBaseDatabases does not list caches when gate is off",
async (t) => {
const listStub = sinon.stub(api, "listActionsCaches").resolves([]);
const result = await setupCodeql.getEnabledVersionsWithOverlayBaseDatabases(
overlayMatchEnabledVersions,
["javascript"],
makeOverlayMatchFeatures(false),
getRunnerLogger(true),
);
t.deepEqual(result, []);
t.assert(
listStub.notCalled,
"Should not list Actions caches when the gating feature flag is off.",
);
},
);
test.serial(
"getEnabledVersionsWithOverlayBaseDatabases does not list caches when rawLanguages is empty",
async (t) => {
const listStub = sinon.stub(api, "listActionsCaches").resolves([]);
const result = await setupCodeql.getEnabledVersionsWithOverlayBaseDatabases(
overlayMatchEnabledVersions,
undefined,
makeOverlayMatchFeatures(true),
getRunnerLogger(true),
);
t.deepEqual(result, []);
t.assert(
listStub.notCalled,
"Should not list Actions caches without rawLanguages.",
);
},
);
test.serial(
"getEnabledVersionsWithOverlayBaseDatabases returns empty when listing caches throws",
async (t) => {
sinon.stub(api, "getAutomationID").resolves("test/");
sinon.stub(api, "listActionsCaches").rejects(new Error("listing failed"));
const result = await setupCodeql.getEnabledVersionsWithOverlayBaseDatabases(
overlayMatchEnabledVersions,
["javascript"],
makeOverlayMatchFeatures(true),
getRunnerLogger(true),
);
t.deepEqual(result, []);
},
);
test.serial(
"getEnabledVersionsWithOverlayBaseDatabases includes the highest version when it is cached",
async (t) => {
sinon.stub(api, "getAutomationID").resolves("test/");
sinon.stub(api, "listActionsCaches").resolves([
{
key: "codeql-overlay-base-database-1-aaaaaaaaaaaaaaaa-javascript-2.20.2-abc-1-1",
},
]);
const result = await setupCodeql.getEnabledVersionsWithOverlayBaseDatabases(
overlayMatchEnabledVersions,
["javascript"],
makeOverlayMatchFeatures(true),
getRunnerLogger(true),
);
t.deepEqual(result, [
{ cliVersion: "2.20.2", tagName: "codeql-bundle-v2.20.2" },
]);
},
);
+98 -6
View File
@@ -7,17 +7,23 @@ import { default as deepEqual } from "fast-deep-equal";
import * as semver from "semver";
import { v4 as uuidV4 } from "uuid";
import { isDynamicWorkflow, isRunningLocalAction } from "./actions-util";
import {
isAnalyzingPullRequest,
isDynamicWorkflow,
isRunningLocalAction,
} from "./actions-util";
import * as api from "./api-client";
import * as defaults from "./defaults.json";
import { addNoLanguageDiagnostic, makeDiagnostic } from "./diagnostics";
import {
CODEQL_VERSION_ZSTD_BUNDLE,
CodeQLDefaultVersionInfo,
CodeQLVersionInfo,
Feature,
FeatureEnablement,
} from "./feature-flags";
import { Logger } from "./logging";
import { getCodeQlVersionsForOverlayBaseDatabases } from "./overlay/caching";
import * as tar from "./tar";
import {
downloadAndExtract,
@@ -264,12 +270,84 @@ async function findOverridingToolsInCache(
return undefined;
}
/** Returns the sorted set of enabled versions that have cached overlay-base databases. */
export async function getEnabledVersionsWithOverlayBaseDatabases(
defaultCliVersion: CodeQLDefaultVersionInfo,
rawLanguages: string[] | undefined,
features: FeatureEnablement,
logger: Logger,
): Promise<CodeQLVersionInfo[]> {
if (rawLanguages === undefined || rawLanguages.length === 0) {
return [];
}
if (!(await features.getValue(Feature.OverlayAnalysisMatchCodeqlVersion))) {
return [];
}
let cachedVersions: string[] | undefined;
try {
cachedVersions = await getCodeQlVersionsForOverlayBaseDatabases(
rawLanguages,
logger,
);
} catch (e) {
logger.warning(
`While setting up CodeQL, was unable to list overlay-base databases in the Actions cache. Details: ${e}`,
);
return [];
}
if (cachedVersions === undefined || cachedVersions.length === 0) {
return [];
}
const cachedVersionsSet = new Set(cachedVersions);
return defaultCliVersion.enabledVersions.filter((v) =>
cachedVersionsSet.has(v.cliVersion),
);
}
/**
 * Resolves the newest enabled default CLI version that has a cached overlay-base database for the
 * relevant languages, if analyzing a pull request and one exists. Otherwise, falls back to the
 * newest enabled default CLI version.
 */
async function resolveDefaultCliVersion(
  defaultCliVersion: CodeQLDefaultVersionInfo,
  rawLanguages: string[] | undefined,
  features: FeatureEnablement,
  logger: Logger,
): Promise<CodeQLVersionInfo> {
  // enabledVersions[0] is the newest enabled version (see doc comment above);
  // it is the fallback whenever overlay matching does not apply.
  const newestEnabled = defaultCliVersion.enabledVersions[0];
  // Overlay-base databases are only relevant to pull request analyses.
  if (!isAnalyzingPullRequest()) {
    return newestEnabled;
  }
  const withOverlayBases = await getEnabledVersionsWithOverlayBaseDatabases(
    defaultCliVersion,
    rawLanguages,
    features,
    logger,
  );
  if (withOverlayBases.length === 0) {
    logger.info(
      `Using CodeQL version ${newestEnabled.cliVersion} since no enabled ` +
        `versions with cached overlay-base databases were found.`,
    );
    return newestEnabled;
  }
  logger.info(
    `Using CodeQL version ${withOverlayBases[0].cliVersion} since this is the ` +
      `highest enabled version that has a cached overlay-base database.`,
  );
  return withOverlayBases[0];
}
/**
* Determines where the CodeQL CLI we want to use comes from. This can be from a local file,
* the Actions toolcache, or a download.
*
* @param toolsInput The argument provided for the `tools` input, if any.
* @param defaultCliVersion The default CLI version that's linked to the CodeQL Action.
* @param rawLanguages Raw set of languages.
* @param apiDetails Information about the GitHub API.
* @param variant The GitHub variant we are running on.
* @param tarSupportsZstd Whether zstd is supported by `tar`.
@@ -281,6 +359,7 @@ async function findOverridingToolsInCache(
export async function getCodeQLSource(
toolsInput: string | undefined,
defaultCliVersion: CodeQLDefaultVersionInfo,
rawLanguages: string[] | undefined,
apiDetails: api.GitHubApiDetails,
variant: util.GitHubVariant,
tarSupportsZstd: boolean,
@@ -438,8 +517,14 @@ export async function getCodeQLSource(
}
}
cliVersion = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger,
);
cliVersion = version.cliVersion;
tagName = version.tagName;
}
} else if (toolsInput !== undefined) {
// If a tools URL was provided, then use that.
@@ -454,9 +539,14 @@ export async function getCodeQLSource(
}
}
} else {
// Otherwise, use the default CLI version passed in.
cliVersion = defaultCliVersion.enabledVersions[0].cliVersion;
tagName = defaultCliVersion.enabledVersions[0].tagName;
const version = await resolveDefaultCliVersion(
defaultCliVersion,
rawLanguages,
features,
logger,
);
cliVersion = version.cliVersion;
tagName = version.tagName;
}
const bundleVersion =
@@ -791,6 +881,7 @@ export async function setupCodeQLBundle(
tempDir: string,
variant: util.GitHubVariant,
defaultCliVersion: CodeQLDefaultVersionInfo,
rawLanguages: string[] | undefined,
features: FeatureEnablement,
logger: Logger,
): Promise<SetupCodeQLResult> {
@@ -804,6 +895,7 @@ export async function setupCodeQLBundle(
const source = await getCodeQLSource(
toolsInput,
defaultCliVersion,
rawLanguages,
apiDetails,
variant,
zstdAvailability.available,
+1
View File
@@ -165,6 +165,7 @@ async function combineSarifFilesUsingCLI(
tempDir,
gitHubVersion.type,
codeQLDefaultVersionInfo,
undefined, // rawLanguages: upload-lib does not run analysis
features,
logger,
);