Merge branch 'main' into henrymercer/record-all-builtin-languages

This commit is contained in:
Henry Mercer
2026-04-13 18:00:09 +01:00
30 changed files with 6091 additions and 6098 deletions
+3 -1
View File
@@ -1,7 +1,9 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/"
directories:
- "/"
- "/pr-checks"
schedule:
interval: weekly
cooldown:
+9 -12
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path9 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -162086,7 +162086,6 @@ var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs2 = __toESM(require("fs"));
@@ -162282,8 +162281,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -162637,10 +162634,10 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -163322,7 +163319,7 @@ var core12 = __toESM(require_core());
// src/dependency-caching.ts
var import_path = require("path");
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
function getJavaTempDependencyDir() {
return (0, import_path.join)(getTemporaryDirectory(), "codeql_java", "repository");
+219 -216
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var os5 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs19.existsSync(filePath)) {
if (!fs20.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
fs20.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
_a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs20.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs19.promises.readlink(fsPath);
const result = yield fs20.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs19.constants.O_RDONLY;
exports2.READONLY = fs20.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -50297,7 +50297,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core17 = __importStar2(require_core());
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path16 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50351,7 +50351,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core17.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs19.promises.lstat(searchPath));
yield __await2(fs20.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50385,7 +50385,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
const childItems = (yield __await2(fs20.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50420,7 +50420,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs19.promises.stat(item.path);
stats = yield fs20.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50432,10 +50432,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs19.promises.lstat(item.path);
stats = yield fs20.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs19.promises.realpath(item.path);
const realPath = yield fs20.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50544,7 +50544,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core17 = __importStar2(require_core());
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path16 = __importStar2(require("path"));
@@ -50567,13 +50567,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs19.statSync(file).isDirectory()) {
if (fs20.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs19.createReadStream(file), hash2);
yield pipeline(fs20.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -51948,7 +51948,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -51977,7 +51977,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs19.statSync(filePath).size;
return fs20.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52015,7 +52015,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs19.unlink)(filePath);
return util.promisify(fs20.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52057,7 +52057,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs20.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92617,7 +92617,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92728,7 +92728,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs19.createWriteStream(archivePath);
const writeStream = fs20.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92753,7 +92753,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs19.promises.open(archivePath, "w");
const archiveDescriptor = yield fs20.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92869,7 +92869,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs19.openSync(archivePath, "w");
const fd = fs20.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92887,12 +92887,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs19.writeFileSync(fd, result);
fs20.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs19.closeSync(fd);
fs20.closeSync(fd);
}
}
});
@@ -93214,7 +93214,7 @@ var require_cacheHttpClient = __commonJS({
var core17 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93349,7 +93349,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs19.openSync(archivePath, "r");
const fd = fs20.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93363,7 +93363,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs20.createReadStream(archivePath, {
fd,
start,
end,
@@ -93374,7 +93374,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs19.closeSync(fd);
fs20.closeSync(fd);
}
return;
});
@@ -99330,7 +99330,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os5 = require("os");
var cp = require("child_process");
var fs19 = require("fs");
var fs20 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os5.platform();
@@ -99392,10 +99392,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs19.existsSync(lsbReleaseFile)) {
contents = fs19.readFileSync(lsbReleaseFile).toString();
} else if (fs19.existsSync(osReleaseFile)) {
contents = fs19.readFileSync(osReleaseFile).toString();
if (fs20.existsSync(lsbReleaseFile)) {
contents = fs20.readFileSync(lsbReleaseFile).toString();
} else if (fs20.existsSync(osReleaseFile)) {
contents = fs20.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99604,7 +99604,7 @@ var require_tool_cache = __commonJS({
var core17 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os5 = __importStar2(require("os"));
var path16 = __importStar2(require("path"));
@@ -99650,7 +99650,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs19.existsSync(dest)) {
if (fs20.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99674,7 +99674,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs19.createWriteStream(dest));
yield pipeline(readStream, fs20.createWriteStream(dest));
core17.debug("download complete");
succeeded = true;
return dest;
@@ -99886,11 +99886,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core17.debug(`Caching tool ${tool} ${version} ${arch2}`);
core17.debug(`source dir: ${sourceDir}`);
if (!fs19.statSync(sourceDir).isDirectory()) {
if (!fs20.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs19.readdirSync(sourceDir)) {
for (const itemName of fs20.readdirSync(sourceDir)) {
const s = path16.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -99904,7 +99904,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core17.debug(`Caching tool ${tool} ${version} ${arch2}`);
core17.debug(`source file: ${sourceFile}`);
if (!fs19.statSync(sourceFile).isFile()) {
if (!fs20.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99933,7 +99933,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core17.debug(`checking cache: ${cachePath}`);
if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) {
if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) {
core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99946,12 +99946,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os5.arch();
const toolPath = path16.join(_getCacheDirectory(), toolName);
if (fs19.existsSync(toolPath)) {
const children = fs19.readdirSync(toolPath);
if (fs20.existsSync(toolPath)) {
const children = fs20.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path16.join(toolPath, child, arch2 || "");
if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) {
if (fs20.existsSync(fullPath) && fs20.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -100022,7 +100022,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs19.writeFileSync(markerPath, "");
fs20.writeFileSync(markerPath, "");
core17.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103529,7 +103529,7 @@ __export(analyze_action_exports, {
runPromise: () => runPromise
});
module.exports = __toCommonJS(analyze_action_exports);
var fs18 = __toESM(require("fs"));
var fs19 = __toESM(require("fs"));
var import_path4 = __toESM(require("path"));
var import_perf_hooks3 = require("perf_hooks");
var core16 = __toESM(require_core());
@@ -103558,21 +103558,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs19 = options.fs || await import("node:fs/promises");
const fs20 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs20.lstat(itemPath, { bigint: true }) : await fs20.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs20.readdir(itemPath) : await fs20.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -107647,7 +107647,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path5 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -107877,8 +107876,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -107980,141 +107977,6 @@ async function getDiffRangeFilePaths(sourceRoot, logger) {
).filter((rel) => !rel.startsWith(".."));
return [...new Set(relativePaths)];
}
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs4.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay-base" /* OverlayBase */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip uploading overlay-base database to cache.`
);
return false;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip uploading overlay-base database to cache."
);
return false;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip uploading overlay-base database to cache."
);
return false;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Abort uploading overlay-base database to cache"
);
if (!databaseIsValid) {
return false;
}
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, "overlay" /* Overlay */);
});
const dbLocation = config.dbLocation;
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.warning(
"Failed to determine database size. Skip uploading overlay-base database to cache."
);
return false;
}
if (databaseSizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
const databaseSizeMB = Math.round(databaseSizeBytes / 1e6);
logger.warning(
`Database size (${databaseSizeMB} MB) exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). Skip uploading overlay-base database to cache.`
);
return false;
}
const codeQlVersion = (await codeql.getVersion()).version;
const checkoutPath = getRequiredInput("checkout_path");
const cacheSaveKey = await getCacheSaveKey(
config,
codeQlVersion,
checkoutPath,
logger
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`
);
try {
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS,
actionsCache.saveCache([dbLocation], cacheSaveKey),
() => {
}
);
if (cacheId === void 0) {
logger.warning("Timed out while uploading overlay-base database");
return false;
}
} catch (error3) {
logger.warning(
`Failed to upload overlay-base database to cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return false;
}
logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
return true;
}
async function getCacheSaveKey(config, codeQlVersion, checkoutPath, logger) {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
const languages = [...config.languages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -108711,14 +108573,14 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var CACHE_VERSION2 = 1;
var actionsCache2 = __toESM(require_cache5());
var CACHE_VERSION = 1;
var CODEQL_TRAP_CACHE_PREFIX = "codeql-trap";
var MINIMUM_CACHE_MB_TO_UPLOAD = 10;
var MAX_CACHE_OPERATION_MS2 = 12e4;
var MAX_CACHE_OPERATION_MS = 12e4;
async function uploadTrapCaches(codeql, config, logger) {
if (!await isAnalyzingDefaultBranch()) return false;
for (const language of config.languages) {
@@ -108744,8 +108606,8 @@ async function uploadTrapCaches(codeql, config, logger) {
);
logger.info(`Uploading TRAP cache to Actions cache with key ${key}`);
await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS2,
actionsCache3.saveCache([cacheDir], key),
MAX_CACHE_OPERATION_MS,
actionsCache2.saveCache([cacheDir], key),
() => {
logger.info(
`Timed out waiting for TRAP cache for ${language} to upload, will continue without uploading`
@@ -108835,7 +108697,7 @@ async function cacheKey(codeql, language, baseSha) {
return `${await cachePrefix(codeql, language)}${baseSha}`;
}
async function cachePrefix(codeql, language) {
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION2}-${(await codeql.getVersion()).version}-${language}-`;
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION}-${(await codeql.getVersion()).version}-${language}-`;
}
// src/config-utils.ts
@@ -110573,7 +110435,7 @@ async function runAutobuild(config, language, logger) {
// src/dependency-caching.ts
var os3 = __toESM(require("os"));
var import_path2 = require("path");
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
var CODEQL_DEPENDENCY_CACHE_VERSION = 1;
@@ -110711,7 +110573,7 @@ async function uploadDependencyCaches(codeql, features, config, logger) {
);
try {
const start = performance.now();
await actionsCache4.saveCache(
await actionsCache3.saveCache(
await cacheConfig.getDependencyPaths(codeql, features),
key
);
@@ -110723,7 +110585,7 @@ async function uploadDependencyCaches(codeql, features, config, logger) {
upload_duration_ms
});
} catch (error3) {
if (error3 instanceof actionsCache4.ReserveCacheError) {
if (error3 instanceof actionsCache3.ReserveCacheError) {
logger.info(
`Not uploading cache for ${language}, because ${key} is already in use.`
);
@@ -111289,6 +111151,147 @@ async function uploadBundledDatabase(repositoryNwo, language, commitOid, bundled
}
}
// src/overlay/caching.ts
var fs15 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS2 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs15.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
/**
 * If this run built an overlay-base database, clean it up and store it in the
 * GitHub Actions cache so later overlay analyses can restore it.
 *
 * Uploading is skipped (returning false) when the database mode is not
 * overlay-base, overlay caching is disabled, we are in test mode, the database
 * fails validation, its size cannot be determined or exceeds the upload
 * limit, or the cache save itself times out or throws.
 *
 * @param codeql CodeQL CLI wrapper used for cleanup and version queries.
 * @param config Analysis configuration (mode, db location, languages, ...).
 * @param logger Logger for progress and diagnostics.
 * @returns true only when the database was successfully uploaded.
 */
async function cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger) {
  const mode = config.overlayDatabaseMode;
  if (mode !== "overlay-base" /* OverlayBase */) {
    logger.debug(
      `Overlay database mode is ${mode}. Skip uploading overlay-base database to cache.`
    );
    return false;
  }
  if (!config.useOverlayDatabaseCaching) {
    logger.debug(
      "Overlay database caching is disabled. Skip uploading overlay-base database to cache."
    );
    return false;
  }
  if (isInTestMode()) {
    logger.debug(
      "In test mode. Skip uploading overlay-base database to cache."
    );
    return false;
  }
  const isValid = await checkOverlayBaseDatabase(
    codeql,
    config,
    logger,
    "Abort uploading overlay-base database to cache"
  );
  if (!isValid) {
    return false;
  }
  // Trim the database cluster before measuring and uploading it.
  await withGroupAsync("Cleaning up databases", async () => {
    await codeql.databaseCleanupCluster(config, "overlay" /* Overlay */);
  });
  const dbLocation = config.dbLocation;
  const sizeBytes = await tryGetFolderBytes(dbLocation, logger);
  if (sizeBytes === void 0) {
    logger.warning(
      "Failed to determine database size. Skip uploading overlay-base database to cache."
    );
    return false;
  }
  if (sizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
    logger.warning(
      `Database size (${Math.round(sizeBytes / 1e6)} MB) exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). Skip uploading overlay-base database to cache.`
    );
    return false;
  }
  const codeQlVersion = (await codeql.getVersion()).version;
  const checkoutPath = getRequiredInput("checkout_path");
  const cacheSaveKey = await getCacheSaveKey(
    config,
    codeQlVersion,
    checkoutPath,
    logger
  );
  logger.info(
    `Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`
  );
  try {
    // Bound the upload: a cacheId of undefined means the time limit expired.
    const savedCacheId = await waitForResultWithTimeLimit(
      MAX_CACHE_OPERATION_MS2,
      actionsCache4.saveCache([dbLocation], cacheSaveKey),
      () => {
      }
    );
    if (savedCacheId === void 0) {
      logger.warning("Timed out while uploading overlay-base database");
      return false;
    }
  } catch (err) {
    logger.warning(
      `Failed to upload overlay-base database to cache: ${err instanceof Error ? err.message : String(err)}`
    );
    return false;
  }
  logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
  return true;
}
/**
 * Builds the exact Actions cache key under which this run's overlay-base
 * database is saved: the restore-key prefix followed by the checked-out
 * commit SHA, workflow run ID, and run attempt, so each run writes a
 * distinct cache entry.
 *
 * @param codeQlVersion CLI version string included (via the prefix) in the key.
 * @param checkoutPath Path whose HEAD commit OID is embedded in the key.
 * @param logger Used to surface failures to read run metadata.
 */
async function getCacheSaveKey(config, codeQlVersion, checkoutPath, logger) {
  // Fall back to 1/1 when run metadata is unavailable rather than failing
  // the whole upload; the key is merely less unique in that case.
  let workflowRunId = 1;
  let workflowAttemptId = 1;
  try {
    workflowRunId = getWorkflowRunID();
    workflowAttemptId = getWorkflowRunAttempt();
  } catch (e) {
    logger.warning(
      `Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`
    );
  }
  const prefix = await getCacheRestoreKeyPrefix(config, codeQlVersion);
  const sha = await getCommitOid(checkoutPath);
  return `${prefix}${sha}-${workflowRunId}-${workflowAttemptId}`;
}
/**
 * Computes the key prefix shared by save and restore operations for
 * overlay-base database cache entries. The shape is:
 * `<CACHE_PREFIX>-<CACHE_VERSION2>-<componentsHash>-<languages>-<codeQlVersion>-`.
 */
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
  const cacheKeyComponents = {
    automationID: await getAutomationID()
    // Add more components here as needed in the future
  };
  const componentsHash = createCacheKeyHash(cacheKeyComponents);
  // Sort the languages so the key does not depend on configuration order.
  const sortedLanguages = [...config.languages].sort().join("_");
  return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${sortedLanguages}-${codeQlVersion}-`;
}
// src/status-report.ts
var os4 = __toESM(require("os"));
var core13 = __toESM(require_core());
@@ -111497,7 +111500,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs17 = __toESM(require("fs"));
var fs18 = __toESM(require("fs"));
var path14 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -111505,7 +111508,7 @@ var core15 = __toESM(require_core());
var jsonschema2 = __toESM(require_lib2());
// src/fingerprints.ts
var fs15 = __toESM(require("fs"));
var fs16 = __toESM(require("fs"));
var import_path3 = __toESM(require("path"));
// node_modules/long/index.js
@@ -112493,7 +112496,7 @@ async function hash(callback, filepath) {
}
updateHash(current);
};
const readStream = fs15.createReadStream(filepath, "utf8");
const readStream = fs16.createReadStream(filepath, "utf8");
for await (const data of readStream) {
for (let i = 0; i < data.length; ++i) {
processCharacter(data.charCodeAt(i));
@@ -112568,11 +112571,11 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
if (!import_path3.default.isAbsolute(uri)) {
uri = srcRootPrefix + uri;
}
if (!fs15.existsSync(uri)) {
if (!fs16.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return void 0;
}
if (fs15.statSync(uri).isDirectory()) {
if (fs16.statSync(uri).isDirectory()) {
logger.debug(`Unable to compute fingerprint for directory: ${uri}`);
return void 0;
}
@@ -112661,7 +112664,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
}
// src/sarif/index.ts
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
@@ -112676,7 +112679,7 @@ function getToolNames(sarifFile) {
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs16.readFileSync(sarifFilePath, "utf8"));
return JSON.parse(fs17.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
@@ -112809,8 +112812,8 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path14.resolve(tempDir, "combined-sarif");
fs17.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs17.mkdtempSync(path14.resolve(baseTempDir, "output-"));
fs18.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs18.mkdtempSync(path14.resolve(baseTempDir, "output-"));
const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
@@ -112852,7 +112855,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs17.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs18.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -112886,7 +112889,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs17.readdirSync(dir, { withFileTypes: true });
const entries = fs18.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path14.resolve(dir, entry.name));
@@ -112899,7 +112902,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs17.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs18.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -113034,7 +113037,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs17.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs18.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -113138,9 +113141,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs17.existsSync(outputDir)) {
fs17.mkdirSync(outputDir, { recursive: true });
} else if (!fs17.lstatSync(outputDir).isDirectory()) {
if (!fs18.existsSync(outputDir)) {
fs18.mkdirSync(outputDir, { recursive: true });
} else if (!fs18.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -113150,7 +113153,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs17.writeFileSync(outputFile, sarifPayload);
fs18.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -113386,7 +113389,7 @@ function doesGoExtractionOutputExist(config) {
"trap",
"go" /* go */
);
return fs18.existsSync(trapDirectory) && fs18.readdirSync(trapDirectory).some(
return fs19.existsSync(trapDirectory) && fs19.readdirSync(trapDirectory).some(
(fileName) => [
".trap",
".trap.gz",
+8 -11
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache3;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
var core15 = __importStar2(require_core());
var path9 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache3(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -104140,7 +104140,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs2 = __toESM(require("fs"));
@@ -104336,8 +104335,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -105012,10 +105009,10 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
+12 -15
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core19 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core19 = __importStar2(require_core());
var path19 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core19.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core19.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -165564,7 +165564,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path5 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -165794,8 +165793,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -166499,7 +166496,7 @@ var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var fs7 = __toESM(require("fs"));
var path7 = __toESM(require("path"));
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
var MAX_CACHE_OPERATION_MS = 3e4;
var STATUS_FILE_NAME = "overlay-status.json";
function getStatusFilePath(languages) {
@@ -166530,7 +166527,7 @@ async function saveOverlayStatus(codeql, languages, diskUsage, status, logger) {
await fs7.promises.writeFile(statusFile, JSON.stringify(status));
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS,
actionsCache2.saveCache([statusFile], cacheKey),
actionsCache.saveCache([statusFile], cacheKey),
() => {
logger.warning("Timed out saving overlay status to cache.");
}
@@ -166553,7 +166550,7 @@ async function getCacheKey(codeql, languages, diskUsage) {
}
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -168216,7 +168213,7 @@ var io5 = __toESM(require_io());
var core12 = __toESM(require_core());
// src/dependency-caching.ts
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
async function getDependencyCacheUsage(logger) {
@@ -170048,7 +170045,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
const automationID = getAutomationID(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
@@ -170061,7 +170058,7 @@ function populateRunAutomationDetails(sarifFile, category, analysis_key, environ
}
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
function getAutomationID(category, analysis_key, environment) {
if (category !== void 0) {
let automationID = category;
if (!automationID.endsWith("/")) {
+221 -218
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var os6 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs18.existsSync(filePath)) {
if (!fs19.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os6.EOL}`, {
fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os6.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var path18 = __importStar2(require("path"));
_a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs18.promises.readlink(fsPath);
const result = yield fs19.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs18.constants.O_RDONLY;
exports2.READONLY = fs19.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -50448,7 +50448,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core16 = __importStar2(require_core());
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path18 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50502,7 +50502,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core16.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs18.promises.lstat(searchPath));
yield __await2(fs19.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50536,7 +50536,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path18.join(item.path, x), childLevel));
const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path18.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50571,7 +50571,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs18.promises.stat(item.path);
stats = yield fs19.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50583,10 +50583,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs18.promises.lstat(item.path);
stats = yield fs19.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs18.promises.realpath(item.path);
const realPath = yield fs19.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50695,7 +50695,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core16 = __importStar2(require_core());
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path18 = __importStar2(require("path"));
@@ -50718,13 +50718,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs18.statSync(file).isDirectory()) {
if (fs19.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs18.createReadStream(file), hash);
yield pipeline(fs19.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
@@ -52099,7 +52099,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var path18 = __importStar2(require("path"));
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52128,7 +52128,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs18.statSync(filePath).size;
return fs19.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52166,7 +52166,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs18.unlink)(filePath);
return util.promisify(fs19.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52208,7 +52208,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92768,7 +92768,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92879,7 +92879,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs18.createWriteStream(archivePath);
const writeStream = fs19.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92904,7 +92904,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs18.promises.open(archivePath, "w");
const archiveDescriptor = yield fs19.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -93020,7 +93020,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs18.openSync(archivePath, "w");
const fd = fs19.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -93038,12 +93038,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs18.writeFileSync(fd, result);
fs19.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs18.closeSync(fd);
fs19.closeSync(fd);
}
}
});
@@ -93365,7 +93365,7 @@ var require_cacheHttpClient = __commonJS({
var core16 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93500,7 +93500,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs18.openSync(archivePath, "r");
const fd = fs19.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93514,7 +93514,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, {
fd,
start,
end,
@@ -93525,7 +93525,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs18.closeSync(fd);
fs19.closeSync(fd);
}
return;
});
@@ -99481,7 +99481,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os6 = require("os");
var cp = require("child_process");
var fs18 = require("fs");
var fs19 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os6.platform();
@@ -99543,10 +99543,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs18.existsSync(lsbReleaseFile)) {
contents = fs18.readFileSync(lsbReleaseFile).toString();
} else if (fs18.existsSync(osReleaseFile)) {
contents = fs18.readFileSync(osReleaseFile).toString();
if (fs19.existsSync(lsbReleaseFile)) {
contents = fs19.readFileSync(lsbReleaseFile).toString();
} else if (fs19.existsSync(osReleaseFile)) {
contents = fs19.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99755,7 +99755,7 @@ var require_tool_cache = __commonJS({
var core16 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os6 = __importStar2(require("os"));
var path18 = __importStar2(require("path"));
@@ -99801,7 +99801,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs18.existsSync(dest)) {
if (fs19.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99825,7 +99825,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs18.createWriteStream(dest));
yield pipeline(readStream, fs19.createWriteStream(dest));
core16.debug("download complete");
succeeded = true;
return dest;
@@ -100037,11 +100037,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os6.arch();
core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
core16.debug(`source dir: ${sourceDir}`);
if (!fs18.statSync(sourceDir).isDirectory()) {
if (!fs19.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs18.readdirSync(sourceDir)) {
for (const itemName of fs19.readdirSync(sourceDir)) {
const s = path18.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -100055,7 +100055,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os6.arch();
core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
core16.debug(`source file: ${sourceFile}`);
if (!fs18.statSync(sourceFile).isFile()) {
if (!fs19.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -100084,7 +100084,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path18.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core16.debug(`checking cache: ${cachePath}`);
if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) {
if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) {
core16.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -100097,12 +100097,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os6.arch();
const toolPath = path18.join(_getCacheDirectory(), toolName);
if (fs18.existsSync(toolPath)) {
const children = fs18.readdirSync(toolPath);
if (fs19.existsSync(toolPath)) {
const children = fs19.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path18.join(toolPath, child, arch2 || "");
if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) {
if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -100173,7 +100173,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path18.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs18.writeFileSync(markerPath, "");
fs19.writeFileSync(markerPath, "");
core16.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -100784,7 +100784,7 @@ __export(init_action_exports, {
CODEQL_VERSION_JAR_MINIMIZATION: () => CODEQL_VERSION_JAR_MINIMIZATION
});
module.exports = __toCommonJS(init_action_exports);
var fs17 = __toESM(require("fs"));
var fs18 = __toESM(require("fs"));
var path17 = __toESM(require("path"));
var core15 = __toESM(require_core());
var github3 = __toESM(require_github());
@@ -100869,21 +100869,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs18 = options.fs || await import("node:fs/promises");
const fs19 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105195,7 +105195,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path6 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -105440,8 +105439,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -105543,153 +105540,6 @@ async function getDiffRangeFilePaths(sourceRoot, logger) {
).filter((rel) => !rel.startsWith(".."));
return [...new Set(relativePaths)];
}
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs4.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay" /* Overlay */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip downloading overlay-base database from cache.`
);
return void 0;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip downloading overlay-base database from cache."
);
return void 0;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip downloading overlay-base database from cache."
);
return void 0;
}
const dbLocation = config.dbLocation;
const codeQlVersion = (await codeql.getVersion()).version;
const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
logger.info(
`Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}`
);
let databaseDownloadDurationMs = 0;
try {
const databaseDownloadStart = performance.now();
const foundKey = await waitForResultWithTimeLimit(
// This ten-minute limit for the cache restore operation is mainly to
// guard against the possibility that the cache service is unresponsive
// and hangs outside the data download.
//
// Data download (which is normally the most time-consuming part of the
// restore operation) should not run long enough to hit this limit. Even
// for an extremely large 10GB database, at a download speed of 40MB/s
// (see below), the download should complete within five minutes. If we
// do hit this limit, there are likely more serious problems other than
// mere slow download speed.
//
// This is important because we don't want any ongoing file operations
// on the database directory when we do hit this limit. Hitting this
// time limit takes us to a fallback path where we re-initialize the
// database from scratch at dbLocation, and having the cache restore
// operation continue to write into dbLocation in the background would
// really mess things up. We want to hit this limit only in the case
// of a hung cache service, not just slow download speed.
MAX_CACHE_OPERATION_MS,
actionsCache.restoreCache(
[dbLocation],
cacheRestoreKeyPrefix,
void 0,
{
// Azure SDK download (which is the default) uses 128MB segments; see
// https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
// Setting segmentTimeoutInMs to 3000 translates to segment download
// speed of about 40 MB/s, which should be achievable unless the
// download is unreliable (in which case we do want to abort).
segmentTimeoutInMs: 3e3
}
),
() => {
logger.info("Timed out downloading overlay-base database from cache");
}
);
databaseDownloadDurationMs = Math.round(
performance.now() - databaseDownloadStart
);
if (foundKey === void 0) {
logger.info("No overlay-base database found in Actions cache");
return void 0;
}
logger.info(
`Downloaded overlay-base database in cache with key ${foundKey}`
);
} catch (error3) {
logger.warning(
`Failed to download overlay-base database from cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return void 0;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Downloaded overlay-base database is invalid"
);
if (!databaseIsValid) {
logger.warning("Downloaded overlay-base database failed validation");
return void 0;
}
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.info(
"Filesystem error while accessing downloaded overlay-base database"
);
return void 0;
}
logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
return {
databaseSizeBytes: Math.round(databaseSizeBytes),
databaseDownloadDurationMs
};
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
const languages = [...config.languages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -106477,8 +106327,8 @@ Improved incremental analysis will be automatically retried when the next versio
// src/overlay/status.ts
var fs7 = __toESM(require("fs"));
var path8 = __toESM(require("path"));
var actionsCache2 = __toESM(require_cache5());
var MAX_CACHE_OPERATION_MS2 = 3e4;
var actionsCache = __toESM(require_cache5());
var MAX_CACHE_OPERATION_MS = 3e4;
var STATUS_FILE_NAME = "overlay-status.json";
function getStatusFilePath(languages) {
return path8.join(
@@ -106510,8 +106360,8 @@ async function getOverlayStatus(codeql, languages, diskUsage, logger) {
try {
await fs7.promises.mkdir(path8.dirname(statusFile), { recursive: true });
const foundKey = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS2,
actionsCache2.restoreCache([statusFile], cacheKey3),
MAX_CACHE_OPERATION_MS,
actionsCache.restoreCache([statusFile], cacheKey3),
() => {
logger.warning("Timed out restoring overlay status from cache.");
}
@@ -106550,10 +106400,10 @@ async function getCacheKey(codeql, languages, diskUsage) {
// src/trap-caching.ts
var fs8 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var actionsCache3 = __toESM(require_cache5());
var CACHE_VERSION2 = 1;
var actionsCache2 = __toESM(require_cache5());
var CACHE_VERSION = 1;
var CODEQL_TRAP_CACHE_PREFIX = "codeql-trap";
var MAX_CACHE_OPERATION_MS3 = 12e4;
var MAX_CACHE_OPERATION_MS2 = 12e4;
async function downloadTrapCaches(codeql, languages, logger) {
const result = {};
const languagesSupportingCaching = await getLanguagesSupportingCaching(
@@ -106594,8 +106444,8 @@ async function downloadTrapCaches(codeql, languages, logger) {
`Looking in Actions cache for TRAP cache with key ${preferredKey}`
);
const found = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS3,
actionsCache3.restoreCache([cacheDir], preferredKey, [
MAX_CACHE_OPERATION_MS2,
actionsCache2.restoreCache([cacheDir], preferredKey, [
// Fall back to any cache with the right key prefix
await cachePrefix(codeql, language)
]),
@@ -106653,7 +106503,7 @@ async function cacheKey(codeql, language, baseSha) {
return `${await cachePrefix(codeql, language)}${baseSha}`;
}
async function cachePrefix(codeql, language) {
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION2}-${(await codeql.getVersion()).version}-${language}-`;
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION}-${(await codeql.getVersion()).version}-${language}-`;
}
// src/config-utils.ts
@@ -107475,7 +107325,7 @@ async function logGeneratedFilesTelemetry(config, duration, generatedFilesCount)
// src/dependency-caching.ts
var os3 = __toESM(require("os"));
var import_path2 = require("path");
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
var CODEQL_DEPENDENCY_CACHE_VERSION = 1;
@@ -107602,7 +107452,7 @@ async function downloadDependencyCaches(codeql, features, languages, logger) {
)}`
);
const start = performance.now();
const hitKey = await actionsCache4.restoreCache(
const hitKey = await actionsCache3.restoreCache(
await cacheConfig.getDependencyPaths(codeql, features),
primaryKey,
restoreKeys
@@ -109684,6 +109534,159 @@ To opt out of this change, ${envVarOptOut}`;
core12.exportVariable("CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, "true");
}
// src/overlay/caching.ts
var fs16 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS3 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs16.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay" /* Overlay */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip downloading overlay-base database from cache.`
);
return void 0;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip downloading overlay-base database from cache."
);
return void 0;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip downloading overlay-base database from cache."
);
return void 0;
}
const dbLocation = config.dbLocation;
const codeQlVersion = (await codeql.getVersion()).version;
const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
logger.info(
`Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}`
);
let databaseDownloadDurationMs = 0;
try {
const databaseDownloadStart = performance.now();
const foundKey = await waitForResultWithTimeLimit(
// This ten-minute limit for the cache restore operation is mainly to
// guard against the possibility that the cache service is unresponsive
// and hangs outside the data download.
//
// Data download (which is normally the most time-consuming part of the
// restore operation) should not run long enough to hit this limit. Even
// for an extremely large 10GB database, at a download speed of 40MB/s
// (see below), the download should complete within five minutes. If we
// do hit this limit, there are likely more serious problems other than
// mere slow download speed.
//
// This is important because we don't want any ongoing file operations
// on the database directory when we do hit this limit. Hitting this
// time limit takes us to a fallback path where we re-initialize the
// database from scratch at dbLocation, and having the cache restore
// operation continue to write into dbLocation in the background would
// really mess things up. We want to hit this limit only in the case
// of a hung cache service, not just slow download speed.
MAX_CACHE_OPERATION_MS3,
actionsCache4.restoreCache(
[dbLocation],
cacheRestoreKeyPrefix,
void 0,
{
// Azure SDK download (which is the default) uses 128MB segments; see
// https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
// Setting segmentTimeoutInMs to 3000 translates to segment download
// speed of about 40 MB/s, which should be achievable unless the
// download is unreliable (in which case we do want to abort).
segmentTimeoutInMs: 3e3
}
),
() => {
logger.info("Timed out downloading overlay-base database from cache");
}
);
databaseDownloadDurationMs = Math.round(
performance.now() - databaseDownloadStart
);
if (foundKey === void 0) {
logger.info("No overlay-base database found in Actions cache");
return void 0;
}
logger.info(
`Downloaded overlay-base database in cache with key ${foundKey}`
);
} catch (error3) {
logger.warning(
`Failed to download overlay-base database from cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return void 0;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Downloaded overlay-base database is invalid"
);
if (!databaseIsValid) {
logger.warning("Downloaded overlay-base database failed validation");
return void 0;
}
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.info(
"Filesystem error while accessing downloaded overlay-base database"
);
return void 0;
}
logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
return {
databaseSizeBytes: Math.round(databaseSizeBytes),
databaseDownloadDurationMs
};
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
const languages = [...config.languages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
// src/status-report.ts
var os5 = __toESM(require("os"));
var core13 = __toESM(require_core());
@@ -109938,7 +109941,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/workflow.ts
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var path16 = __toESM(require("path"));
var import_zlib = __toESM(require("zlib"));
var core14 = __toESM(require_core());
@@ -110090,7 +110093,7 @@ async function getWorkflow(logger) {
);
}
const workflowPath = await getWorkflowAbsolutePath(logger);
return load(fs16.readFileSync(workflowPath, "utf-8"));
return load(fs17.readFileSync(workflowPath, "utf-8"));
}
async function getWorkflowAbsolutePath(logger) {
const relativePath = await getWorkflowRelativePath();
@@ -110098,7 +110101,7 @@ async function getWorkflowAbsolutePath(logger) {
getRequiredEnvParam("GITHUB_WORKSPACE"),
relativePath
);
if (fs16.existsSync(absolutePath)) {
if (fs17.existsSync(absolutePath)) {
logger.debug(
`Derived the following absolute path for the currently executing workflow: ${absolutePath}.`
);
@@ -110430,16 +110433,16 @@ async function run(startedAt) {
"codeql-action-go-tracing",
"bin"
);
fs17.mkdirSync(tempBinPath, { recursive: true });
fs18.mkdirSync(tempBinPath, { recursive: true });
core15.addPath(tempBinPath);
const goWrapperPath = path17.resolve(tempBinPath, "go");
fs17.writeFileSync(
fs18.writeFileSync(
goWrapperPath,
`#!/bin/bash
exec ${goBinaryPath} "$@"`
);
fs17.chmodSync(goWrapperPath, "755");
fs18.chmodSync(goWrapperPath, "755");
core15.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath);
} catch (e) {
logger.warning(
+8 -11
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var path7 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache3(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -104133,7 +104133,6 @@ var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs2 = __toESM(require("fs"));
@@ -104329,8 +104328,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -104680,10 +104677,10 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
+1344 -1345
View File
File diff suppressed because it is too large Load Diff
+9 -14
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path4 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -161699,9 +161699,6 @@ function getActionsLogger() {
// src/feature-flags.ts
var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var core8 = __toESM(require_core());
var toolrunner2 = __toESM(require_toolrunner());
@@ -161717,8 +161714,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -161964,10 +161959,10 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -162197,7 +162192,7 @@ var semver7 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/dependency-caching.ts
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
// src/artifact-scanner.ts
+2153 -2158
View File
File diff suppressed because it is too large Load Diff
+10 -13
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var path12 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache3(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -107255,7 +107255,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path5 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -107485,8 +107484,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -107859,10 +107856,10 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -110816,7 +110813,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
const automationID = getAutomationID(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === void 0) {
@@ -110829,7 +110826,7 @@ function populateRunAutomationDetails(sarifFile, category, analysis_key, environ
}
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
function getAutomationID(category, analysis_key, environment) {
if (category !== void 0) {
let automationID = category;
if (!automationID.endsWith("/")) {
+9 -14
View File
@@ -155544,7 +155544,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -155721,7 +155721,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -156995,8 +156995,8 @@ var require_cache6 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path3 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -157053,7 +157053,7 @@ var require_cache6 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -157197,7 +157197,7 @@ var require_cache6 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -161865,9 +161865,6 @@ function withGroup(groupName, f) {
// src/feature-flags.ts
var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var actionsCache = __toESM(require_cache6());
// src/git-utils.ts
var core8 = __toESM(require_core());
var toolrunner2 = __toESM(require_toolrunner());
@@ -161883,8 +161880,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -162134,10 +162129,10 @@ var builtin_default = {
var builtInLanguageSet = new Set(builtin_default.languages);
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache6());
var actionsCache = __toESM(require_cache6());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache6());
var actionsCache2 = __toESM(require_cache6());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -162184,7 +162179,7 @@ var semver7 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/dependency-caching.ts
var actionsCache4 = __toESM(require_cache6());
var actionsCache3 = __toESM(require_cache6());
var glob = __toESM(require_glob2());
// src/artifact-scanner.ts
+1344 -1347
View File
File diff suppressed because it is too large Load Diff
+1
View File
@@ -3,6 +3,7 @@
"compilerOptions": {
/* Basic Options */
"lib": ["esnext"],
"module": "preserve",
"rootDir": "..",
"sourceMap": false,
"noEmit": true,
+1 -1
View File
@@ -32,7 +32,7 @@ import { EnvVar } from "./environment";
import { initFeatures } from "./feature-flags";
import { BuiltInLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging";
import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay";
import { cleanupAndUploadOverlayBaseDatabaseToCache } from "./overlay/caching";
import { getRepositoryNwo } from "./repository";
import * as statusReport from "./status-report";
import {
+1 -1
View File
@@ -23,7 +23,7 @@ import { EnvVar } from "./environment";
import { FeatureEnablement, Feature } from "./feature-flags";
import { BuiltInLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import type * as sarif from "./sarif";
import { DatabaseCreationTimings, EventReport } from "./status-report";
import { endTracingForCluster } from "./tracer-config";
+2 -5
View File
@@ -24,11 +24,8 @@ import {
import { isAnalyzingDefaultBranch } from "./git-utils";
import { Language } from "./languages";
import { Logger } from "./logging";
import {
OverlayDatabaseMode,
writeBaseDatabaseOidsFile,
writeOverlayChangesFile,
} from "./overlay";
import { writeBaseDatabaseOidsFile, writeOverlayChangesFile } from "./overlay";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import * as setupCodeql from "./setup-codeql";
import { ZstdAvailability } from "./tar";
import { ToolsDownloadStatusReport } from "./tools-download";
+2 -1
View File
@@ -20,8 +20,9 @@ import * as gitUtils from "./git-utils";
import { GitVersionInfo } from "./git-utils";
import { BuiltInLanguage, Language } from "./languages";
import { getRunnerLogger } from "./logging";
import { CODEQL_OVERLAY_MINIMUM_VERSION, OverlayDatabaseMode } from "./overlay";
import { CODEQL_OVERLAY_MINIMUM_VERSION } from "./overlay";
import { OverlayDisabledReason } from "./overlay/diagnostics";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import * as overlayStatus from "./overlay/status";
import { parseRepositoryNwo } from "./repository";
import {
+2 -1
View File
@@ -50,11 +50,12 @@ import {
} from "./git-utils";
import { BuiltInLanguage, Language } from "./languages";
import { Logger } from "./logging";
import { CODEQL_OVERLAY_MINIMUM_VERSION, OverlayDatabaseMode } from "./overlay";
import { CODEQL_OVERLAY_MINIMUM_VERSION } from "./overlay";
import {
addOverlayDisablementDiagnostics,
OverlayDisabledReason,
} from "./overlay/diagnostics";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import { shouldSkipOverlayAnalysis } from "./overlay/status";
import { RepositoryNwo } from "./repository";
import { ToolsFeature } from "./tools-features";
+1 -1
View File
@@ -12,7 +12,7 @@ import { Config } from "./config-utils";
import { Feature, FeatureEnablement } from "./feature-flags";
import * as gitUtils from "./git-utils";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import { RepositoryNwo } from "./repository";
import * as util from "./util";
import { asHTTPError, bundleDb, CleanupLevel, parseGitHubUrl } from "./util";
+1 -1
View File
@@ -12,7 +12,7 @@ import { EnvVar } from "./environment";
import { Feature } from "./feature-flags";
import * as initActionPostHelper from "./init-action-post-helper";
import { getRunnerLogger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import * as overlayStatus from "./overlay/status";
import { parseRepositoryNwo } from "./repository";
import {
+1 -1
View File
@@ -21,7 +21,7 @@ import * as dependencyCaching from "./dependency-caching";
import { EnvVar } from "./environment";
import { Feature, FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import {
createOverlayStatus,
OverlayStatus,
+2 -2
View File
@@ -63,8 +63,8 @@ import { getActionsLogger, Logger, withGroupAsync } from "./logging";
import {
downloadOverlayBaseDatabaseFromCache,
OverlayBaseDatabaseDownloadStats,
OverlayDatabaseMode,
} from "./overlay";
} from "./overlay/caching";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
+287
View File
@@ -0,0 +1,287 @@
import * as fs from "fs";
import * as path from "path";
import * as actionsCache from "@actions/cache";
import test from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "../actions-util";
import * as apiClient from "../api-client";
import { ResolveDatabaseOutput } from "../codeql";
import * as gitUtils from "../git-utils";
import { BuiltInLanguage } from "../languages";
import { getRunnerLogger } from "../logging";
import {
createTestConfig,
mockCodeQLVersion,
setupTests,
} from "../testing-utils";
import * as utils from "../util";
import { withTmpDir } from "../util";
import {
downloadOverlayBaseDatabaseFromCache,
getCacheRestoreKeyPrefix,
getCacheSaveKey,
} from "./caching";
import { OverlayDatabaseMode } from "./overlay-database-mode";
// Shared suite-wide test setup.
setupTests(test);

// Describes one scenario for the downloadOverlayBaseDatabaseFromCache macro
// below. Each field controls one stubbed dependency or config switch.
interface DownloadOverlayBaseDatabaseTestCase {
  // Overlay mode configured on the test Config.
  overlayDatabaseMode: OverlayDatabaseMode;
  // Whether Config.useOverlayDatabaseCaching is enabled.
  useOverlayDatabaseCaching: boolean;
  // Return value stubbed for utils.isInTestMode.
  isInTestMode: boolean;
  // Result of the stubbed actionsCache.restoreCache call: a cache key on a
  // hit, undefined on a miss, or an Error to simulate a failed download.
  restoreCacheResult: string | undefined | Error;
  // Whether to create the base-database-oids.json file that the caching
  // module checks for when validating an overlay-base database.
  hasBaseDatabaseOidsFile: boolean;
  // When false, utils.tryGetFolderBytes resolves to undefined to simulate
  // a filesystem error while sizing the downloaded database.
  tryGetFolderBytesSucceeds: boolean;
  // CodeQL CLI version reported by the mocked CodeQL instance.
  codeQLVersion: string;
  // Result (or rejection Error) of the stubbed codeql.resolveDatabase call.
  resolveDatabaseOutput: ResolveDatabaseOutput | Error;
}

// Baseline "happy path" scenario; individual tests override single fields.
const defaultDownloadTestCase: DownloadOverlayBaseDatabaseTestCase = {
  overlayDatabaseMode: OverlayDatabaseMode.Overlay,
  useOverlayDatabaseCaching: true,
  isInTestMode: false,
  restoreCacheResult: "cache-key",
  hasBaseDatabaseOidsFile: true,
  tryGetFolderBytesSucceeds: true,
  codeQLVersion: "2.20.5",
  resolveDatabaseOutput: { overlayBaseSpecifier: "20250626:XXX" },
};
// Ava macro that runs downloadOverlayBaseDatabaseFromCache against a
// (partially overridden) test case and asserts whether the download is
// expected to succeed. Used with test.serial because the stubs patch
// module-level functions.
const testDownloadOverlayBaseDatabaseFromCache = test.macro({
  exec: async (
    t,
    _title: string,
    partialTestCase: Partial<DownloadOverlayBaseDatabaseTestCase>,
    expectDownloadSuccess: boolean,
  ) => {
    await withTmpDir(async (tmpDir) => {
      const dbLocation = path.join(tmpDir, "db");
      await fs.promises.mkdir(dbLocation, { recursive: true });
      const logger = getRunnerLogger(true);
      // Merge the overrides onto the happy-path defaults.
      const testCase = { ...defaultDownloadTestCase, ...partialTestCase };
      const config = createTestConfig({
        dbLocation,
        languages: [BuiltInLanguage.java],
      });
      config.overlayDatabaseMode = testCase.overlayDatabaseMode;
      config.useOverlayDatabaseCaching = testCase.useOverlayDatabaseCaching;
      if (testCase.hasBaseDatabaseOidsFile) {
        const baseDatabaseOidsFile = path.join(
          dbLocation,
          "base-database-oids.json",
        );
        await fs.promises.writeFile(baseDatabaseOidsFile, JSON.stringify({}));
      }
      // Collect every stub so all of them are restored in the finally block
      // below, even when an assertion or the code under test throws.
      const stubs: sinon.SinonStub[] = [];
      const getAutomationIDStub = sinon
        .stub(apiClient, "getAutomationID")
        .resolves("test-automation-id/");
      stubs.push(getAutomationIDStub);
      const isInTestModeStub = sinon
        .stub(utils, "isInTestMode")
        .returns(testCase.isInTestMode);
      stubs.push(isInTestModeStub);
      // An Error in the test case means the cache restore rejects; any other
      // value is resolved as the restore result.
      if (testCase.restoreCacheResult instanceof Error) {
        const restoreCacheStub = sinon
          .stub(actionsCache, "restoreCache")
          .rejects(testCase.restoreCacheResult);
        stubs.push(restoreCacheStub);
      } else {
        const restoreCacheStub = sinon
          .stub(actionsCache, "restoreCache")
          .resolves(testCase.restoreCacheResult);
        stubs.push(restoreCacheStub);
      }
      const tryGetFolderBytesStub = sinon
        .stub(utils, "tryGetFolderBytes")
        .resolves(testCase.tryGetFolderBytesSucceeds ? 1024 * 1024 : undefined);
      stubs.push(tryGetFolderBytesStub);
      const codeql = mockCodeQLVersion(testCase.codeQLVersion);
      // Same Error-vs-value convention as restoreCacheResult above.
      if (testCase.resolveDatabaseOutput instanceof Error) {
        const resolveDatabaseStub = sinon
          .stub(codeql, "resolveDatabase")
          .rejects(testCase.resolveDatabaseOutput);
        stubs.push(resolveDatabaseStub);
      } else {
        const resolveDatabaseStub = sinon
          .stub(codeql, "resolveDatabase")
          .resolves(testCase.resolveDatabaseOutput);
        stubs.push(resolveDatabaseStub);
      }
      try {
        const result = await downloadOverlayBaseDatabaseFromCache(
          codeql,
          config,
          logger,
        );
        // A successful download yields a stats object; any skipped or failed
        // download yields undefined.
        if (expectDownloadSuccess) {
          t.truthy(result);
        } else {
          t.is(result, undefined);
        }
      } finally {
        for (const stub of stubs) {
          stub.restore();
        }
      }
    });
  },
  title: (_, title) => `downloadOverlayBaseDatabaseFromCache: ${title}`,
});
// Data-driven scenarios for the macro above: the first object is the set of
// overrides applied to defaultDownloadTestCase, the final boolean is whether
// the download is expected to succeed.
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns stats when successful",
  {},
  true,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when mode is OverlayDatabaseMode.OverlayBase",
  {
    overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when mode is OverlayDatabaseMode.None",
  {
    overlayDatabaseMode: OverlayDatabaseMode.None,
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when caching is disabled",
  {
    useOverlayDatabaseCaching: false,
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined in test mode",
  {
    isInTestMode: true,
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when cache miss",
  {
    restoreCacheResult: undefined,
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when download fails",
  {
    restoreCacheResult: new Error("Download failed"),
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when downloaded database is invalid",
  {
    hasBaseDatabaseOidsFile: false,
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when downloaded database doesn't have an overlayBaseSpecifier",
  {
    resolveDatabaseOutput: {},
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when resolving database metadata fails",
  {
    resolveDatabaseOutput: new Error("Failed to resolve database metadata"),
  },
  false,
);
test.serial(
  testDownloadOverlayBaseDatabaseFromCache,
  "returns undefined when filesystem error occurs",
  {
    tryGetFolderBytesSucceeds: false,
  },
  false,
);
// Golden test: pins the exact cache save key and restore key prefix produced
// for a fixed set of inputs. If this test fails, the cache key format has
// changed, which invalidates previously cached overlay-base databases.
test.serial("overlay-base database cache keys remain stable", async (t) => {
  const logger = getRunnerLogger(true);
  const config = createTestConfig({ languages: ["python", "javascript"] });
  const codeQlVersion = "2.23.0";
  const commitOid = "abc123def456";
  // Stub every external input that feeds into the key so the expected
  // values below are fully deterministic.
  sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
  sinon.stub(gitUtils, "getCommitOid").resolves(commitOid);
  sinon.stub(actionsUtil, "getWorkflowRunID").returns(12345);
  sinon.stub(actionsUtil, "getWorkflowRunAttempt").returns(1);
  const saveKey = await getCacheSaveKey(
    config,
    codeQlVersion,
    "checkout-path",
    logger,
  );
  const expectedSaveKey =
    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1";
  t.is(
    saveKey,
    expectedSaveKey,
    "Cache save key changed unexpectedly. " +
      "This may indicate breaking changes in the cache key generation logic.",
  );
  const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
    config,
    codeQlVersion,
  );
  const expectedRestoreKeyPrefix =
    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
  t.is(
    restoreKeyPrefix,
    expectedRestoreKeyPrefix,
    "Cache restore key prefix changed unexpectedly. " +
      "This may indicate breaking changes in the cache key generation logic.",
  );
  // The save key must extend the restore key prefix so that a restore by
  // prefix can find databases stored with the save key.
  t.true(
    saveKey.startsWith(restoreKeyPrefix),
    `Expected save key "${saveKey}" to start with restore key prefix "${restoreKeyPrefix}"`,
  );
});
+428
View File
@@ -0,0 +1,428 @@
import * as fs from "fs";
import * as actionsCache from "@actions/cache";
import {
getRequiredInput,
getWorkflowRunAttempt,
getWorkflowRunID,
} from "../actions-util";
import { getAutomationID } from "../api-client";
import { createCacheKeyHash } from "../caching-utils";
import { type CodeQL } from "../codeql";
import { type Config } from "../config-utils";
import { getCommitOid } from "../git-utils";
import { Logger, withGroupAsync } from "../logging";
import {
CleanupLevel,
getBaseDatabaseOidsFilePath,
getCodeQLDatabasePath,
getErrorMessage,
isInTestMode,
tryGetFolderBytes,
waitForResultWithTimeLimit,
} from "../util";
import { OverlayDatabaseMode } from "./overlay-database-mode";
/**
 * The maximum (uncompressed) size of the overlay base database that we will
 * upload. By default, the Actions Cache has an overall capacity of 10 GB, and
 * the Actions Cache client library uses zstd compression.
 *
 * Ideally we would apply a size limit to the compressed overlay-base database,
 * but we cannot do so because compression is handled transparently by the
 * Actions Cache client library. Instead we place a limit on the uncompressed
 * size of the overlay-base database.
 *
 * Assuming 2.5:1 compression ratio, the 7.5 GB limit on uncompressed data would
 * translate to a limit of around 3 GB after compression.
 */
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES =
  OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1_000_000;

// Constants for database caching
// Cache format version component of the cache key (see
// getCacheRestoreKeyPrefix below).
const CACHE_VERSION = 1;
// Distinguishes overlay-base databases from other Actions cache objects.
const CACHE_PREFIX = "codeql-overlay-base-database";
// The purpose of this ten-minute limit is to guard against the possibility
// that the cache service is unresponsive, which would otherwise cause the
// entire action to hang. Normally we expect cache operations to complete
// within two minutes.
const MAX_CACHE_OPERATION_MS = 600_000;
/**
* Checks that the overlay-base database is valid by checking for the
* existence of the base database OIDs file.
*
* @param config The configuration object
* @param logger The logger instance
* @param warningPrefix Prefix for the check failure warning message
* @returns True if the verification succeeded, false otherwise
*/
async function checkOverlayBaseDatabase(
codeql: CodeQL,
config: Config,
logger: Logger,
warningPrefix: string,
): Promise<boolean> {
// An overlay-base database should contain the base database OIDs file.
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`,
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (
resolveDatabaseOutput === undefined ||
!("overlayBaseSpecifier" in resolveDatabaseOutput)
) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ` +
`${resolveDatabaseOutput.overlayBaseSpecifier}`,
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
/**
* Uploads the overlay-base database to the GitHub Actions cache. If conditions
* for uploading are not met, the function does nothing and returns false.
*
* This function uses the `checkout_path` input to determine the repository path
* and works only when called from `analyze` or `upload-sarif`.
*
* @param codeql The CodeQL instance
* @param config The configuration object
* @param logger The logger instance
* @returns A promise that resolves to true if the upload was performed and
* successfully completed, or false otherwise
*/
export async function cleanupAndUploadOverlayBaseDatabaseToCache(
codeql: CodeQL,
config: Config,
logger: Logger,
): Promise<boolean> {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== OverlayDatabaseMode.OverlayBase) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. ` +
"Skip uploading overlay-base database to cache.",
);
return false;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. " +
"Skip uploading overlay-base database to cache.",
);
return false;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip uploading overlay-base database to cache.",
);
return false;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Abort uploading overlay-base database to cache",
);
if (!databaseIsValid) {
return false;
}
// Clean up the database using the overlay cleanup level.
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, CleanupLevel.Overlay);
});
const dbLocation = config.dbLocation;
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === undefined) {
logger.warning(
"Failed to determine database size. " +
"Skip uploading overlay-base database to cache.",
);
return false;
}
if (databaseSizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
const databaseSizeMB = Math.round(databaseSizeBytes / 1_000_000);
logger.warning(
`Database size (${databaseSizeMB} MB) ` +
`exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). ` +
"Skip uploading overlay-base database to cache.",
);
return false;
}
const codeQlVersion = (await codeql.getVersion()).version;
const checkoutPath = getRequiredInput("checkout_path");
const cacheSaveKey = await getCacheSaveKey(
config,
codeQlVersion,
checkoutPath,
logger,
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`,
);
try {
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS,
actionsCache.saveCache([dbLocation], cacheSaveKey),
() => {},
);
if (cacheId === undefined) {
logger.warning("Timed out while uploading overlay-base database");
return false;
}
} catch (error) {
logger.warning(
"Failed to upload overlay-base database to cache: " +
`${error instanceof Error ? error.message : String(error)}`,
);
return false;
}
logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
return true;
}
/** Statistics about a successful overlay-base database download. */
export interface OverlayBaseDatabaseDownloadStats {
  // Uncompressed size of the downloaded database directory, in bytes.
  databaseSizeBytes: number;
  // Wall-clock duration of the cache restore operation, in milliseconds.
  databaseDownloadDurationMs: number;
}
/**
* Downloads the overlay-base database from the GitHub Actions cache. If conditions
* for downloading are not met, the function does nothing and returns false.
*
* @param codeql The CodeQL instance
* @param config The configuration object
* @param logger The logger instance
* @returns A promise that resolves to download statistics if an overlay-base
* database was successfully downloaded, or undefined if the download was
* either not performed or failed.
*/
export async function downloadOverlayBaseDatabaseFromCache(
codeql: CodeQL,
config: Config,
logger: Logger,
): Promise<OverlayBaseDatabaseDownloadStats | undefined> {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== OverlayDatabaseMode.Overlay) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. ` +
"Skip downloading overlay-base database from cache.",
);
return undefined;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. " +
"Skip downloading overlay-base database from cache.",
);
return undefined;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip downloading overlay-base database from cache.",
);
return undefined;
}
const dbLocation = config.dbLocation;
const codeQlVersion = (await codeql.getVersion()).version;
const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion,
);
logger.info(
"Looking in Actions cache for overlay-base database with " +
`restore key ${cacheRestoreKeyPrefix}`,
);
let databaseDownloadDurationMs = 0;
try {
const databaseDownloadStart = performance.now();
const foundKey = await waitForResultWithTimeLimit(
// This ten-minute limit for the cache restore operation is mainly to
// guard against the possibility that the cache service is unresponsive
// and hangs outside the data download.
//
// Data download (which is normally the most time-consuming part of the
// restore operation) should not run long enough to hit this limit. Even
// for an extremely large 10GB database, at a download speed of 40MB/s
// (see below), the download should complete within five minutes. If we
// do hit this limit, there are likely more serious problems other than
// mere slow download speed.
//
// This is important because we don't want any ongoing file operations
// on the database directory when we do hit this limit. Hitting this
// time limit takes us to a fallback path where we re-initialize the
// database from scratch at dbLocation, and having the cache restore
// operation continue to write into dbLocation in the background would
// really mess things up. We want to hit this limit only in the case
// of a hung cache service, not just slow download speed.
MAX_CACHE_OPERATION_MS,
actionsCache.restoreCache(
[dbLocation],
cacheRestoreKeyPrefix,
undefined,
{
// Azure SDK download (which is the default) uses 128MB segments; see
// https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
// Setting segmentTimeoutInMs to 3000 translates to segment download
// speed of about 40 MB/s, which should be achievable unless the
// download is unreliable (in which case we do want to abort).
segmentTimeoutInMs: 3000,
},
),
() => {
logger.info("Timed out downloading overlay-base database from cache");
},
);
databaseDownloadDurationMs = Math.round(
performance.now() - databaseDownloadStart,
);
if (foundKey === undefined) {
logger.info("No overlay-base database found in Actions cache");
return undefined;
}
logger.info(
`Downloaded overlay-base database in cache with key ${foundKey}`,
);
} catch (error) {
logger.warning(
"Failed to download overlay-base database from cache: " +
`${error instanceof Error ? error.message : String(error)}`,
);
return undefined;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Downloaded overlay-base database is invalid",
);
if (!databaseIsValid) {
logger.warning("Downloaded overlay-base database failed validation");
return undefined;
}
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === undefined) {
logger.info(
"Filesystem error while accessing downloaded overlay-base database",
);
// The problem that warrants reporting download failure is not that we are
// unable to determine the size of the database. Rather, it is that we
// encountered a filesystem error while accessing the database, which
// indicates that an overlay analysis will likely fail.
return undefined;
}
logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
return {
databaseSizeBytes: Math.round(databaseSizeBytes),
databaseDownloadDurationMs,
};
}
/**
* Computes the cache key for saving the overlay-base database to the GitHub
* Actions cache.
*
* The key consists of the restore key prefix (which does not include the
* commit SHA) and the commit SHA of the current checkout.
*/
export async function getCacheSaveKey(
config: Config,
codeQlVersion: string,
checkoutPath: string,
logger: Logger,
): Promise<string> {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`,
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion,
);
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
/**
 * Computes the cache key prefix for restoring the overlay-base database from
 * the GitHub Actions cache.
 *
 * Actions cache supports using multiple restore keys to indicate preference,
 * and this function could in principle take advantage of that feature by
 * returning a list of restore key prefixes. However, since overlay-base
 * databases are built from the default branch and used in PR analysis, it is
 * exceedingly unlikely that the commit SHA will ever be the same.
 *
 * Therefore, this function returns only a single restore key prefix, which does
 * not include the commit SHA. This allows us to restore the most recent
 * compatible overlay-base database.
 */
export async function getCacheRestoreKeyPrefix(
  config: Config,
  codeQlVersion: string,
): Promise<string> {
  // Languages are sorted so the key is independent of configuration order.
  const sortedLanguages = Array.from(config.languages).sort().join("_");
  const keyComponents = {
    automationID: await getAutomationID(),
    // Add more components here as needed in the future
  };
  const componentsHash = createCacheKeyHash(keyComponents);
  // For a cached overlay-base database to be considered compatible for overlay
  // analysis, all components in the cache restore key must match:
  //
  // CACHE_PREFIX: distinguishes overlay-base databases from other cache objects
  // CACHE_VERSION: cache format version
  // componentsHash: hash of additional components (see above for details)
  // languages: the languages included in the overlay-base database
  // codeQlVersion: CodeQL bundle version
  //
  // Technically we can also include languages and codeQlVersion in the
  // componentsHash, but including them explicitly in the cache key makes it
  // easier to debug and understand the cache key structure.
  return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${sortedLanguages}-${codeQlVersion}-`;
}
+2 -276
View File
@@ -1,32 +1,16 @@
import * as fs from "fs";
import * as path from "path";
import * as actionsCache from "@actions/cache";
import test from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "../actions-util";
import * as apiClient from "../api-client";
import { ResolveDatabaseOutput } from "../codeql";
import * as gitUtils from "../git-utils";
import { BuiltInLanguage } from "../languages";
import { getRunnerLogger } from "../logging";
import {
createTestConfig,
mockCodeQLVersion,
setupTests,
} from "../testing-utils";
import * as utils from "../util";
import { createTestConfig, setupTests } from "../testing-utils";
import { withTmpDir } from "../util";
import {
downloadOverlayBaseDatabaseFromCache,
getCacheRestoreKeyPrefix,
getCacheSaveKey,
OverlayDatabaseMode,
writeBaseDatabaseOidsFile,
writeOverlayChangesFile,
} from ".";
import { writeBaseDatabaseOidsFile, writeOverlayChangesFile } from ".";
setupTests(test);
@@ -344,261 +328,3 @@ test.serial(
});
},
);
interface DownloadOverlayBaseDatabaseTestCase {
overlayDatabaseMode: OverlayDatabaseMode;
useOverlayDatabaseCaching: boolean;
isInTestMode: boolean;
restoreCacheResult: string | undefined | Error;
hasBaseDatabaseOidsFile: boolean;
tryGetFolderBytesSucceeds: boolean;
codeQLVersion: string;
resolveDatabaseOutput: ResolveDatabaseOutput | Error;
}
const defaultDownloadTestCase: DownloadOverlayBaseDatabaseTestCase = {
overlayDatabaseMode: OverlayDatabaseMode.Overlay,
useOverlayDatabaseCaching: true,
isInTestMode: false,
restoreCacheResult: "cache-key",
hasBaseDatabaseOidsFile: true,
tryGetFolderBytesSucceeds: true,
codeQLVersion: "2.20.5",
resolveDatabaseOutput: { overlayBaseSpecifier: "20250626:XXX" },
};
const testDownloadOverlayBaseDatabaseFromCache = test.macro({
exec: async (
t,
_title: string,
partialTestCase: Partial<DownloadOverlayBaseDatabaseTestCase>,
expectDownloadSuccess: boolean,
) => {
await withTmpDir(async (tmpDir) => {
const dbLocation = path.join(tmpDir, "db");
await fs.promises.mkdir(dbLocation, { recursive: true });
const logger = getRunnerLogger(true);
const testCase = { ...defaultDownloadTestCase, ...partialTestCase };
const config = createTestConfig({
dbLocation,
languages: [BuiltInLanguage.java],
});
config.overlayDatabaseMode = testCase.overlayDatabaseMode;
config.useOverlayDatabaseCaching = testCase.useOverlayDatabaseCaching;
if (testCase.hasBaseDatabaseOidsFile) {
const baseDatabaseOidsFile = path.join(
dbLocation,
"base-database-oids.json",
);
await fs.promises.writeFile(baseDatabaseOidsFile, JSON.stringify({}));
}
const stubs: sinon.SinonStub[] = [];
const getAutomationIDStub = sinon
.stub(apiClient, "getAutomationID")
.resolves("test-automation-id/");
stubs.push(getAutomationIDStub);
const isInTestModeStub = sinon
.stub(utils, "isInTestMode")
.returns(testCase.isInTestMode);
stubs.push(isInTestModeStub);
if (testCase.restoreCacheResult instanceof Error) {
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.rejects(testCase.restoreCacheResult);
stubs.push(restoreCacheStub);
} else {
const restoreCacheStub = sinon
.stub(actionsCache, "restoreCache")
.resolves(testCase.restoreCacheResult);
stubs.push(restoreCacheStub);
}
const tryGetFolderBytesStub = sinon
.stub(utils, "tryGetFolderBytes")
.resolves(testCase.tryGetFolderBytesSucceeds ? 1024 * 1024 : undefined);
stubs.push(tryGetFolderBytesStub);
const codeql = mockCodeQLVersion(testCase.codeQLVersion);
if (testCase.resolveDatabaseOutput instanceof Error) {
const resolveDatabaseStub = sinon
.stub(codeql, "resolveDatabase")
.rejects(testCase.resolveDatabaseOutput);
stubs.push(resolveDatabaseStub);
} else {
const resolveDatabaseStub = sinon
.stub(codeql, "resolveDatabase")
.resolves(testCase.resolveDatabaseOutput);
stubs.push(resolveDatabaseStub);
}
try {
const result = await downloadOverlayBaseDatabaseFromCache(
codeql,
config,
logger,
);
if (expectDownloadSuccess) {
t.truthy(result);
} else {
t.is(result, undefined);
}
} finally {
for (const stub of stubs) {
stub.restore();
}
}
});
},
title: (_, title) => `downloadOverlayBaseDatabaseFromCache: ${title}`,
});
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns stats when successful",
{},
true,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when mode is OverlayDatabaseMode.OverlayBase",
{
overlayDatabaseMode: OverlayDatabaseMode.OverlayBase,
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when mode is OverlayDatabaseMode.None",
{
overlayDatabaseMode: OverlayDatabaseMode.None,
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when caching is disabled",
{
useOverlayDatabaseCaching: false,
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined in test mode",
{
isInTestMode: true,
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when cache miss",
{
restoreCacheResult: undefined,
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when download fails",
{
restoreCacheResult: new Error("Download failed"),
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when downloaded database is invalid",
{
hasBaseDatabaseOidsFile: false,
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when downloaded database doesn't have an overlayBaseSpecifier",
{
resolveDatabaseOutput: {},
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when resolving database metadata fails",
{
resolveDatabaseOutput: new Error("Failed to resolve database metadata"),
},
false,
);
test.serial(
testDownloadOverlayBaseDatabaseFromCache,
"returns undefined when filesystem error occurs",
{
tryGetFolderBytesSucceeds: false,
},
false,
);
// Pins the exact cache key format. Accidental changes to the key generation
// logic would silently invalidate all previously saved overlay-base database
// caches, so this test fails loudly instead.
test.serial("overlay-base database cache keys remain stable", async (t) => {
  const testLogger = getRunnerLogger(true);
  const testConfig = createTestConfig({ languages: ["python", "javascript"] });
  const version = "2.23.0";
  const sha = "abc123def456";

  // Fix every external input that feeds into the cache keys.
  sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
  sinon.stub(gitUtils, "getCommitOid").resolves(sha);
  sinon.stub(actionsUtil, "getWorkflowRunID").returns(12345);
  sinon.stub(actionsUtil, "getWorkflowRunAttempt").returns(1);

  const expectedSaveKey =
    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123def456-12345-1";
  const actualSaveKey = await getCacheSaveKey(
    testConfig,
    version,
    "checkout-path",
    testLogger,
  );
  t.is(
    actualSaveKey,
    expectedSaveKey,
    "Cache save key changed unexpectedly. " +
      "This may indicate breaking changes in the cache key generation logic.",
  );

  const expectedRestoreKeyPrefix =
    "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-";
  const actualRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
    testConfig,
    version,
  );
  t.is(
    actualRestoreKeyPrefix,
    expectedRestoreKeyPrefix,
    "Cache restore key prefix changed unexpectedly. " +
      "This may indicate breaking changes in the cache key generation logic.",
  );

  // The restore prefix must always be a prefix of the save key, since restore
  // matches saved entries by prefix.
  t.true(
    actualSaveKey.startsWith(actualRestoreKeyPrefix),
    `Expected save key "${actualSaveKey}" to start with restore key prefix "${actualRestoreKeyPrefix}"`,
  );
});
+4 -431
View File
@@ -1,37 +1,12 @@
import * as fs from "fs";
import * as path from "path";
import * as actionsCache from "@actions/cache";
import * as actionsUtil from "../actions-util";
import {
getOptionalInput,
getRequiredInput,
getTemporaryDirectory,
getWorkflowRunAttempt,
getWorkflowRunID,
} from "../actions-util";
import { getAutomationID } from "../api-client";
import { createCacheKeyHash } from "../caching-utils";
import { type CodeQL } from "../codeql";
import { getOptionalInput, getTemporaryDirectory } from "../actions-util";
import { type Config } from "../config-utils";
import { getCommitOid, getFileOidsUnderPath, getGitRoot } from "../git-utils";
import { Logger, withGroupAsync } from "../logging";
import {
CleanupLevel,
getBaseDatabaseOidsFilePath,
getCodeQLDatabasePath,
getErrorMessage,
isInTestMode,
tryGetFolderBytes,
waitForResultWithTimeLimit,
} from "../util";
export enum OverlayDatabaseMode {
Overlay = "overlay",
OverlayBase = "overlay-base",
None = "none",
}
import { getFileOidsUnderPath, getGitRoot } from "../git-utils";
import { Logger } from "../logging";
import { getBaseDatabaseOidsFilePath } from "../util";
export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8";
@@ -45,23 +20,6 @@ export const CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
export const CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
export const CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
/**
 * The maximum (uncompressed) size of the overlay base database that we will
 * upload. By default, the Actions Cache has an overall capacity of 10 GB, and
 * the Actions Cache client library uses zstd compression.
 *
 * Ideally we would apply a size limit to the compressed overlay-base database,
 * but we cannot do so because compression is handled transparently by the
 * Actions Cache client library. Instead we place a limit on the uncompressed
 * size of the overlay-base database.
 *
 * Assuming 2.5:1 compression ratio, the 7.5 GB limit on uncompressed data would
 * translate to a limit of around 3 GB after compression.
 */
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
// The same limit expressed in bytes (decimal megabytes: 1 MB = 1,000,000 bytes).
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES =
  OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1_000_000;
/**
* Writes a JSON file containing Git OIDs for all tracked files (represented
* by path relative to the source root) under the source root. The file is
@@ -235,388 +193,3 @@ async function getDiffRangeFilePaths(
.filter((rel) => !rel.startsWith(".."));
return [...new Set(relativePaths)];
}
// Constants for database caching
// Cache format version; embedded in every cache key (see
// getCacheRestoreKeyPrefix) so that entries written under a different format
// are never restored.
const CACHE_VERSION = 1;
// Key prefix that distinguishes overlay-base database entries from other
// objects in the Actions cache.
const CACHE_PREFIX = "codeql-overlay-base-database";
// The purpose of this ten-minute limit is to guard against the possibility
// that the cache service is unresponsive, which would otherwise cause the
// entire action to hang. Normally we expect cache operations to complete
// within two minutes.
const MAX_CACHE_OPERATION_MS = 600_000;
/**
* Checks that the overlay-base database is valid by checking for the
* existence of the base database OIDs file.
*
* @param config The configuration object
* @param logger The logger instance
* @param warningPrefix Prefix for the check failure warning message
* @returns True if the verification succeeded, false otherwise
*/
async function checkOverlayBaseDatabase(
codeql: CodeQL,
config: Config,
logger: Logger,
warningPrefix: string,
): Promise<boolean> {
// An overlay-base database should contain the base database OIDs file.
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`,
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (
resolveDatabaseOutput === undefined ||
!("overlayBaseSpecifier" in resolveDatabaseOutput)
) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ` +
`${resolveDatabaseOutput.overlayBaseSpecifier}`,
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
/**
* Uploads the overlay-base database to the GitHub Actions cache. If conditions
* for uploading are not met, the function does nothing and returns false.
*
* This function uses the `checkout_path` input to determine the repository path
* and works only when called from `analyze` or `upload-sarif`.
*
* @param codeql The CodeQL instance
* @param config The configuration object
* @param logger The logger instance
* @returns A promise that resolves to true if the upload was performed and
* successfully completed, or false otherwise
*/
export async function cleanupAndUploadOverlayBaseDatabaseToCache(
codeql: CodeQL,
config: Config,
logger: Logger,
): Promise<boolean> {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== OverlayDatabaseMode.OverlayBase) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. ` +
"Skip uploading overlay-base database to cache.",
);
return false;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. " +
"Skip uploading overlay-base database to cache.",
);
return false;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip uploading overlay-base database to cache.",
);
return false;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Abort uploading overlay-base database to cache",
);
if (!databaseIsValid) {
return false;
}
// Clean up the database using the overlay cleanup level.
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, CleanupLevel.Overlay);
});
const dbLocation = config.dbLocation;
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === undefined) {
logger.warning(
"Failed to determine database size. " +
"Skip uploading overlay-base database to cache.",
);
return false;
}
if (databaseSizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
const databaseSizeMB = Math.round(databaseSizeBytes / 1_000_000);
logger.warning(
`Database size (${databaseSizeMB} MB) ` +
`exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). ` +
"Skip uploading overlay-base database to cache.",
);
return false;
}
const codeQlVersion = (await codeql.getVersion()).version;
const checkoutPath = getRequiredInput("checkout_path");
const cacheSaveKey = await getCacheSaveKey(
config,
codeQlVersion,
checkoutPath,
logger,
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`,
);
try {
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS,
actionsCache.saveCache([dbLocation], cacheSaveKey),
() => {},
);
if (cacheId === undefined) {
logger.warning("Timed out while uploading overlay-base database");
return false;
}
} catch (error) {
logger.warning(
"Failed to upload overlay-base database to cache: " +
`${error instanceof Error ? error.message : String(error)}`,
);
return false;
}
logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
return true;
}
/** Statistics about a successfully downloaded overlay-base database. */
export interface OverlayBaseDatabaseDownloadStats {
  /** On-disk (uncompressed) size of the downloaded database folder, in bytes. */
  databaseSizeBytes: number;
  /** Wall-clock time spent restoring the database from the cache, in milliseconds. */
  databaseDownloadDurationMs: number;
}
/**
 * Downloads the overlay-base database from the GitHub Actions cache. If conditions
 * for downloading are not met, the function does nothing and returns undefined.
 *
 * @param codeql The CodeQL instance
 * @param config The configuration object
 * @param logger The logger instance
 * @returns A promise that resolves to download statistics if an overlay-base
 * database was successfully downloaded, or undefined if the download was
 * either not performed or failed.
 */
export async function downloadOverlayBaseDatabaseFromCache(
  codeql: CodeQL,
  config: Config,
  logger: Logger,
): Promise<OverlayBaseDatabaseDownloadStats | undefined> {
  // Guard clauses: download only for overlay analysis with caching enabled,
  // and not in test mode.
  const overlayDatabaseMode = config.overlayDatabaseMode;
  if (overlayDatabaseMode !== OverlayDatabaseMode.Overlay) {
    logger.debug(
      `Overlay database mode is ${overlayDatabaseMode}. ` +
        "Skip downloading overlay-base database from cache.",
    );
    return undefined;
  }
  if (!config.useOverlayDatabaseCaching) {
    logger.debug(
      "Overlay database caching is disabled. " +
        "Skip downloading overlay-base database from cache.",
    );
    return undefined;
  }
  if (isInTestMode()) {
    logger.debug(
      "In test mode. Skip downloading overlay-base database from cache.",
    );
    return undefined;
  }

  const dbLocation = config.dbLocation;
  const codeQlVersion = (await codeql.getVersion()).version;
  const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
    config,
    codeQlVersion,
  );

  logger.info(
    "Looking in Actions cache for overlay-base database with " +
      `restore key ${cacheRestoreKeyPrefix}`,
  );

  let databaseDownloadDurationMs = 0;
  try {
    const databaseDownloadStart = performance.now();
    const foundKey = await waitForResultWithTimeLimit(
      // This ten-minute limit for the cache restore operation is mainly to
      // guard against the possibility that the cache service is unresponsive
      // and hangs outside the data download.
      //
      // Data download (which is normally the most time-consuming part of the
      // restore operation) should not run long enough to hit this limit. Even
      // for an extremely large 10GB database, at a download speed of 40MB/s
      // (see below), the download should complete within five minutes. If we
      // do hit this limit, there are likely more serious problems other than
      // mere slow download speed.
      //
      // This is important because we don't want any ongoing file operations
      // on the database directory when we do hit this limit. Hitting this
      // time limit takes us to a fallback path where we re-initialize the
      // database from scratch at dbLocation, and having the cache restore
      // operation continue to write into dbLocation in the background would
      // really mess things up. We want to hit this limit only in the case
      // of a hung cache service, not just slow download speed.
      MAX_CACHE_OPERATION_MS,
      actionsCache.restoreCache(
        [dbLocation],
        cacheRestoreKeyPrefix,
        undefined,
        {
          // Azure SDK download (which is the default) uses 128MB segments; see
          // https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
          // Setting segmentTimeoutInMs to 3000 translates to segment download
          // speed of about 40 MB/s, which should be achievable unless the
          // download is unreliable (in which case we do want to abort).
          segmentTimeoutInMs: 3000,
        },
      ),
      () => {
        logger.info("Timed out downloading overlay-base database from cache");
      },
    );
    databaseDownloadDurationMs = Math.round(
      performance.now() - databaseDownloadStart,
    );

    if (foundKey === undefined) {
      logger.info("No overlay-base database found in Actions cache");
      return undefined;
    }
    logger.info(
      `Downloaded overlay-base database in cache with key ${foundKey}`,
    );
  } catch (error) {
    logger.warning(
      "Failed to download overlay-base database from cache: " +
        `${error instanceof Error ? error.message : String(error)}`,
    );
    return undefined;
  }

  // A cache hit alone is not enough: the restored database must pass the same
  // validity checks applied before upload.
  const databaseIsValid = await checkOverlayBaseDatabase(
    codeql,
    config,
    logger,
    "Downloaded overlay-base database is invalid",
  );
  if (!databaseIsValid) {
    logger.warning("Downloaded overlay-base database failed validation");
    return undefined;
  }

  const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
  if (databaseSizeBytes === undefined) {
    logger.info(
      "Filesystem error while accessing downloaded overlay-base database",
    );
    // The problem that warrants reporting download failure is not that we are
    // unable to determine the size of the database. Rather, it is that we
    // encountered a filesystem error while accessing the database, which
    // indicates that an overlay analysis will likely fail.
    return undefined;
  }

  logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
  return {
    databaseSizeBytes: Math.round(databaseSizeBytes),
    databaseDownloadDurationMs,
  };
}
/**
* Computes the cache key for saving the overlay-base database to the GitHub
* Actions cache.
*
* The key consists of the restore key prefix (which does not include the
* commit SHA) and the commit SHA of the current checkout.
*/
export async function getCacheSaveKey(
config: Config,
codeQlVersion: string,
checkoutPath: string,
logger: Logger,
): Promise<string> {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`,
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion,
);
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
/**
 * Computes the cache key prefix for restoring the overlay-base database from
 * the GitHub Actions cache.
 *
 * Actions cache supports using multiple restore keys to indicate preference,
 * and this function could in principle take advantage of that feature by
 * returning a list of restore key prefixes. However, since overlay-base
 * databases are built from the default branch and used in PR analysis, it is
 * exceedingly unlikely that the commit SHA will ever be the same.
 *
 * Therefore, this function returns only a single restore key prefix, which does
 * not include the commit SHA. This allows us to restore the most recent
 * compatible overlay-base database.
 */
export async function getCacheRestoreKeyPrefix(
  config: Config,
  codeQlVersion: string,
): Promise<string> {
  // Components that do not need to be human-readable in the key are folded
  // into a single hash.
  const componentsHash = createCacheKeyHash({
    automationID: await getAutomationID(),
    // Add more components here as needed in the future
  });

  // Sorting makes the key independent of the order in which languages appear
  // in the configuration.
  const sortedLanguages = [...config.languages].sort().join("_");

  // For a cached overlay-base database to be considered compatible for overlay
  // analysis, all components in the cache restore key must match:
  //
  // CACHE_PREFIX: distinguishes overlay-base databases from other cache objects
  // CACHE_VERSION: cache format version
  // componentsHash: hash of additional components (see above for details)
  // languages: the languages included in the overlay-base database
  // codeQlVersion: CodeQL bundle version
  //
  // Technically we can also include languages and codeQlVersion in the
  // componentsHash, but including them explicitly in the cache key makes it
  // easier to debug and understand the cache key structure.
  return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${sortedLanguages}-${codeQlVersion}-`;
}
+5
View File
@@ -0,0 +1,5 @@
/** The overlay-related mode in which a CodeQL database is built and analyzed. */
export enum OverlayDatabaseMode {
  /** Perform an overlay analysis on top of an overlay-base database. */
  Overlay = "overlay",
  /** Build an overlay-base database for use by later overlay analyses. */
  OverlayBase = "overlay-base",
  /** No overlay processing. */
  None = "none",
}
+1 -1
View File
@@ -18,7 +18,7 @@ import { DocUrl } from "./doc-url";
import { EnvVar } from "./environment";
import { getRef } from "./git-utils";
import { Logger } from "./logging";
import { OverlayBaseDatabaseDownloadStats } from "./overlay";
import { OverlayBaseDatabaseDownloadStats } from "./overlay/caching";
import { getRepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
+1 -1
View File
@@ -21,7 +21,7 @@ import {
FeatureEnablement,
} from "./feature-flags";
import { Logger } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import { OverlayDatabaseMode } from "./overlay/overlay-database-mode";
import {
DEFAULT_DEBUG_ARTIFACT_NAME,
DEFAULT_DEBUG_DATABASE_NAME,