Refactoring: Introduce overlay/caching.ts

This commit is contained in:
Henry Mercer
2026-04-10 14:19:10 +01:00
parent a26cb68cc7
commit 4e8c9ce33c
28 changed files with 6087 additions and 6097 deletions
+9 -12
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path9 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -162086,7 +162086,6 @@ var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs2 = __toESM(require("fs"));
@@ -162282,8 +162281,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -162607,10 +162604,10 @@ var featureConfig = {
};
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -163292,7 +163289,7 @@ var core12 = __toESM(require_core());
// src/dependency-caching.ts
var import_path = require("path");
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
function getJavaTempDependencyDir() {
return (0, import_path.join)(getTemporaryDirectory(), "codeql_java", "repository");
+219 -216
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var os5 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs19.existsSync(filePath)) {
if (!fs20.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
fs20.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
_a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs20.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs19.promises.readlink(fsPath);
const result = yield fs20.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs19.constants.O_RDONLY;
exports2.READONLY = fs20.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -50297,7 +50297,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core17 = __importStar2(require_core());
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path16 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50351,7 +50351,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core17.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs19.promises.lstat(searchPath));
yield __await2(fs20.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50385,7 +50385,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
const childItems = (yield __await2(fs20.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50420,7 +50420,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs19.promises.stat(item.path);
stats = yield fs20.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50432,10 +50432,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs19.promises.lstat(item.path);
stats = yield fs20.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs19.promises.realpath(item.path);
const realPath = yield fs20.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50544,7 +50544,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core17 = __importStar2(require_core());
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path16 = __importStar2(require("path"));
@@ -50567,13 +50567,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs19.statSync(file).isDirectory()) {
if (fs20.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs19.createReadStream(file), hash2);
yield pipeline(fs20.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -51948,7 +51948,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -51977,7 +51977,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs19.statSync(filePath).size;
return fs20.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52015,7 +52015,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs19.unlink)(filePath);
return util.promisify(fs20.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52057,7 +52057,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs20.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92617,7 +92617,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92728,7 +92728,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs19.createWriteStream(archivePath);
const writeStream = fs20.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92753,7 +92753,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs19.promises.open(archivePath, "w");
const archiveDescriptor = yield fs20.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92869,7 +92869,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs19.openSync(archivePath, "w");
const fd = fs20.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92887,12 +92887,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs19.writeFileSync(fd, result);
fs20.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs19.closeSync(fd);
fs20.closeSync(fd);
}
}
});
@@ -93214,7 +93214,7 @@ var require_cacheHttpClient = __commonJS({
var core17 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93349,7 +93349,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs19.openSync(archivePath, "r");
const fd = fs20.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93363,7 +93363,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs20.createReadStream(archivePath, {
fd,
start,
end,
@@ -93374,7 +93374,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs19.closeSync(fd);
fs20.closeSync(fd);
}
return;
});
@@ -99330,7 +99330,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os5 = require("os");
var cp = require("child_process");
var fs19 = require("fs");
var fs20 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os5.platform();
@@ -99392,10 +99392,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs19.existsSync(lsbReleaseFile)) {
contents = fs19.readFileSync(lsbReleaseFile).toString();
} else if (fs19.existsSync(osReleaseFile)) {
contents = fs19.readFileSync(osReleaseFile).toString();
if (fs20.existsSync(lsbReleaseFile)) {
contents = fs20.readFileSync(lsbReleaseFile).toString();
} else if (fs20.existsSync(osReleaseFile)) {
contents = fs20.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99604,7 +99604,7 @@ var require_tool_cache = __commonJS({
var core17 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs19 = __importStar2(require("fs"));
var fs20 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os5 = __importStar2(require("os"));
var path16 = __importStar2(require("path"));
@@ -99650,7 +99650,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs19.existsSync(dest)) {
if (fs20.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99674,7 +99674,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs19.createWriteStream(dest));
yield pipeline(readStream, fs20.createWriteStream(dest));
core17.debug("download complete");
succeeded = true;
return dest;
@@ -99886,11 +99886,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core17.debug(`Caching tool ${tool} ${version} ${arch2}`);
core17.debug(`source dir: ${sourceDir}`);
if (!fs19.statSync(sourceDir).isDirectory()) {
if (!fs20.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs19.readdirSync(sourceDir)) {
for (const itemName of fs20.readdirSync(sourceDir)) {
const s = path16.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -99904,7 +99904,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core17.debug(`Caching tool ${tool} ${version} ${arch2}`);
core17.debug(`source file: ${sourceFile}`);
if (!fs19.statSync(sourceFile).isFile()) {
if (!fs20.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99933,7 +99933,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core17.debug(`checking cache: ${cachePath}`);
if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) {
if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) {
core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99946,12 +99946,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os5.arch();
const toolPath = path16.join(_getCacheDirectory(), toolName);
if (fs19.existsSync(toolPath)) {
const children = fs19.readdirSync(toolPath);
if (fs20.existsSync(toolPath)) {
const children = fs20.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path16.join(toolPath, child, arch2 || "");
if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) {
if (fs20.existsSync(fullPath) && fs20.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -100022,7 +100022,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs19.writeFileSync(markerPath, "");
fs20.writeFileSync(markerPath, "");
core17.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103529,7 +103529,7 @@ __export(analyze_action_exports, {
runPromise: () => runPromise
});
module.exports = __toCommonJS(analyze_action_exports);
var fs18 = __toESM(require("fs"));
var fs19 = __toESM(require("fs"));
var import_path4 = __toESM(require("path"));
var import_perf_hooks3 = require("perf_hooks");
var core16 = __toESM(require_core());
@@ -103558,21 +103558,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs19 = options.fs || await import("node:fs/promises");
const fs20 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs20.lstat(itemPath, { bigint: true }) : await fs20.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs20.readdir(itemPath) : await fs20.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -107647,7 +107647,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path5 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -107877,8 +107876,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -107980,141 +107977,6 @@ async function getDiffRangeFilePaths(sourceRoot, logger) {
).filter((rel) => !rel.startsWith(".."));
return [...new Set(relativePaths)];
}
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs4.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay-base" /* OverlayBase */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip uploading overlay-base database to cache.`
);
return false;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip uploading overlay-base database to cache."
);
return false;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip uploading overlay-base database to cache."
);
return false;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Abort uploading overlay-base database to cache"
);
if (!databaseIsValid) {
return false;
}
await withGroupAsync("Cleaning up databases", async () => {
await codeql.databaseCleanupCluster(config, "overlay" /* Overlay */);
});
const dbLocation = config.dbLocation;
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.warning(
"Failed to determine database size. Skip uploading overlay-base database to cache."
);
return false;
}
if (databaseSizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
const databaseSizeMB = Math.round(databaseSizeBytes / 1e6);
logger.warning(
`Database size (${databaseSizeMB} MB) exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). Skip uploading overlay-base database to cache.`
);
return false;
}
const codeQlVersion = (await codeql.getVersion()).version;
const checkoutPath = getRequiredInput("checkout_path");
const cacheSaveKey = await getCacheSaveKey(
config,
codeQlVersion,
checkoutPath,
logger
);
logger.info(
`Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`
);
try {
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS,
actionsCache.saveCache([dbLocation], cacheSaveKey),
() => {
}
);
if (cacheId === void 0) {
logger.warning("Timed out while uploading overlay-base database");
return false;
}
} catch (error3) {
logger.warning(
`Failed to upload overlay-base database to cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return false;
}
logger.info(`Successfully uploaded overlay-base database from ${dbLocation}`);
return true;
}
async function getCacheSaveKey(config, codeQlVersion, checkoutPath, logger) {
let runId = 1;
let attemptId = 1;
try {
runId = getWorkflowRunID();
attemptId = getWorkflowRunAttempt();
} catch (e) {
logger.warning(
`Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(e)}`
);
}
const sha = await getCommitOid(checkoutPath);
const restoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
return `${restoreKeyPrefix}${sha}-${runId}-${attemptId}`;
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
const languages = [...config.languages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -108681,14 +108543,14 @@ ${jsonContents}`
}
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var CACHE_VERSION2 = 1;
var actionsCache2 = __toESM(require_cache5());
var CACHE_VERSION = 1;
var CODEQL_TRAP_CACHE_PREFIX = "codeql-trap";
var MINIMUM_CACHE_MB_TO_UPLOAD = 10;
var MAX_CACHE_OPERATION_MS2 = 12e4;
var MAX_CACHE_OPERATION_MS = 12e4;
async function uploadTrapCaches(codeql, config, logger) {
if (!await isAnalyzingDefaultBranch()) return false;
for (const language of config.languages) {
@@ -108714,8 +108576,8 @@ async function uploadTrapCaches(codeql, config, logger) {
);
logger.info(`Uploading TRAP cache to Actions cache with key ${key}`);
await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS2,
actionsCache3.saveCache([cacheDir], key),
MAX_CACHE_OPERATION_MS,
actionsCache2.saveCache([cacheDir], key),
() => {
logger.info(
`Timed out waiting for TRAP cache for ${language} to upload, will continue without uploading`
@@ -108805,7 +108667,7 @@ async function cacheKey(codeql, language, baseSha) {
return `${await cachePrefix(codeql, language)}${baseSha}`;
}
async function cachePrefix(codeql, language) {
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION2}-${(await codeql.getVersion()).version}-${language}-`;
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION}-${(await codeql.getVersion()).version}-${language}-`;
}
// src/config-utils.ts
@@ -110543,7 +110405,7 @@ async function runAutobuild(config, language, logger) {
// src/dependency-caching.ts
var os3 = __toESM(require("os"));
var import_path2 = require("path");
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
var CODEQL_DEPENDENCY_CACHE_VERSION = 1;
@@ -110681,7 +110543,7 @@ async function uploadDependencyCaches(codeql, features, config, logger) {
);
try {
const start = performance.now();
await actionsCache4.saveCache(
await actionsCache3.saveCache(
await cacheConfig.getDependencyPaths(codeql, features),
key
);
@@ -110693,7 +110555,7 @@ async function uploadDependencyCaches(codeql, features, config, logger) {
upload_duration_ms
});
} catch (error3) {
if (error3 instanceof actionsCache4.ReserveCacheError) {
if (error3 instanceof actionsCache3.ReserveCacheError) {
logger.info(
`Not uploading cache for ${language}, because ${key} is already in use.`
);
@@ -111259,6 +111121,147 @@ async function uploadBundledDatabase(repositoryNwo, language, commitOid, bundled
}
}
// src/overlay/caching.ts
var fs15 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS2 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs15.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
// Cleans up the overlay-base database and uploads it to the Actions cache.
// Returns true only when an upload actually succeeded; every skip/abort path
// logs its reason and returns false.
async function cleanupAndUploadOverlayBaseDatabaseToCache(codeql, config, logger) {
  const mode = config.overlayDatabaseMode;
  // Only overlay-base databases are cached.
  if (mode !== "overlay-base" /* OverlayBase */) {
    logger.debug(
      `Overlay database mode is ${mode}. Skip uploading overlay-base database to cache.`
    );
    return false;
  }
  if (!config.useOverlayDatabaseCaching) {
    logger.debug(
      "Overlay database caching is disabled. Skip uploading overlay-base database to cache."
    );
    return false;
  }
  if (isInTestMode()) {
    logger.debug(
      "In test mode. Skip uploading overlay-base database to cache."
    );
    return false;
  }
  // Verify the database looks sane before spending time on cleanup/upload.
  const isValid = await checkOverlayBaseDatabase(
    codeql,
    config,
    logger,
    "Abort uploading overlay-base database to cache"
  );
  if (!isValid) {
    return false;
  }
  // Shrink the database before measuring its size for the upload limit check.
  await withGroupAsync("Cleaning up databases", async () => {
    await codeql.databaseCleanupCluster(config, "overlay" /* Overlay */);
  });
  const databasePath = config.dbLocation;
  const sizeBytes = await tryGetFolderBytes(databasePath, logger);
  if (sizeBytes === void 0) {
    logger.warning(
      "Failed to determine database size. Skip uploading overlay-base database to cache."
    );
    return false;
  }
  if (sizeBytes > OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES) {
    const databaseSizeMB = Math.round(sizeBytes / 1e6);
    logger.warning(
      `Database size (${databaseSizeMB} MB) exceeds maximum upload size (${OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB} MB). Skip uploading overlay-base database to cache.`
    );
    return false;
  }
  // The save key embeds the CLI version, commit SHA, and workflow run/attempt
  // so each run produces a distinct cache entry.
  const version = (await codeql.getVersion()).version;
  const checkout = getRequiredInput("checkout_path");
  const cacheSaveKey = await getCacheSaveKey(config, version, checkout, logger);
  logger.info(
    `Uploading overlay-base database to Actions cache with key ${cacheSaveKey}`
  );
  try {
    // Bound the upload; an undefined result means the time limit elapsed.
    const cacheId = await waitForResultWithTimeLimit(
      MAX_CACHE_OPERATION_MS2,
      actionsCache4.saveCache([databasePath], cacheSaveKey),
      () => {
      }
    );
    if (cacheId === void 0) {
      logger.warning("Timed out while uploading overlay-base database");
      return false;
    }
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    logger.warning(
      `Failed to upload overlay-base database to cache: ${reason}`
    );
    return false;
  }
  logger.info(`Successfully uploaded overlay-base database from ${databasePath}`);
  return true;
}
// Builds the cache key used to *save* an overlay-base database: the restore
// prefix followed by the commit SHA and the workflow run/attempt IDs.
async function getCacheSaveKey(config, codeQlVersion, checkoutPath, logger) {
  // Best effort: fall back to 1/1 if the workflow context is unavailable,
  // logging a warning instead of failing the upload.
  let runId = 1;
  let attemptId = 1;
  try {
    runId = getWorkflowRunID();
    attemptId = getWorkflowRunAttempt();
  } catch (err) {
    logger.warning(
      `Failed to get workflow run ID or attempt ID. Reason: ${getErrorMessage(err)}`
    );
  }
  const commitSha = await getCommitOid(checkoutPath);
  const prefix = await getCacheRestoreKeyPrefix(config, codeQlVersion);
  return `${prefix}${commitSha}-${runId}-${attemptId}`;
}
// Computes the shared key prefix used both to save and restore overlay-base
// databases: cache prefix, cache version, a hash of key components, the sorted
// language list, and the CodeQL version, each separated by "-".
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
  // Sort a copy so the prefix is stable regardless of configured language order.
  const sortedLanguages = Array.from(config.languages).sort().join("_");
  // Hash of structured key components; extend this object to add more
  // components in the future.
  const componentsHash = createCacheKeyHash({
    automationID: await getAutomationID()
  });
  return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${sortedLanguages}-${codeQlVersion}-`;
}
// src/status-report.ts
var os4 = __toESM(require("os"));
var core13 = __toESM(require_core());
@@ -111467,7 +111470,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs17 = __toESM(require("fs"));
var fs18 = __toESM(require("fs"));
var path14 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -111475,7 +111478,7 @@ var core15 = __toESM(require_core());
var jsonschema2 = __toESM(require_lib2());
// src/fingerprints.ts
var fs15 = __toESM(require("fs"));
var fs16 = __toESM(require("fs"));
var import_path3 = __toESM(require("path"));
// node_modules/long/index.js
@@ -112463,7 +112466,7 @@ async function hash(callback, filepath) {
}
updateHash(current);
};
const readStream = fs15.createReadStream(filepath, "utf8");
const readStream = fs16.createReadStream(filepath, "utf8");
for await (const data of readStream) {
for (let i = 0; i < data.length; ++i) {
processCharacter(data.charCodeAt(i));
@@ -112538,11 +112541,11 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
if (!import_path3.default.isAbsolute(uri)) {
uri = srcRootPrefix + uri;
}
if (!fs15.existsSync(uri)) {
if (!fs16.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return void 0;
}
if (fs15.statSync(uri).isDirectory()) {
if (fs16.statSync(uri).isDirectory()) {
logger.debug(`Unable to compute fingerprint for directory: ${uri}`);
return void 0;
}
@@ -112631,7 +112634,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
}
// src/sarif/index.ts
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
@@ -112646,7 +112649,7 @@ function getToolNames(sarifFile) {
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs16.readFileSync(sarifFilePath, "utf8"));
return JSON.parse(fs17.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
@@ -112779,8 +112782,8 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path14.resolve(tempDir, "combined-sarif");
fs17.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs17.mkdtempSync(path14.resolve(baseTempDir, "output-"));
fs18.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs18.mkdtempSync(path14.resolve(baseTempDir, "output-"));
const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
@@ -112822,7 +112825,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs17.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs18.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -112856,7 +112859,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs17.readdirSync(dir, { withFileTypes: true });
const entries = fs18.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path14.resolve(dir, entry.name));
@@ -112869,7 +112872,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs17.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs18.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -113004,7 +113007,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs17.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs18.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -113108,9 +113111,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs17.existsSync(outputDir)) {
fs17.mkdirSync(outputDir, { recursive: true });
} else if (!fs17.lstatSync(outputDir).isDirectory()) {
if (!fs18.existsSync(outputDir)) {
fs18.mkdirSync(outputDir, { recursive: true });
} else if (!fs18.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -113120,7 +113123,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs17.writeFileSync(outputFile, sarifPayload);
fs18.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -113352,7 +113355,7 @@ function doesGoExtractionOutputExist(config) {
"go" /* go */
);
const trapDirectory = import_path4.default.join(golangDbDirectory, "trap", "go" /* go */);
return fs18.existsSync(trapDirectory) && fs18.readdirSync(trapDirectory).some(
return fs19.existsSync(trapDirectory) && fs19.readdirSync(trapDirectory).some(
(fileName) => [
".trap",
".trap.gz",
+8 -11
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache3;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
var core15 = __importStar2(require_core());
var path9 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache3(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -104140,7 +104140,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs2 = __toESM(require("fs"));
@@ -104336,8 +104335,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -104982,10 +104979,10 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) {
}
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
+12 -15
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core19 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core19 = __importStar2(require_core());
var path19 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core19.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core19.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -165564,7 +165564,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path5 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -165794,8 +165793,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -166469,7 +166466,7 @@ ${jsonContents}`
// src/overlay/status.ts
var fs7 = __toESM(require("fs"));
var path7 = __toESM(require("path"));
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
var MAX_CACHE_OPERATION_MS = 3e4;
var STATUS_FILE_NAME = "overlay-status.json";
function getStatusFilePath(languages) {
@@ -166500,7 +166497,7 @@ async function saveOverlayStatus(codeql, languages, diskUsage, status, logger) {
await fs7.promises.writeFile(statusFile, JSON.stringify(status));
const cacheId = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS,
actionsCache2.saveCache([statusFile], cacheKey),
actionsCache.saveCache([statusFile], cacheKey),
() => {
logger.warning("Timed out saving overlay status to cache.");
}
@@ -166523,7 +166520,7 @@ async function getCacheKey(codeql, languages, diskUsage) {
}
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -168186,7 +168183,7 @@ var io5 = __toESM(require_io());
var core12 = __toESM(require_core());
// src/dependency-caching.ts
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
async function getDependencyCacheUsage(logger) {
@@ -170018,7 +170015,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
const automationID = getAutomationID(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
@@ -170031,7 +170028,7 @@ function populateRunAutomationDetails(sarifFile, category, analysis_key, environ
}
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
function getAutomationID(category, analysis_key, environment) {
if (category !== void 0) {
let automationID = category;
if (!automationID.endsWith("/")) {
+221 -218
View File
@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var os6 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs18.existsSync(filePath)) {
if (!fs19.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os6.EOL}`, {
fs19.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os6.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var path18 = __importStar2(require("path"));
_a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs18.promises.readlink(fsPath);
const result = yield fs19.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs18.constants.O_RDONLY;
exports2.READONLY = fs19.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -50448,7 +50448,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core16 = __importStar2(require_core());
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path18 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50502,7 +50502,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core16.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs18.promises.lstat(searchPath));
yield __await2(fs19.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50536,7 +50536,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path18.join(item.path, x), childLevel));
const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path18.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50571,7 +50571,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs18.promises.stat(item.path);
stats = yield fs19.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50583,10 +50583,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs18.promises.lstat(item.path);
stats = yield fs19.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs18.promises.realpath(item.path);
const realPath = yield fs19.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50695,7 +50695,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core16 = __importStar2(require_core());
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path18 = __importStar2(require("path"));
@@ -50718,13 +50718,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs18.statSync(file).isDirectory()) {
if (fs19.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs18.createReadStream(file), hash);
yield pipeline(fs19.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
@@ -52099,7 +52099,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var path18 = __importStar2(require("path"));
var semver10 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52128,7 +52128,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs18.statSync(filePath).size;
return fs19.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52166,7 +52166,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs18.unlink)(filePath);
return util.promisify(fs19.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52208,7 +52208,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs19.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92768,7 +92768,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92879,7 +92879,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs18.createWriteStream(archivePath);
const writeStream = fs19.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92904,7 +92904,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs18.promises.open(archivePath, "w");
const archiveDescriptor = yield fs19.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -93020,7 +93020,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs18.openSync(archivePath, "w");
const fd = fs19.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -93038,12 +93038,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs18.writeFileSync(fd, result);
fs19.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs18.closeSync(fd);
fs19.closeSync(fd);
}
}
});
@@ -93365,7 +93365,7 @@ var require_cacheHttpClient = __commonJS({
var core16 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93500,7 +93500,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs18.openSync(archivePath, "r");
const fd = fs19.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93514,7 +93514,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs19.createReadStream(archivePath, {
fd,
start,
end,
@@ -93525,7 +93525,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs18.closeSync(fd);
fs19.closeSync(fd);
}
return;
});
@@ -99481,7 +99481,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os6 = require("os");
var cp = require("child_process");
var fs18 = require("fs");
var fs19 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os6.platform();
@@ -99543,10 +99543,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs18.existsSync(lsbReleaseFile)) {
contents = fs18.readFileSync(lsbReleaseFile).toString();
} else if (fs18.existsSync(osReleaseFile)) {
contents = fs18.readFileSync(osReleaseFile).toString();
if (fs19.existsSync(lsbReleaseFile)) {
contents = fs19.readFileSync(lsbReleaseFile).toString();
} else if (fs19.existsSync(osReleaseFile)) {
contents = fs19.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99755,7 +99755,7 @@ var require_tool_cache = __commonJS({
var core16 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs18 = __importStar2(require("fs"));
var fs19 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os6 = __importStar2(require("os"));
var path18 = __importStar2(require("path"));
@@ -99801,7 +99801,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs18.existsSync(dest)) {
if (fs19.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99825,7 +99825,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs18.createWriteStream(dest));
yield pipeline(readStream, fs19.createWriteStream(dest));
core16.debug("download complete");
succeeded = true;
return dest;
@@ -100037,11 +100037,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os6.arch();
core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
core16.debug(`source dir: ${sourceDir}`);
if (!fs18.statSync(sourceDir).isDirectory()) {
if (!fs19.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs18.readdirSync(sourceDir)) {
for (const itemName of fs19.readdirSync(sourceDir)) {
const s = path18.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -100055,7 +100055,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os6.arch();
core16.debug(`Caching tool ${tool} ${version} ${arch2}`);
core16.debug(`source file: ${sourceFile}`);
if (!fs18.statSync(sourceFile).isFile()) {
if (!fs19.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -100084,7 +100084,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver10.clean(versionSpec) || "";
const cachePath = path18.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core16.debug(`checking cache: ${cachePath}`);
if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) {
if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) {
core16.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -100097,12 +100097,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os6.arch();
const toolPath = path18.join(_getCacheDirectory(), toolName);
if (fs18.existsSync(toolPath)) {
const children = fs18.readdirSync(toolPath);
if (fs19.existsSync(toolPath)) {
const children = fs19.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path18.join(toolPath, child, arch2 || "");
if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) {
if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -100173,7 +100173,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path18.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs18.writeFileSync(markerPath, "");
fs19.writeFileSync(markerPath, "");
core16.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -100784,7 +100784,7 @@ __export(init_action_exports, {
CODEQL_VERSION_JAR_MINIMIZATION: () => CODEQL_VERSION_JAR_MINIMIZATION
});
module.exports = __toCommonJS(init_action_exports);
var fs17 = __toESM(require("fs"));
var fs18 = __toESM(require("fs"));
var path17 = __toESM(require("path"));
var core15 = __toESM(require_core());
var github3 = __toESM(require_github());
@@ -100869,21 +100869,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs18 = options.fs || await import("node:fs/promises");
const fs19 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs19.lstat(itemPath, { bigint: true }) : await fs19.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs19.readdir(itemPath) : await fs19.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105195,7 +105195,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path6 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -105440,8 +105439,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -105543,153 +105540,6 @@ async function getDiffRangeFilePaths(sourceRoot, logger) {
).filter((rel) => !rel.startsWith(".."));
return [...new Set(relativePaths)];
}
var CACHE_VERSION = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs4.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay" /* Overlay */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip downloading overlay-base database from cache.`
);
return void 0;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip downloading overlay-base database from cache."
);
return void 0;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip downloading overlay-base database from cache."
);
return void 0;
}
const dbLocation = config.dbLocation;
const codeQlVersion = (await codeql.getVersion()).version;
const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
logger.info(
`Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}`
);
let databaseDownloadDurationMs = 0;
try {
const databaseDownloadStart = performance.now();
const foundKey = await waitForResultWithTimeLimit(
// This ten-minute limit for the cache restore operation is mainly to
// guard against the possibility that the cache service is unresponsive
// and hangs outside the data download.
//
// Data download (which is normally the most time-consuming part of the
// restore operation) should not run long enough to hit this limit. Even
// for an extremely large 10GB database, at a download speed of 40MB/s
// (see below), the download should complete within five minutes. If we
// do hit this limit, there are likely more serious problems other than
// mere slow download speed.
//
// This is important because we don't want any ongoing file operations
// on the database directory when we do hit this limit. Hitting this
// time limit takes us to a fallback path where we re-initialize the
// database from scratch at dbLocation, and having the cache restore
// operation continue to write into dbLocation in the background would
// really mess things up. We want to hit this limit only in the case
// of a hung cache service, not just slow download speed.
MAX_CACHE_OPERATION_MS,
actionsCache.restoreCache(
[dbLocation],
cacheRestoreKeyPrefix,
void 0,
{
// Azure SDK download (which is the default) uses 128MB segments; see
// https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
// Setting segmentTimeoutInMs to 3000 translates to segment download
// speed of about 40 MB/s, which should be achievable unless the
// download is unreliable (in which case we do want to abort).
segmentTimeoutInMs: 3e3
}
),
() => {
logger.info("Timed out downloading overlay-base database from cache");
}
);
databaseDownloadDurationMs = Math.round(
performance.now() - databaseDownloadStart
);
if (foundKey === void 0) {
logger.info("No overlay-base database found in Actions cache");
return void 0;
}
logger.info(
`Downloaded overlay-base database in cache with key ${foundKey}`
);
} catch (error3) {
logger.warning(
`Failed to download overlay-base database from cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return void 0;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Downloaded overlay-base database is invalid"
);
if (!databaseIsValid) {
logger.warning("Downloaded overlay-base database failed validation");
return void 0;
}
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.info(
"Filesystem error while accessing downloaded overlay-base database"
);
return void 0;
}
logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
return {
databaseSizeBytes: Math.round(databaseSizeBytes),
databaseDownloadDurationMs
};
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
const languages = [...config.languages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -106449,8 +106299,8 @@ Improved incremental analysis will be automatically retried when the next versio
// src/overlay/status.ts
var fs7 = __toESM(require("fs"));
var path8 = __toESM(require("path"));
var actionsCache2 = __toESM(require_cache5());
var MAX_CACHE_OPERATION_MS2 = 3e4;
var actionsCache = __toESM(require_cache5());
var MAX_CACHE_OPERATION_MS = 3e4;
var STATUS_FILE_NAME = "overlay-status.json";
function getStatusFilePath(languages) {
return path8.join(
@@ -106482,8 +106332,8 @@ async function getOverlayStatus(codeql, languages, diskUsage, logger) {
try {
await fs7.promises.mkdir(path8.dirname(statusFile), { recursive: true });
const foundKey = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS2,
actionsCache2.restoreCache([statusFile], cacheKey3),
MAX_CACHE_OPERATION_MS,
actionsCache.restoreCache([statusFile], cacheKey3),
() => {
logger.warning("Timed out restoring overlay status from cache.");
}
@@ -106522,10 +106372,10 @@ async function getCacheKey(codeql, languages, diskUsage) {
// src/trap-caching.ts
var fs8 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var actionsCache3 = __toESM(require_cache5());
var CACHE_VERSION2 = 1;
var actionsCache2 = __toESM(require_cache5());
var CACHE_VERSION = 1;
var CODEQL_TRAP_CACHE_PREFIX = "codeql-trap";
var MAX_CACHE_OPERATION_MS3 = 12e4;
var MAX_CACHE_OPERATION_MS2 = 12e4;
async function downloadTrapCaches(codeql, languages, logger) {
const result = {};
const languagesSupportingCaching = await getLanguagesSupportingCaching(
@@ -106566,8 +106416,8 @@ async function downloadTrapCaches(codeql, languages, logger) {
`Looking in Actions cache for TRAP cache with key ${preferredKey}`
);
const found = await waitForResultWithTimeLimit(
MAX_CACHE_OPERATION_MS3,
actionsCache3.restoreCache([cacheDir], preferredKey, [
MAX_CACHE_OPERATION_MS2,
actionsCache2.restoreCache([cacheDir], preferredKey, [
// Fall back to any cache with the right key prefix
await cachePrefix(codeql, language)
]),
@@ -106625,7 +106475,7 @@ async function cacheKey(codeql, language, baseSha) {
return `${await cachePrefix(codeql, language)}${baseSha}`;
}
async function cachePrefix(codeql, language) {
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION2}-${(await codeql.getVersion()).version}-${language}-`;
return `${CODEQL_TRAP_CACHE_PREFIX}-${CACHE_VERSION}-${(await codeql.getVersion()).version}-${language}-`;
}
// src/config-utils.ts
@@ -107447,7 +107297,7 @@ async function logGeneratedFilesTelemetry(config, duration, generatedFilesCount)
// src/dependency-caching.ts
var os3 = __toESM(require("os"));
var import_path2 = require("path");
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
var CODEQL_DEPENDENCY_CACHE_VERSION = 1;
@@ -107574,7 +107424,7 @@ async function downloadDependencyCaches(codeql, features, languages, logger) {
)}`
);
const start = performance.now();
const hitKey = await actionsCache4.restoreCache(
const hitKey = await actionsCache3.restoreCache(
await cacheConfig.getDependencyPaths(codeql, features),
primaryKey,
restoreKeys
@@ -109656,6 +109506,159 @@ To opt out of this change, ${envVarOptOut}`;
core12.exportVariable("CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, "true");
}
// src/overlay/caching.ts
var fs16 = __toESM(require("fs"));
var actionsCache4 = __toESM(require_cache5());
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
var CACHE_VERSION2 = 1;
var CACHE_PREFIX = "codeql-overlay-base-database";
var MAX_CACHE_OPERATION_MS3 = 6e5;
async function checkOverlayBaseDatabase(codeql, config, logger, warningPrefix) {
const baseDatabaseOidsFilePath = getBaseDatabaseOidsFilePath(config);
if (!fs16.existsSync(baseDatabaseOidsFilePath)) {
logger.warning(
`${warningPrefix}: ${baseDatabaseOidsFilePath} does not exist`
);
return false;
}
for (const language of config.languages) {
const dbPath = getCodeQLDatabasePath(config, language);
try {
const resolveDatabaseOutput = await codeql.resolveDatabase(dbPath);
if (resolveDatabaseOutput === void 0 || !("overlayBaseSpecifier" in resolveDatabaseOutput)) {
logger.info(`${warningPrefix}: no overlayBaseSpecifier defined`);
return false;
} else {
logger.debug(
`Overlay base specifier for ${language} overlay-base database found: ${resolveDatabaseOutput.overlayBaseSpecifier}`
);
}
} catch (e) {
logger.warning(`${warningPrefix}: failed to resolve database: ${e}`);
return false;
}
}
return true;
}
async function downloadOverlayBaseDatabaseFromCache(codeql, config, logger) {
const overlayDatabaseMode = config.overlayDatabaseMode;
if (overlayDatabaseMode !== "overlay" /* Overlay */) {
logger.debug(
`Overlay database mode is ${overlayDatabaseMode}. Skip downloading overlay-base database from cache.`
);
return void 0;
}
if (!config.useOverlayDatabaseCaching) {
logger.debug(
"Overlay database caching is disabled. Skip downloading overlay-base database from cache."
);
return void 0;
}
if (isInTestMode()) {
logger.debug(
"In test mode. Skip downloading overlay-base database from cache."
);
return void 0;
}
const dbLocation = config.dbLocation;
const codeQlVersion = (await codeql.getVersion()).version;
const cacheRestoreKeyPrefix = await getCacheRestoreKeyPrefix(
config,
codeQlVersion
);
logger.info(
`Looking in Actions cache for overlay-base database with restore key ${cacheRestoreKeyPrefix}`
);
let databaseDownloadDurationMs = 0;
try {
const databaseDownloadStart = performance.now();
const foundKey = await waitForResultWithTimeLimit(
// This ten-minute limit for the cache restore operation is mainly to
// guard against the possibility that the cache service is unresponsive
// and hangs outside the data download.
//
// Data download (which is normally the most time-consuming part of the
// restore operation) should not run long enough to hit this limit. Even
// for an extremely large 10GB database, at a download speed of 40MB/s
// (see below), the download should complete within five minutes. If we
// do hit this limit, there are likely more serious problems other than
// mere slow download speed.
//
// This is important because we don't want any ongoing file operations
// on the database directory when we do hit this limit. Hitting this
// time limit takes us to a fallback path where we re-initialize the
// database from scratch at dbLocation, and having the cache restore
// operation continue to write into dbLocation in the background would
// really mess things up. We want to hit this limit only in the case
// of a hung cache service, not just slow download speed.
MAX_CACHE_OPERATION_MS3,
actionsCache4.restoreCache(
[dbLocation],
cacheRestoreKeyPrefix,
void 0,
{
// Azure SDK download (which is the default) uses 128MB segments; see
// https://github.com/actions/toolkit/blob/main/packages/cache/README.md.
// Setting segmentTimeoutInMs to 3000 translates to segment download
// speed of about 40 MB/s, which should be achievable unless the
// download is unreliable (in which case we do want to abort).
segmentTimeoutInMs: 3e3
}
),
() => {
logger.info("Timed out downloading overlay-base database from cache");
}
);
databaseDownloadDurationMs = Math.round(
performance.now() - databaseDownloadStart
);
if (foundKey === void 0) {
logger.info("No overlay-base database found in Actions cache");
return void 0;
}
logger.info(
`Downloaded overlay-base database in cache with key ${foundKey}`
);
} catch (error3) {
logger.warning(
`Failed to download overlay-base database from cache: ${error3 instanceof Error ? error3.message : String(error3)}`
);
return void 0;
}
const databaseIsValid = await checkOverlayBaseDatabase(
codeql,
config,
logger,
"Downloaded overlay-base database is invalid"
);
if (!databaseIsValid) {
logger.warning("Downloaded overlay-base database failed validation");
return void 0;
}
const databaseSizeBytes = await tryGetFolderBytes(dbLocation, logger);
if (databaseSizeBytes === void 0) {
logger.info(
"Filesystem error while accessing downloaded overlay-base database"
);
return void 0;
}
logger.info(`Successfully downloaded overlay-base database to ${dbLocation}`);
return {
databaseSizeBytes: Math.round(databaseSizeBytes),
databaseDownloadDurationMs
};
}
async function getCacheRestoreKeyPrefix(config, codeQlVersion) {
const languages = [...config.languages].sort().join("_");
const cacheKeyComponents = {
automationID: await getAutomationID()
// Add more components here as needed in the future
};
const componentsHash = createCacheKeyHash(cacheKeyComponents);
return `${CACHE_PREFIX}-${CACHE_VERSION2}-${componentsHash}-${languages}-${codeQlVersion}-`;
}
// src/status-report.ts
var os5 = __toESM(require("os"));
var core13 = __toESM(require_core());
@@ -109910,7 +109913,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/workflow.ts
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var path16 = __toESM(require("path"));
var import_zlib = __toESM(require("zlib"));
var core14 = __toESM(require_core());
@@ -110062,7 +110065,7 @@ async function getWorkflow(logger) {
);
}
const workflowPath = await getWorkflowAbsolutePath(logger);
return load(fs16.readFileSync(workflowPath, "utf-8"));
return load(fs17.readFileSync(workflowPath, "utf-8"));
}
async function getWorkflowAbsolutePath(logger) {
const relativePath = await getWorkflowRelativePath();
@@ -110070,7 +110073,7 @@ async function getWorkflowAbsolutePath(logger) {
getRequiredEnvParam("GITHUB_WORKSPACE"),
relativePath
);
if (fs16.existsSync(absolutePath)) {
if (fs17.existsSync(absolutePath)) {
logger.debug(
`Derived the following absolute path for the currently executing workflow: ${absolutePath}.`
);
@@ -110402,16 +110405,16 @@ async function run(startedAt) {
"codeql-action-go-tracing",
"bin"
);
fs17.mkdirSync(tempBinPath, { recursive: true });
fs18.mkdirSync(tempBinPath, { recursive: true });
core15.addPath(tempBinPath);
const goWrapperPath = path17.resolve(tempBinPath, "go");
fs17.writeFileSync(
fs18.writeFileSync(
goWrapperPath,
`#!/bin/bash
exec ${goBinaryPath} "$@"`
);
fs17.chmodSync(goWrapperPath, "755");
fs18.chmodSync(goWrapperPath, "755");
core15.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath);
} catch (e) {
logger.warning(
+8 -11
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var path7 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache3(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -104133,7 +104133,6 @@ var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var fs3 = __toESM(require("fs"));
var path4 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs2 = __toESM(require("fs"));
@@ -104329,8 +104328,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -104650,10 +104647,10 @@ var featureConfig = {
};
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
+1344 -1345
View File
File diff suppressed because it is too large Load Diff
+9 -14
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path4 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -161699,9 +161699,6 @@ function getActionsLogger() {
// src/feature-flags.ts
var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var core8 = __toESM(require_core());
var toolrunner2 = __toESM(require_toolrunner());
@@ -161717,8 +161714,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -161934,10 +161929,10 @@ var featureConfig = {
};
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -162167,7 +162162,7 @@ var semver7 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/dependency-caching.ts
var actionsCache4 = __toESM(require_cache5());
var actionsCache3 = __toESM(require_cache5());
var glob = __toESM(require_glob());
// src/artifact-scanner.ts
+2153 -2158
View File
File diff suppressed because it is too large Load Diff
+10 -13
View File
@@ -93210,7 +93210,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache4;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -93387,7 +93387,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
function saveCache3(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -98887,8 +98887,8 @@ var require_cache5 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
exports2.restoreCache = restoreCache3;
exports2.saveCache = saveCache3;
var core14 = __importStar2(require_core());
var path12 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -98945,7 +98945,7 @@ var require_cache5 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -99089,7 +99089,7 @@ var require_cache5 = __commonJS({
return void 0;
});
}
function saveCache4(paths_1, key_1, options_1) {
function saveCache3(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core14.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -107255,7 +107255,6 @@ var cliVersion = "2.25.1";
// src/overlay/index.ts
var fs4 = __toESM(require("fs"));
var path5 = __toESM(require("path"));
var actionsCache = __toESM(require_cache5());
// src/git-utils.ts
var fs3 = __toESM(require("fs"));
@@ -107485,8 +107484,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
const gitFileOidsJson = JSON.stringify(gitFileOids);
@@ -107829,10 +107826,10 @@ ${jsonContents}`
}
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache5());
var actionsCache = __toESM(require_cache5());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache5());
var actionsCache2 = __toESM(require_cache5());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -110786,7 +110783,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
const automationID = getAutomationID(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === void 0) {
@@ -110799,7 +110796,7 @@ function populateRunAutomationDetails(sarifFile, category, analysis_key, environ
}
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
function getAutomationID(category, analysis_key, environment) {
if (category !== void 0) {
let automationID = category;
if (!automationID.endsWith("/")) {
+9 -14
View File
@@ -155544,7 +155544,7 @@ var require_cacheHttpClient = __commonJS({
exports2.getCacheEntry = getCacheEntry;
exports2.downloadCache = downloadCache;
exports2.reserveCache = reserveCache;
exports2.saveCache = saveCache5;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
@@ -155721,7 +155721,7 @@ Other caches with similar key:`);
}));
});
}
function saveCache5(cacheId, archivePath, signedUploadURL, options) {
function saveCache4(cacheId, archivePath, signedUploadURL, options) {
return __awaiter2(this, void 0, void 0, function* () {
const uploadOptions = (0, options_1.getUploadOptions)(options);
if (uploadOptions.useAzureSdk) {
@@ -156995,8 +156995,8 @@ var require_cache6 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0;
exports2.isFeatureAvailable = isFeatureAvailable;
exports2.restoreCache = restoreCache5;
exports2.saveCache = saveCache5;
exports2.restoreCache = restoreCache4;
exports2.saveCache = saveCache4;
var core15 = __importStar2(require_core());
var path3 = __importStar2(require("path"));
var utils = __importStar2(require_cacheUtils());
@@ -157053,7 +157053,7 @@ var require_cache6 = __commonJS({
return !!process.env["ACTIONS_CACHE_URL"];
}
}
function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) {
function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -157197,7 +157197,7 @@ var require_cache6 = __commonJS({
return void 0;
});
}
function saveCache5(paths_1, key_1, options_1) {
function saveCache4(paths_1, key_1, options_1) {
return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) {
const cacheServiceVersion = (0, config_1.getCacheServiceVersion)();
core15.debug(`Cache service version: ${cacheServiceVersion}`);
@@ -161865,9 +161865,6 @@ function withGroup(groupName, f) {
// src/feature-flags.ts
var semver5 = __toESM(require_semver2());
// src/overlay/index.ts
var actionsCache = __toESM(require_cache6());
// src/git-utils.ts
var core8 = __toESM(require_core());
var toolrunner2 = __toESM(require_toolrunner());
@@ -161883,8 +161880,6 @@ var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8";
var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9";
var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
// src/tools-features.ts
var semver4 = __toESM(require_semver2());
@@ -162104,10 +162099,10 @@ var featureConfig = {
};
// src/overlay/status.ts
var actionsCache2 = __toESM(require_cache6());
var actionsCache = __toESM(require_cache6());
// src/trap-caching.ts
var actionsCache3 = __toESM(require_cache6());
var actionsCache2 = __toESM(require_cache6());
// src/config-utils.ts
var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_MB = 2e4;
@@ -162154,7 +162149,7 @@ var semver7 = __toESM(require_semver2());
var STREAMING_HIGH_WATERMARK_BYTES = 4 * 1024 * 1024;
// src/dependency-caching.ts
var actionsCache4 = __toESM(require_cache6());
var actionsCache3 = __toESM(require_cache6());
var glob = __toESM(require_glob2());
// src/artifact-scanner.ts
+1344 -1347
View File
File diff suppressed because it is too large Load Diff