Merge pull request #3528 from github/mbg/refactor/sarif

Refactor SARIF-related types and functions into a separate module
This commit is contained in:
Michael B. Gale
2026-03-03 12:10:30 +00:00
committed by GitHub
25 changed files with 1142 additions and 1223 deletions

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

287
lib/analyze-action.js generated
View File

@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var os5 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs17.existsSync(filePath)) {
if (!fs18.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs17.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
fs18.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os5.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
_a = fs17.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs18.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs17.promises.readlink(fsPath);
const result = yield fs18.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs17.constants.O_RDONLY;
exports2.READONLY = fs18.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -50403,7 +50404,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core15 = __importStar2(require_core());
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path16 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50457,7 +50458,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core15.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs17.promises.lstat(searchPath));
yield __await2(fs18.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50491,7 +50492,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs17.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
const childItems = (yield __await2(fs18.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50526,7 +50527,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs17.promises.stat(item.path);
stats = yield fs18.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50538,10 +50539,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs17.promises.lstat(item.path);
stats = yield fs18.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs17.promises.realpath(item.path);
const realPath = yield fs18.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50650,7 +50651,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles2;
var crypto3 = __importStar2(require("crypto"));
var core15 = __importStar2(require_core());
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path16 = __importStar2(require("path"));
@@ -50673,13 +50674,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs17.statSync(file).isDirectory()) {
if (fs18.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto3.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs17.createReadStream(file), hash2);
yield pipeline(fs18.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -52054,7 +52055,7 @@ var require_cacheUtils = __commonJS({
var glob2 = __importStar2(require_glob());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var path16 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52083,7 +52084,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs17.statSync(filePath).size;
return fs18.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52121,7 +52122,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs17.unlink)(filePath);
return util.promisify(fs18.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52163,7 +52164,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs17.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs18.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92320,7 +92321,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92431,7 +92432,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs17.createWriteStream(archivePath);
const writeStream = fs18.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92456,7 +92457,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs17.promises.open(archivePath, "w");
const archiveDescriptor = yield fs18.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92572,7 +92573,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs17.openSync(archivePath, "w");
const fd = fs18.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92590,12 +92591,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs17.writeFileSync(fd, result);
fs18.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs17.closeSync(fd);
fs18.closeSync(fd);
}
}
});
@@ -92917,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({
var core15 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93052,7 +93053,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs17.openSync(archivePath, "r");
const fd = fs18.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93066,7 +93067,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs17.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs18.createReadStream(archivePath, {
fd,
start,
end,
@@ -93077,7 +93078,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs17.closeSync(fd);
fs18.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os5 = require("os");
var cp = require("child_process");
var fs17 = require("fs");
var fs18 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os5.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs17.existsSync(lsbReleaseFile)) {
contents = fs17.readFileSync(lsbReleaseFile).toString();
} else if (fs17.existsSync(osReleaseFile)) {
contents = fs17.readFileSync(osReleaseFile).toString();
if (fs18.existsSync(lsbReleaseFile)) {
contents = fs18.readFileSync(lsbReleaseFile).toString();
} else if (fs18.existsSync(osReleaseFile)) {
contents = fs18.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core15 = __importStar2(require_core());
var io7 = __importStar2(require_io());
var crypto3 = __importStar2(require("crypto"));
var fs17 = __importStar2(require("fs"));
var fs18 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os5 = __importStar2(require("os"));
var path16 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs17.existsSync(dest)) {
if (fs18.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs17.createWriteStream(dest));
yield pipeline(readStream, fs18.createWriteStream(dest));
core15.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source dir: ${sourceDir}`);
if (!fs17.statSync(sourceDir).isDirectory()) {
if (!fs18.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs17.readdirSync(sourceDir)) {
for (const itemName of fs18.readdirSync(sourceDir)) {
const s = path16.join(sourceDir, itemName);
yield io7.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os5.arch();
core15.debug(`Caching tool ${tool} ${version} ${arch2}`);
core15.debug(`source file: ${sourceFile}`);
if (!fs17.statSync(sourceFile).isFile()) {
if (!fs18.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core15.debug(`checking cache: ${cachePath}`);
if (fs17.existsSync(cachePath) && fs17.existsSync(`${cachePath}.complete`)) {
if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) {
core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os5.arch();
const toolPath = path16.join(_getCacheDirectory(), toolName);
if (fs17.existsSync(toolPath)) {
const children = fs17.readdirSync(toolPath);
if (fs18.existsSync(toolPath)) {
const children = fs18.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path16.join(toolPath, child, arch2 || "");
if (fs17.existsSync(fullPath) && fs17.existsSync(`${fullPath}.complete`)) {
if (fs18.existsSync(fullPath) && fs18.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs17.writeFileSync(markerPath, "");
fs18.writeFileSync(markerPath, "");
core15.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103232,7 +103233,7 @@ __export(analyze_action_exports, {
runPromise: () => runPromise
});
module.exports = __toCommonJS(analyze_action_exports);
var fs16 = __toESM(require("fs"));
var fs17 = __toESM(require("fs"));
var import_path4 = __toESM(require("path"));
var import_perf_hooks3 = require("perf_hooks");
var core14 = __toESM(require_core());
@@ -103261,21 +103262,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs17 = options.fs || await import("node:fs/promises");
const fs18 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs17.lstat(itemPath, { bigint: true }) : await fs17.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs18.lstat(itemPath, { bigint: true }) : await fs18.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs17.readdir(itemPath) : await fs17.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs18.readdir(itemPath) : await fs18.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105916,17 +105917,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run2 of sarif.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getSystemReservedMemoryMegaBytes(totalMemoryMegaBytes, platform2) {
const fixedAmount = 1024 * (platform2 === "win32" ? 1.5 : 1);
const scaledAmount = getReservedRamScaleFactor() * Math.max(totalMemoryMegaBytes - 8 * 1024, 0);
@@ -111235,7 +111225,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs15 = __toESM(require("fs"));
var fs16 = __toESM(require("fs"));
var path14 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -112316,12 +112306,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run2 of sarif.runs || []) {
for (const run2 of sarifLog.runs || []) {
const artifacts = run2.artifacts || [];
for (const result of run2.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -112361,7 +112351,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -112396,36 +112386,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
};
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// src/sarif/index.ts
var fs15 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run2 of sarifFile.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs15.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
runs.push(...sarifLog?.runs || []);
}
return combinedSarif;
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run2) => run2.tool?.driver?.name === "CodeQL"
);
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run2) => run2.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run2) {
@@ -112438,10 +112440,13 @@ function createRunKey(run2) {
automationId: run2.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run2 of sarifObject.runs) {
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run2 of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run2));
if (keys.has(key)) {
return false;
@@ -112451,6 +112456,10 @@ function areAllRunsUnique(sarifObjects) {
}
return true;
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -112479,9 +112488,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs15.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -112534,27 +112541,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path14.resolve(tempDir, "combined-sarif");
fs15.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs15.mkdtempSync(path14.resolve(baseTempDir, "output-"));
fs16.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs16.mkdtempSync(path14.resolve(baseTempDir, "output-"));
const outputFile = path14.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs15.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarif.runs || []) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
run2.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -112577,7 +112584,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs15.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs16.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -112611,7 +112618,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs15.readdirSync(dir, { withFileTypes: true });
const entries = fs16.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path14.resolve(dir, entry.name));
@@ -112624,7 +112631,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs15.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs16.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -112671,9 +112678,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -112687,26 +112694,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs15.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -112733,6 +112740,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -112758,7 +112766,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs15.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs16.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -112769,14 +112777,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -112784,21 +112792,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -112815,12 +112823,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -112862,9 +112870,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs15.existsSync(outputDir)) {
fs15.mkdirSync(outputDir, { recursive: true });
} else if (!fs15.lstatSync(outputDir).isDirectory()) {
if (!fs16.existsSync(outputDir)) {
fs16.mkdirSync(outputDir, { recursive: true });
} else if (!fs16.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -112874,7 +112882,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs15.writeFileSync(outputFile, sarifPayload);
fs16.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -112972,9 +112980,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs || []) {
const id = run2?.automationDetails?.id;
const tool = run2.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -112993,15 +113001,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs) {
if (run2.results) {
run2.results = run2.results.filter((result) => {
const locations = [
@@ -113022,7 +113031,7 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// src/upload-sarif.ts
@@ -113107,7 +113116,7 @@ function doesGoExtractionOutputExist(config) {
"go" /* go */
);
const trapDirectory = import_path4.default.join(golangDbDirectory, "trap", "go" /* go */);
return fs16.existsSync(trapDirectory) && fs16.readdirSync(trapDirectory).some(
return fs17.existsSync(trapDirectory) && fs17.readdirSync(trapDirectory).some(
(fileName) => [
".trap",
".trap.gz",

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

689
lib/init-action-post.js generated

File diff suppressed because it is too large Load Diff

1
lib/init-action.js generated
View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

293
lib/upload-lib.js generated
View File

@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var os2 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs12.existsSync(filePath)) {
if (!fs13.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs12.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, {
fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
_a = fs12.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs12.promises.readlink(fsPath);
const result = yield fs13.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs12.constants.O_RDONLY;
exports2.READONLY = fs13.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -47341,6 +47341,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -50403,7 +50404,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core12 = __importStar2(require_core());
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path12 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -50457,7 +50458,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core12.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs12.promises.lstat(searchPath));
yield __await2(fs13.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -50491,7 +50492,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs12.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel));
const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path12.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -50526,7 +50527,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs12.promises.stat(item.path);
stats = yield fs13.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -50538,10 +50539,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs12.promises.lstat(item.path);
stats = yield fs13.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs12.promises.realpath(item.path);
const realPath = yield fs13.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -50650,7 +50651,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var core12 = __importStar2(require_core());
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path12 = __importStar2(require("path"));
@@ -50673,13 +50674,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs12.statSync(file).isDirectory()) {
if (fs13.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs12.createReadStream(file), hash2);
yield pipeline(fs13.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -52054,7 +52055,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var path12 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -52083,7 +52084,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs12.statSync(filePath).size;
return fs13.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -52121,7 +52122,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs12.unlink)(filePath);
return util.promisify(fs13.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -52163,7 +52164,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs12.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -92320,7 +92321,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -92431,7 +92432,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs12.createWriteStream(archivePath);
const writeStream = fs13.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -92456,7 +92457,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs12.promises.open(archivePath, "w");
const archiveDescriptor = yield fs13.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -92572,7 +92573,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs12.openSync(archivePath, "w");
const fd = fs13.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -92590,12 +92591,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs12.writeFileSync(fd, result);
fs13.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs12.closeSync(fd);
fs13.closeSync(fd);
}
}
});
@@ -92917,7 +92918,7 @@ var require_cacheHttpClient = __commonJS({
var core12 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -93052,7 +93053,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs12.openSync(archivePath, "r");
const fd = fs13.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -93066,7 +93067,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs12.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, {
fd,
start,
end,
@@ -93077,7 +93078,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs12.closeSync(fd);
fs13.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os2 = require("os");
var cp = require("child_process");
var fs12 = require("fs");
var fs13 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os2.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs12.existsSync(lsbReleaseFile)) {
contents = fs12.readFileSync(lsbReleaseFile).toString();
} else if (fs12.existsSync(osReleaseFile)) {
contents = fs12.readFileSync(osReleaseFile).toString();
if (fs13.existsSync(lsbReleaseFile)) {
contents = fs13.readFileSync(lsbReleaseFile).toString();
} else if (fs13.existsSync(osReleaseFile)) {
contents = fs13.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core12 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs12 = __importStar2(require("fs"));
var fs13 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os2 = __importStar2(require("os"));
var path12 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs12.existsSync(dest)) {
if (fs13.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs12.createWriteStream(dest));
yield pipeline(readStream, fs13.createWriteStream(dest));
core12.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os2.arch();
core12.debug(`Caching tool ${tool} ${version} ${arch2}`);
core12.debug(`source dir: ${sourceDir}`);
if (!fs12.statSync(sourceDir).isDirectory()) {
if (!fs13.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs12.readdirSync(sourceDir)) {
for (const itemName of fs13.readdirSync(sourceDir)) {
const s = path12.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os2.arch();
core12.debug(`Caching tool ${tool} ${version} ${arch2}`);
core12.debug(`source file: ${sourceFile}`);
if (!fs12.statSync(sourceFile).isFile()) {
if (!fs13.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core12.debug(`checking cache: ${cachePath}`);
if (fs12.existsSync(cachePath) && fs12.existsSync(`${cachePath}.complete`)) {
if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) {
core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os2.arch();
const toolPath = path12.join(_getCacheDirectory(), toolName);
if (fs12.existsSync(toolPath)) {
const children = fs12.readdirSync(toolPath);
if (fs13.existsSync(toolPath)) {
const children = fs13.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path12.join(toolPath, child, arch2 || "");
if (fs12.existsSync(fullPath) && fs12.existsSync(`${fullPath}.complete`)) {
if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs12.writeFileSync(markerPath, "");
fs13.writeFileSync(markerPath, "");
core12.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103229,13 +103230,12 @@ var require_sarif_schema_2_1_0 = __commonJS({
// src/upload-lib.ts
var upload_lib_exports = {};
__export(upload_lib_exports, {
InvalidSarifUploadError: () => InvalidSarifUploadError,
buildPayload: () => buildPayload,
findSarifFilesInDir: () => findSarifFilesInDir,
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
populateRunAutomationDetails: () => populateRunAutomationDetails,
postProcessSarifFiles: () => postProcessSarifFiles,
readSarifFile: () => readSarifFile,
readSarifFileOrThrow: () => readSarifFileOrThrow,
shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest,
shouldShowCombineSarifFilesDeprecationWarning: () => shouldShowCombineSarifFilesDeprecationWarning,
@@ -103249,7 +103249,7 @@ __export(upload_lib_exports, {
writePostProcessedFiles: () => writePostProcessedFiles
});
module.exports = __toCommonJS(upload_lib_exports);
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var path11 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -103278,21 +103278,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs12 = options.fs || await import("node:fs/promises");
const fs13 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs12.lstat(itemPath, { bigint: true }) : await fs12.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs12.readdir(itemPath) : await fs12.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105932,17 +105932,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run of sarif.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getCodeQLDatabasePath(config, language) {
return path.resolve(config.dbLocation, language);
}
@@ -110204,12 +110193,12 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run of sarif.runs || []) {
for (const run of sarifLog.runs || []) {
const artifacts = run.artifacts || [];
for (const result of run.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -110249,7 +110238,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -110284,36 +110273,48 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
};
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// src/sarif/index.ts
var fs11 = __toESM(require("fs"));
var InvalidSarifUploadError = class extends Error {
};
function getToolNames(sarifFile) {
const toolNames = {};
for (const run of sarifFile.runs || []) {
const tool = run.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function readSarifFile(sarifFilePath) {
return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8"));
}
function combineSarifFiles(sarifFiles, logger) {
logger.info(`Loading SARIF file(s)`);
const combinedSarif = {
version: null,
runs: []
};
const runs = [];
let version = void 0;
for (const sarifFile of sarifFiles) {
logger.debug(`Loading SARIF file: ${sarifFile}`);
const sarifObject = JSON.parse(
fs11.readFileSync(sarifFile, "utf8")
);
if (combinedSarif.version === null) {
combinedSarif.version = sarifObject.version;
} else if (combinedSarif.version !== sarifObject.version) {
const sarifLog = readSarifFile(sarifFile);
if (version === void 0) {
version = sarifLog.version;
} else if (version !== sarifLog.version) {
throw new InvalidSarifUploadError(
`Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
`Different SARIF versions encountered: ${version} and ${sarifLog.version}`
);
}
combinedSarif.runs.push(...sarifObject.runs);
runs.push(...sarifLog?.runs || []);
}
return combinedSarif;
if (version === void 0) {
version = "2.1.0";
}
return { version, runs };
}
function areAllRunsProducedByCodeQL(sarifObjects) {
return sarifObjects.every((sarifObject) => {
return sarifObject.runs?.every(
(run) => run.tool?.driver?.name === "CodeQL"
);
function areAllRunsProducedByCodeQL(sarifLogs) {
return sarifLogs.every((sarifLog) => {
return sarifLog.runs?.every((run) => run.tool?.driver?.name === "CodeQL");
});
}
function createRunKey(run) {
@@ -110326,10 +110327,13 @@ function createRunKey(run) {
automationId: run.automationDetails?.id
};
}
function areAllRunsUnique(sarifObjects) {
function areAllRunsUnique(sarifLogs) {
const keys = /* @__PURE__ */ new Set();
for (const sarifObject of sarifObjects) {
for (const run of sarifObject.runs) {
for (const sarifLog of sarifLogs) {
if (sarifLog.runs === void 0) {
continue;
}
for (const run of sarifLog.runs) {
const key = JSON.stringify(createRunKey(run));
if (keys.has(key)) {
return false;
@@ -110339,6 +110343,10 @@ function areAllRunsUnique(sarifObjects) {
}
return true;
}
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -110367,9 +110375,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs11.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -110422,27 +110428,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path11.resolve(tempDir, "combined-sarif");
fs11.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs11.mkdtempSync(path11.resolve(baseTempDir, "output-"));
fs12.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs12.mkdtempSync(path11.resolve(baseTempDir, "output-"));
const outputFile = path11.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs11.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run of sarif.runs || []) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === void 0) {
run.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -110465,7 +110471,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs11.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -110499,7 +110505,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs11.readdirSync(dir, { withFileTypes: true });
const entries = fs12.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path11.resolve(dir, entry.name));
@@ -110512,11 +110518,11 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
function getSarifFilePaths(sarifPath, isSarif) {
if (!fs11.existsSync(sarifPath)) {
if (!fs12.existsSync(sarifPath)) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
let sarifFiles;
if (fs11.lstatSync(sarifPath).isDirectory()) {
if (fs12.lstatSync(sarifPath).isDirectory()) {
sarifFiles = findSarifFilesInDir(sarifPath, isSarif);
if (sarifFiles.length === 0) {
throw new ConfigurationError(
@@ -110529,7 +110535,7 @@ function getSarifFilePaths(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs11.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -110576,9 +110582,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -110592,26 +110598,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs11.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -110638,6 +110644,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -110663,7 +110670,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs11.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -110674,14 +110681,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -110689,21 +110696,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -110750,12 +110757,12 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -110797,9 +110804,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs11.existsSync(outputDir)) {
fs11.mkdirSync(outputDir, { recursive: true });
} else if (!fs11.lstatSync(outputDir).isDirectory()) {
if (!fs12.existsSync(outputDir)) {
fs12.mkdirSync(outputDir, { recursive: true });
} else if (!fs12.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -110809,7 +110816,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs11.writeFileSync(outputFile, sarifPayload);
fs12.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -110907,9 +110914,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run of sarif.runs) {
for (const run of sarifLog.runs || []) {
const id = run?.automationDetails?.id;
const tool = run.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -110928,15 +110935,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
// Normalizes a category/tool-name fragment for use in a sentinel identifier:
// every character outside [A-Za-z0-9_] becomes "_" and the result is
// upper-cased. A nullish input collapses to a single "_".
function sanitize(str2) {
  const fragment = str2 ?? "_";
  return fragment.replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run of sarif.runs) {
for (const run of sarifLog.runs) {
if (run.results) {
run.results = run.results.filter((result) => {
const locations = [
@@ -110957,17 +110965,16 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
InvalidSarifUploadError,
buildPayload,
findSarifFilesInDir,
getGroupedSarifFilePaths,
populateRunAutomationDetails,
postProcessSarifFiles,
readSarifFile,
readSarifFileOrThrow,
shouldConsiderConfigurationError,
shouldConsiderInvalidRequest,
shouldShowCombineSarifFilesDeprecationWarning,

View File

@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",

View File

@@ -204,7 +204,7 @@ var require_file_command = __commonJS({
exports2.issueFileCommand = issueFileCommand;
exports2.prepareKeyValueMessage = prepareKeyValueMessage;
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var os3 = __importStar2(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -212,10 +212,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs13.existsSync(filePath)) {
if (!fs14.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs13.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
fs14.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
encoding: "utf8"
});
}
@@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({
exports2.isRooted = isRooted;
exports2.tryGetExecutablePath = tryGetExecutablePath;
exports2.getCmdPath = getCmdPath;
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
_a = fs13.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs14.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
function readlink(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
const result = yield fs13.promises.readlink(fsPath);
const result = yield fs14.promises.readlink(fsPath);
if (exports2.IS_WINDOWS && !result.endsWith("\\")) {
return `${result}\\`;
}
@@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({
});
}
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs13.constants.O_RDONLY;
exports2.READONLY = fs14.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter2(this, void 0, void 0, function* () {
try {
@@ -46044,6 +46044,7 @@ var require_package = __commonJS({
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
ava: "^6.4.1",
@@ -49106,7 +49107,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core14 = __importStar2(require_core());
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var globOptionsHelper = __importStar2(require_internal_glob_options_helper());
var path13 = __importStar2(require("path"));
var patternHelper = __importStar2(require_internal_pattern_helper());
@@ -49160,7 +49161,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core14.debug(`Search path '${searchPath}'`);
try {
yield __await2(fs13.promises.lstat(searchPath));
yield __await2(fs14.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -49194,7 +49195,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await2(fs13.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel));
const childItems = (yield __await2(fs14.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path13.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await2(item.path);
@@ -49229,7 +49230,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs13.promises.stat(item.path);
stats = yield fs14.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -49241,10 +49242,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs13.promises.lstat(item.path);
stats = yield fs14.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs13.promises.realpath(item.path);
const realPath = yield fs14.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -49353,7 +49354,7 @@ var require_internal_hash_files = __commonJS({
exports2.hashFiles = hashFiles;
var crypto2 = __importStar2(require("crypto"));
var core14 = __importStar2(require_core());
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var path13 = __importStar2(require("path"));
@@ -49376,13 +49377,13 @@ var require_internal_hash_files = __commonJS({
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
continue;
}
if (fs13.statSync(file).isDirectory()) {
if (fs14.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash2 = crypto2.createHash("sha256");
const pipeline = util.promisify(stream2.pipeline);
yield pipeline(fs13.createReadStream(file), hash2);
yield pipeline(fs14.createReadStream(file), hash2);
result.write(hash2.digest());
count++;
if (!hasMatch) {
@@ -50757,7 +50758,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar2(require_glob());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var path13 = __importStar2(require("path"));
var semver9 = __importStar2(require_semver3());
var util = __importStar2(require("util"));
@@ -50786,7 +50787,7 @@ var require_cacheUtils = __commonJS({
});
}
function getArchiveFileSizeInBytes(filePath) {
return fs13.statSync(filePath).size;
return fs14.statSync(filePath).size;
}
function resolvePaths(patterns) {
return __awaiter2(this, void 0, void 0, function* () {
@@ -50824,7 +50825,7 @@ var require_cacheUtils = __commonJS({
}
function unlinkFile(filePath) {
return __awaiter2(this, void 0, void 0, function* () {
return util.promisify(fs13.unlink)(filePath);
return util.promisify(fs14.unlink)(filePath);
});
}
function getVersion(app_1) {
@@ -50866,7 +50867,7 @@ var require_cacheUtils = __commonJS({
}
function getGnuTarPathOnWindows() {
return __awaiter2(this, void 0, void 0, function* () {
if (fs13.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs14.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -91023,7 +91024,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_commonjs15();
var buffer = __importStar2(require("buffer"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var stream2 = __importStar2(require("stream"));
var util = __importStar2(require("util"));
var utils = __importStar2(require_cacheUtils());
@@ -91134,7 +91135,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter2(this, void 0, void 0, function* () {
const writeStream = fs13.createWriteStream(archivePath);
const writeStream = fs14.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -91159,7 +91160,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
return __awaiter2(this, void 0, void 0, function* () {
var _a;
const archiveDescriptor = yield fs13.promises.open(archivePath, "w");
const archiveDescriptor = yield fs14.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -91275,7 +91276,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs13.openSync(archivePath, "w");
const fd = fs14.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -91293,12 +91294,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs13.writeFileSync(fd, result);
fs14.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs13.closeSync(fd);
fs14.closeSync(fd);
}
}
});
@@ -91620,7 +91621,7 @@ var require_cacheHttpClient = __commonJS({
var core14 = __importStar2(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var url_1 = require("url");
var utils = __importStar2(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -91755,7 +91756,7 @@ Other caches with similar key:`);
return __awaiter2(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs13.openSync(archivePath, "r");
const fd = fs14.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -91769,7 +91770,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs13.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs14.createReadStream(archivePath, {
fd,
start,
end,
@@ -91780,7 +91781,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs13.closeSync(fd);
fs14.closeSync(fd);
}
return;
});
@@ -99033,7 +99034,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os3 = require("os");
var cp = require("child_process");
var fs13 = require("fs");
var fs14 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter2(this, void 0, void 0, function* () {
const platFilter = os3.platform();
@@ -99095,10 +99096,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs13.existsSync(lsbReleaseFile)) {
contents = fs13.readFileSync(lsbReleaseFile).toString();
} else if (fs13.existsSync(osReleaseFile)) {
contents = fs13.readFileSync(osReleaseFile).toString();
if (fs14.existsSync(lsbReleaseFile)) {
contents = fs14.readFileSync(lsbReleaseFile).toString();
} else if (fs14.existsSync(osReleaseFile)) {
contents = fs14.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -99307,7 +99308,7 @@ var require_tool_cache = __commonJS({
var core14 = __importStar2(require_core());
var io6 = __importStar2(require_io());
var crypto2 = __importStar2(require("crypto"));
var fs13 = __importStar2(require("fs"));
var fs14 = __importStar2(require("fs"));
var mm = __importStar2(require_manifest());
var os3 = __importStar2(require("os"));
var path13 = __importStar2(require("path"));
@@ -99353,7 +99354,7 @@ var require_tool_cache = __commonJS({
}
function downloadToolAttempt(url2, dest, auth2, headers) {
return __awaiter2(this, void 0, void 0, function* () {
if (fs13.existsSync(dest)) {
if (fs14.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent2, [], {
@@ -99377,7 +99378,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs13.createWriteStream(dest));
yield pipeline(readStream, fs14.createWriteStream(dest));
core14.debug("download complete");
succeeded = true;
return dest;
@@ -99589,11 +99590,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source dir: ${sourceDir}`);
if (!fs13.statSync(sourceDir).isDirectory()) {
if (!fs14.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs13.readdirSync(sourceDir)) {
for (const itemName of fs14.readdirSync(sourceDir)) {
const s = path13.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -99607,7 +99608,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source file: ${sourceFile}`);
if (!fs13.statSync(sourceFile).isFile()) {
if (!fs14.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -99636,7 +99637,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver9.clean(versionSpec) || "";
const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core14.debug(`checking cache: ${cachePath}`);
if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) {
if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) {
core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -99649,12 +99650,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os3.arch();
const toolPath = path13.join(_getCacheDirectory(), toolName);
if (fs13.existsSync(toolPath)) {
const children = fs13.readdirSync(toolPath);
if (fs14.existsSync(toolPath)) {
const children = fs14.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path13.join(toolPath, child, arch2 || "");
if (fs13.existsSync(fullPath) && fs13.existsSync(`${fullPath}.complete`)) {
if (fs14.existsSync(fullPath) && fs14.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -99725,7 +99726,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs13.writeFileSync(markerPath, "");
fs14.writeFileSync(markerPath, "");
core14.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -103252,21 +103253,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs13 = options.fs || await import("node:fs/promises");
const fs14 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs13.lstat(itemPath, { bigint: true }) : await fs13.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
const stats = returnType.strict ? await fs14.lstat(itemPath, { bigint: true }) : await fs14.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs13.readdir(itemPath) : await fs13.readdir(itemPath).catch((error3) => errors.push(error3));
const directoryItems = returnType.strict ? await fs14.readdir(itemPath) : await fs14.readdir(itemPath).catch((error3) => errors.push(error3));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -105906,17 +105907,6 @@ function getExtraOptionsEnvParam() {
);
}
}
function getToolNames(sarif) {
const toolNames = {};
for (const run2 of sarif.runs || []) {
const tool = run2.tool || {};
const driver = tool.driver || {};
if (typeof driver.name === "string" && driver.name.length > 0) {
toolNames[driver.name] = true;
}
}
return Object.keys(toolNames);
}
function getCodeQLDatabasePath(config, language) {
return path.resolve(config.dbLocation, language);
}
@@ -107493,12 +107483,83 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) {
}
}
// src/sarif/index.ts
var fs5 = __toESM(require("fs"));
// Thrown when user-provided SARIF is invalid (e.g. a JSON syntax error when
// reading a SARIF file, or a log with no `runs` array), so callers can
// distinguish user-fixable upload problems from internal errors.
var InvalidSarifUploadError = class extends Error {
};
// Returns the distinct tool driver names found across all runs of a SARIF
// log, in first-seen order. Runs without a non-empty string driver name
// are ignored.
function getToolNames(sarifFile) {
  const names = new Set();
  for (const run2 of sarifFile.runs || []) {
    const driverName = (run2.tool || {}).driver?.name;
    if (typeof driverName === "string" && driverName.length > 0) {
      names.add(driverName);
    }
  }
  return Array.from(names);
}
// Reads the file at `sarifFilePath` and parses its contents as JSON.
// Read/parse errors propagate unchanged; callers that need a user-facing
// error wrap this call (see readSarifFileOrThrow).
function readSarifFile(sarifFilePath) {
  const contents = fs5.readFileSync(sarifFilePath, "utf8");
  return JSON.parse(contents);
}
// Merges several SARIF files into a single log by concatenating their runs.
// All inputs must agree on the SARIF version; a mismatch is reported as a
// user-fixable InvalidSarifUploadError. With no inputs (or inputs carrying
// no version) the result defaults to SARIF 2.1.0.
function combineSarifFiles(sarifFiles, logger) {
  logger.info(`Loading SARIF file(s)`);
  let version = void 0;
  const allRuns = [];
  for (const sarifFile of sarifFiles) {
    logger.debug(`Loading SARIF file: ${sarifFile}`);
    const parsed = readSarifFile(sarifFile);
    if (version !== void 0 && version !== parsed.version) {
      throw new InvalidSarifUploadError(
        `Different SARIF versions encountered: ${version} and ${parsed.version}`
      );
    }
    if (version === void 0) {
      version = parsed.version;
    }
    for (const run2 of parsed?.runs || []) {
      allRuns.push(run2);
    }
  }
  if (version === void 0) {
    version = "2.1.0";
  }
  return { version, runs: allRuns };
}
// Checks whether every run in every supplied SARIF log was produced by the
// "CodeQL" driver. A log with no `runs` property makes the whole check
// false (its optional-chained `every` yields undefined, which is falsy).
function areAllRunsProducedByCodeQL(sarifLogs) {
  for (const sarifLog of sarifLogs) {
    const allCodeQL = sarifLog.runs?.every(
      (run2) => run2.tool?.driver?.name === "CodeQL"
    );
    if (!allCodeQL) {
      return false;
    }
  }
  return true;
}
// Builds an identity key for a SARIF run from its driver metadata and
// automation id. Runs whose keys serialize identically are treated as
// duplicates by the uniqueness check.
function createRunKey(run2) {
  const driver = run2.tool?.driver;
  return {
    name: driver?.name,
    fullName: driver?.fullName,
    version: driver?.version,
    semanticVersion: driver?.semanticVersion,
    guid: driver?.guid,
    automationId: run2.automationDetails?.id
  };
}
// Returns true when no two runs across the supplied SARIF logs share the
// same identity key (driver metadata + automation id). Logs whose `runs`
// property is undefined contribute nothing to the check.
function areAllRunsUnique(sarifLogs) {
  const seenKeys = new Set();
  for (const sarifLog of sarifLogs) {
    if (sarifLog.runs === void 0) {
      continue;
    }
    for (const run2 of sarifLog.runs) {
      const serialized = JSON.stringify(createRunKey(run2));
      if (seenKeys.has(serialized)) {
        return false;
      }
      seenKeys.add(serialized);
    }
  }
  return true;
}
// src/status-report.ts
var os = __toESM(require("os"));
var core9 = __toESM(require_core());
// src/config-utils.ts
var fs6 = __toESM(require("fs"));
var fs7 = __toESM(require("fs"));
var path7 = __toESM(require("path"));
// src/config/db-config.ts
@@ -107583,18 +107644,18 @@ function writeDiagnostic(config, language, diagnostic) {
}
// src/diff-informed-analysis-utils.ts
var fs5 = __toESM(require("fs"));
var fs6 = __toESM(require("fs"));
var path6 = __toESM(require("path"));
function getDiffRangesJsonFilePath() {
return path6.join(getTemporaryDirectory(), "pr-diff-range.json");
}
function readDiffRangesJsonFile(logger) {
const jsonFilePath = getDiffRangesJsonFilePath();
if (!fs5.existsSync(jsonFilePath)) {
if (!fs6.existsSync(jsonFilePath)) {
logger.debug(`Diff ranges JSON file does not exist at ${jsonFilePath}`);
return void 0;
}
const jsonContents = fs5.readFileSync(jsonFilePath, "utf8");
const jsonContents = fs6.readFileSync(jsonFilePath, "utf8");
logger.debug(
`Read pr-diff-range JSON file from ${jsonFilePath}:
${jsonContents}`
@@ -107643,10 +107704,10 @@ function getPathToParsedConfigFile(tempDir) {
}
async function getConfig(tempDir, logger) {
const configFile = getPathToParsedConfigFile(tempDir);
if (!fs6.existsSync(configFile)) {
if (!fs7.existsSync(configFile)) {
return void 0;
}
const configString = fs6.readFileSync(configFile, "utf8");
const configString = fs7.readFileSync(configFile, "utf8");
logger.debug("Loaded config:");
logger.debug(configString);
const config = JSON.parse(configString);
@@ -107890,7 +107951,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error
}
// src/upload-lib.ts
var fs12 = __toESM(require("fs"));
var fs13 = __toESM(require("fs"));
var path12 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
@@ -107898,7 +107959,7 @@ var core12 = __toESM(require_core());
var jsonschema2 = __toESM(require_lib2());
// src/codeql.ts
var fs10 = __toESM(require("fs"));
var fs11 = __toESM(require("fs"));
var path10 = __toESM(require("path"));
var core11 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
@@ -108146,7 +108207,7 @@ function wrapCliConfigurationError(cliError) {
}
// src/setup-codeql.ts
var fs9 = __toESM(require("fs"));
var fs10 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
var toolcache3 = __toESM(require_tool_cache());
var import_fast_deep_equal = __toESM(require_fast_deep_equal());
@@ -108208,7 +108269,7 @@ var v4_default = v4;
// src/tar.ts
var import_child_process = require("child_process");
var fs7 = __toESM(require("fs"));
var fs8 = __toESM(require("fs"));
var stream = __toESM(require("stream"));
var import_toolrunner = __toESM(require_toolrunner());
var io4 = __toESM(require_io());
@@ -108281,7 +108342,7 @@ async function isZstdAvailable(logger) {
}
}
async function extract(tarPath, dest, compressionMethod, tarVersion, logger) {
fs7.mkdirSync(dest, { recursive: true });
fs8.mkdirSync(dest, { recursive: true });
switch (compressionMethod) {
case "gzip":
return await toolcache.extractTar(tarPath, dest);
@@ -108365,7 +108426,7 @@ function inferCompressionMethod(tarPath) {
}
// src/tools-download.ts
var fs8 = __toESM(require("fs"));
var fs9 = __toESM(require("fs"));
var os2 = __toESM(require("os"));
var path8 = __toESM(require("path"));
var import_perf_hooks = require("perf_hooks");
@@ -108472,7 +108533,7 @@ async function downloadAndExtract(codeqlURL, compressionMethod, dest, authorizat
};
}
async function downloadAndExtractZstdWithStreaming(codeqlURL, dest, authorization, headers, tarVersion, logger) {
fs8.mkdirSync(dest, { recursive: true });
fs9.mkdirSync(dest, { recursive: true });
const agent = new import_http_client.HttpClient().getAgent(codeqlURL);
headers = Object.assign(
{ "User-Agent": "CodeQL Action" },
@@ -108509,7 +108570,7 @@ function getToolcacheDirectory(version) {
}
function writeToolcacheMarkerFile(extractedPath, logger) {
const markerFilePath = `${extractedPath}.complete`;
fs8.writeFileSync(markerFilePath, "");
fs9.writeFileSync(markerFilePath, "");
logger.info(`Created toolcache marker file ${markerFilePath}`);
}
function sanitizeUrlForStatusReport(url2) {
@@ -108644,7 +108705,7 @@ async function findOverridingToolsInCache(humanReadableVersion, logger) {
const candidates = toolcache3.findAllVersions("CodeQL").filter(isGoodVersion).map((version) => ({
folder: toolcache3.find("CodeQL", version),
version
})).filter(({ folder }) => fs9.existsSync(path9.join(folder, "pinned-version")));
})).filter(({ folder }) => fs10.existsSync(path9.join(folder, "pinned-version")));
if (candidates.length === 1) {
const candidate = candidates[0];
logger.debug(
@@ -109198,7 +109259,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
"tools",
"tracing-config.lua"
);
return fs10.existsSync(tracingConfigPath);
return fs11.existsSync(tracingConfigPath);
},
async isScannedLanguage(language) {
return !await this.isTracedLanguage(language);
@@ -109678,7 +109739,7 @@ async function writeCodeScanningConfigFile(config, logger) {
logger.startGroup("Augmented user configuration file contents");
logger.info(dump(augmentedConfig));
logger.endGroup();
fs10.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
fs11.writeFileSync(codeScanningConfigFile, dump(augmentedConfig));
return codeScanningConfigFile;
}
var TRAP_CACHE_SIZE_MB = 1024;
@@ -109722,7 +109783,7 @@ async function getJobRunUuidSarifOptions(codeql) {
}
// src/fingerprints.ts
var fs11 = __toESM(require("fs"));
var fs12 = __toESM(require("fs"));
var import_path2 = __toESM(require("path"));
// node_modules/long/index.js
@@ -110710,7 +110771,7 @@ async function hash(callback, filepath) {
}
updateHash(current);
};
const readStream = fs11.createReadStream(filepath, "utf8");
const readStream = fs12.createReadStream(filepath, "utf8");
for await (const data of readStream) {
for (let i = 0; i < data.length; ++i) {
processCharacter(data.charCodeAt(i));
@@ -110785,22 +110846,22 @@ function resolveUriToFile(location, artifacts, sourceRoot, logger) {
if (!import_path2.default.isAbsolute(uri)) {
uri = srcRootPrefix + uri;
}
if (!fs11.existsSync(uri)) {
if (!fs12.existsSync(uri)) {
logger.debug(`Unable to compute fingerprint for non-existent file: ${uri}`);
return void 0;
}
if (fs11.statSync(uri).isDirectory()) {
if (fs12.statSync(uri).isDirectory()) {
logger.debug(`Unable to compute fingerprint for directory: ${uri}`);
return void 0;
}
return uri;
}
async function addFingerprints(sarif, sourceRoot, logger) {
async function addFingerprints(sarifLog, sourceRoot, logger) {
logger.info(
`Adding fingerprints to SARIF file. See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.`
);
const callbacksByFile = {};
for (const run2 of sarif.runs || []) {
for (const run2 of sarifLog.runs || []) {
const artifacts = run2.artifacts || [];
for (const result of run2.results || []) {
const primaryLocation = (result.locations || [])[0];
@@ -110840,7 +110901,7 @@ async function addFingerprints(sarif, sourceRoot, logger) {
};
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}
// src/init.ts
@@ -110878,58 +110939,6 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe
// src/upload-lib.ts
var GENERIC_403_MSG = "The repo on which this action is running has not opted-in to CodeQL code scanning.";
var GENERIC_404_MSG = "The CodeQL code scanning feature is forbidden on this repository.";
// Takes a list of paths to SARIF files and combines them together in memory,
// returning the contents of the combined SARIF file.
// Throws InvalidSarifUploadError if the input files declare different SARIF versions.
function combineSarifFiles(sarifFiles, logger) {
  logger.info(`Loading SARIF file(s)`);
  // Accumulator for the combined log; `version` is adopted from the first file read.
  const combinedSarif = {
    version: null,
    runs: []
  };
  for (const sarifFile of sarifFiles) {
    logger.debug(`Loading SARIF file: ${sarifFile}`);
    const sarifObject = JSON.parse(
      fs12.readFileSync(sarifFile, "utf8")
    );
    // The first file determines the combined version; all later files must match it.
    if (combinedSarif.version === null) {
      combinedSarif.version = sarifObject.version;
    } else if (combinedSarif.version !== sarifObject.version) {
      throw new InvalidSarifUploadError(
        `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`
      );
    }
    // Concatenate the runs from every input file.
    combinedSarif.runs.push(...sarifObject.runs);
  }
  return combinedSarif;
}
// Returns true only if every run in every given SARIF object names
// "CodeQL" as its tool driver; an object without a `runs` array yields false.
function areAllRunsProducedByCodeQL(sarifObjects) {
  const isCodeQLRun = (run2) => run2.tool?.driver?.name === "CodeQL";
  return sarifObjects.every((sarifObject) => sarifObject.runs?.every(isCodeQLRun));
}
// Builds the identity key Code Scanning uses to distinguish analysis
// categories: selected tool driver fields plus the run's automation ID.
function createRunKey(run2) {
  const driver = run2.tool?.driver;
  return {
    name: driver?.name,
    fullName: driver?.fullName,
    version: driver?.version,
    semanticVersion: driver?.semanticVersion,
    guid: driver?.guid,
    automationId: run2.automationDetails?.id
  };
}
// True when no two runs across all the given SARIF objects share the same
// Code Scanning category key (see createRunKey); bails out on the first duplicate.
function areAllRunsUnique(sarifObjects) {
  const seenKeys = new Set();
  for (const sarifObject of sarifObjects) {
    for (const run2 of sarifObject.runs) {
      const serializedKey = JSON.stringify(createRunKey(run2));
      if (seenKeys.has(serializedKey)) {
        return false;
      }
      seenKeys.add(serializedKey);
    }
  }
  return true;
}
async function shouldShowCombineSarifFilesDeprecationWarning(sarifObjects, githubVersion) {
if (githubVersion.type === "GitHub Enterprise Server" /* GHES */ && satisfiesGHESVersion(githubVersion.version, "<3.14", true)) {
return false;
@@ -110958,9 +110967,7 @@ async function shouldDisableCombineSarifFiles(sarifObjects, githubVersion) {
}
async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, logger) {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile) => {
return JSON.parse(fs12.readFileSync(sarifFile, "utf8"));
});
const sarifObjects = sarifFiles.map(readSarifFile);
const deprecationWarningMessage = gitHubVersion.type === "GitHub Enterprise Server" /* GHES */ ? "and will be removed in GitHub Enterprise Server 3.18" : "and will be removed in July 2025";
const deprecationMoreInformationMessage = "For more information, see https://github.blog/changelog/2024-05-06-code-scanning-will-stop-combining-runs-from-a-single-upload";
if (!areAllRunsProducedByCodeQL(sarifObjects)) {
@@ -111013,27 +111020,27 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
codeQL = initCodeQLResult.codeql;
}
const baseTempDir = path12.resolve(tempDir, "combined-sarif");
fs12.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs12.mkdtempSync(path12.resolve(baseTempDir, "output-"));
fs13.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs13.mkdtempSync(path12.resolve(baseTempDir, "output-"));
const outputFile = path12.resolve(outputDirectory, "combined-sarif.sarif");
await codeQL.mergeResults(sarifFiles, outputFile, {
mergeRunsFromEqualCategory: true
});
return JSON.parse(fs12.readFileSync(outputFile, "utf8"));
return readSarifFile(outputFile);
}
function populateRunAutomationDetails(sarif, category, analysis_key, environment) {
function populateRunAutomationDetails(sarifFile, category, analysis_key, environment) {
const automationID = getAutomationID2(category, analysis_key, environment);
if (automationID !== void 0) {
for (const run2 of sarif.runs || []) {
for (const run2 of sarifFile.runs || []) {
if (run2.automationDetails === void 0) {
run2.automationDetails = {
id: automationID
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID2(category, analysis_key, environment) {
if (category !== void 0) {
@@ -111056,7 +111063,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
`SARIF upload disabled by an environment variable. Saving to ${payloadSaveFile}`
);
logger.info(`Payload: ${JSON.stringify(payload, null, 2)}`);
fs12.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
fs13.writeFileSync(payloadSaveFile, JSON.stringify(payload, null, 2));
return "dummy-sarif-id";
}
const client = getApiClient();
@@ -111090,7 +111097,7 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
function findSarifFilesInDir(sarifPath, isSarif) {
const sarifFiles = [];
const walkSarifFiles = (dir) => {
const entries = fs12.readdirSync(dir, { withFileTypes: true });
const entries = fs13.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile() && isSarif(entry.name)) {
sarifFiles.push(path12.resolve(dir, entry.name));
@@ -111103,7 +111110,7 @@ function findSarifFilesInDir(sarifPath, isSarif) {
return sarifFiles;
}
async function getGroupedSarifFilePaths(logger, sarifPath) {
const stats = fs12.statSync(sarifPath, { throwIfNoEntry: false });
const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false });
if (stats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
}
@@ -111150,9 +111157,9 @@ async function getGroupedSarifFilePaths(logger, sarifPath) {
}
return results;
}
function countResultsInSarif(sarif) {
function countResultsInSarif(sarifLog) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -111166,26 +111173,26 @@ function countResultsInSarif(sarif) {
}
return numResults;
}
function readSarifFile(sarifFilePath) {
function readSarifFileOrThrow(sarifFilePath) {
try {
return JSON.parse(fs12.readFileSync(sarifFilePath, "utf8"));
return readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`
);
}
}
function validateSarifFileSchema(sarif, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarif]) && // We want to validate CodeQL SARIF in testing environments.
function validateSarifFileSchema(sarifLog, sarifFilePath, logger) {
if (areAllRunsProducedByCodeQL([sarifLog]) && // We want to validate CodeQL SARIF in testing environments.
!getTestingEnvironment()) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema2.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarifLog, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -111212,6 +111219,7 @@ ${sarifErrors.join(
)}`
);
}
return true;
}
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, workflowRunAttempt, checkoutURI, environment, toolNames, mergeBaseCommitOid) {
const payloadObj = {
@@ -111237,7 +111245,7 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
payloadObj.base_sha = mergeBaseCommitOid;
} else if (process.env.GITHUB_EVENT_PATH) {
const githubEvent = JSON.parse(
fs12.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
fs13.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
);
payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
payloadObj.base_sha = githubEvent.pull_request.base.sha;
@@ -111248,14 +111256,14 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
let sarifLog;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -111263,21 +111271,21 @@ async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths,
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await getAnalysisKey();
const environment = getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
@@ -111294,12 +111302,12 @@ async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProc
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -111341,9 +111349,9 @@ async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, post
};
}
function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
if (!fs12.existsSync(outputDir)) {
fs12.mkdirSync(outputDir, { recursive: true });
} else if (!fs12.lstatSync(outputDir).isDirectory()) {
if (!fs13.existsSync(outputDir)) {
fs13.mkdirSync(outputDir, { recursive: true });
} else if (!fs13.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
@@ -111353,7 +111361,7 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs12.writeFileSync(outputFile, sarifPayload);
fs13.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
var STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1e3;
@@ -111451,9 +111459,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger
assertNever(status);
}
}
function validateUniqueCategory(sarif, sentinelPrefix) {
function validateUniqueCategory(sarifLog, sentinelPrefix) {
const categories = {};
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs || []) {
const id = run2?.automationDetails?.id;
const tool = run2.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -111472,15 +111480,16 @@ function validateUniqueCategory(sarif, sentinelPrefix) {
function sanitize(str2) {
return (str2 ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
var InvalidSarifUploadError = class extends Error {
};
function filterAlertsByDiffRange(logger, sarif) {
function filterAlertsByDiffRange(logger, sarifLog) {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === void 0) {
return sarifLog;
}
const checkoutPath = getRequiredInput("checkout_path");
for (const run2 of sarif.runs) {
for (const run2 of sarifLog.runs) {
if (run2.results) {
run2.results = run2.results.filter((result) => {
const locations = [
@@ -111501,7 +111510,7 @@ function filterAlertsByDiffRange(logger, sarif) {
});
}
}
return sarif;
return sarifLog;
}
// src/upload-sarif.ts

8
package-lock.json generated
View File

@@ -43,6 +43,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",
@@ -2522,6 +2523,13 @@
"@types/node": "*"
}
},
"node_modules/@types/sarif": {
"version": "2.1.7",
"resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz",
"integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/semver": {
"version": "7.7.1",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.1.tgz",

View File

@@ -58,6 +58,7 @@
"@types/js-yaml": "^4.0.9",
"@types/node": "^20.19.9",
"@types/node-forge": "^1.3.14",
"@types/sarif": "^2.1.7",
"@types/semver": "^7.7.1",
"@types/sinon": "^21.0.0",
"ava": "^6.4.1",

View File

@@ -25,6 +25,7 @@ import { FeatureEnablement, Feature } from "./feature-flags";
import { KnownLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging";
import { OverlayDatabaseMode } from "./overlay";
import type * as sarif from "./sarif";
import { DatabaseCreationTimings, EventReport } from "./status-report";
import { endTracingForCluster } from "./tracer-config";
import * as util from "./util";
@@ -594,7 +595,7 @@ export async function runQueries(
function getPerQueryAlertCounts(sarifPath: string): Record<string, number> {
const sarifObject = JSON.parse(
fs.readFileSync(sarifPath, "utf8"),
) as util.SarifFile;
) as sarif.Log;
// We do not need to compute fingerprints because we are not sending data based off of locations.
// Generate the query: alert count object

View File

@@ -6,6 +6,7 @@ import test from "ava";
import * as fingerprints from "./fingerprints";
import { getRunnerLogger } from "./logging";
import * as sarif from "./sarif";
import { setupTests } from "./testing-utils";
import * as util from "./util";
@@ -201,7 +202,7 @@ test("addFingerprints", async (t) => {
fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting.input.sarif`)
.toString(),
) as util.SarifFile;
) as sarif.Log;
const expected = JSON.parse(
fs
.readFileSync(
@@ -229,7 +230,7 @@ test("missingRegions", async (t) => {
fs
.readFileSync(`${__dirname}/../src/testdata/fingerprinting2.input.sarif`)
.toString(),
) as util.SarifFile;
) as sarif.Log;
const expected = JSON.parse(
fs
.readFileSync(

View File

@@ -5,7 +5,7 @@ import Long from "long";
import { DocUrl } from "./doc-url";
import { Logger } from "./logging";
import { SarifFile, SarifResult } from "./util";
import type * as sarif from "./sarif";
const tab = "\t".charCodeAt(0);
const space = " ".charCodeAt(0);
@@ -138,7 +138,7 @@ export async function hash(callback: hashCallback, filepath: string) {
// Generate a hash callback function that updates the given result in-place
// when it receives a hash for the correct line number. Ignores hashes for other lines.
function locationUpdateCallback(
result: SarifResult,
result: sarif.Result,
location: any,
logger: Logger,
): hashCallback {
@@ -256,17 +256,17 @@ export function resolveUriToFile(
// Compute fingerprints for results in the given sarif file
// and return an updated sarif file contents.
export async function addFingerprints(
sarif: SarifFile,
sarifLog: Partial<sarif.Log>,
sourceRoot: string,
logger: Logger,
): Promise<SarifFile> {
): Promise<Partial<sarif.Log>> {
logger.info(
`Adding fingerprints to SARIF file. See ${DocUrl.TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS} for more information.`,
);
// Gather together results for the same file and construct
// callbacks to accept hashes for that file and update the location
const callbacksByFile: { [filename: string]: hashCallback[] } = {};
for (const run of sarif.runs || []) {
for (const run of sarifLog.runs || []) {
// We may need the list of artifacts to resolve against
const artifacts = run.artifacts || [];
@@ -316,5 +316,5 @@ export async function addFingerprints(
await hash(teeCallback, filepath);
}
return sarif;
return sarifLog;
}

18
src/sarif/index.test.ts Normal file
View File

@@ -0,0 +1,18 @@
import * as fs from "fs";
import test from "ava";
import { setupTests } from "../testing-utils";
import { getToolNames, type Log } from ".";
setupTests(test);
// Checks that `getToolNames` extracts the unique tool driver names from a
// SARIF fixture containing runs produced by multiple tools.
test("getToolNames", (t) => {
  const input = fs.readFileSync(
    `${__dirname}/../../src/testdata/tool-names.sarif`,
    "utf8",
  );
  const toolNames = getToolNames(JSON.parse(input) as Log);
  t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});

141
src/sarif/index.ts Normal file
View File

@@ -0,0 +1,141 @@
import * as fs from "fs";
import { Logger } from "../logging";
import * as sarif from "sarif";
export type * from "sarif";
/**
 * Describes a SARIF run (either uniquely or not uniquely) based on the
 * criteria used by Code Scanning to determine analysis categories.
 *
 * Extends `ToolComponent` with the non-standard `automationId` property we use.
 */
export type RunKey = sarif.ToolComponent & {
  // The run's `automationDetails.id`, if any (see `createRunKey`).
  automationId: string | undefined;
};
/**
 * An error that occurred due to an invalid SARIF upload request.
 *
 * Thrown, for example, by `combineSarifFiles` when the input files declare
 * different SARIF versions.
 */
export class InvalidSarifUploadError extends Error {}
/**
 * Get the array of all the tool names contained in the given SARIF log.
 *
 * @param sarifFile The parsed SARIF log to inspect.
 * @returns An array of unique, non-empty tool driver names, in the order in
 *          which they first appear in the log's runs.
 */
export function getToolNames(sarifFile: Partial<sarif.Log>): string[] {
  // Use a Set rather than an untyped object-as-map: it deduplicates, iterates
  // in insertion order (Object.keys reorders numeric-string keys), and
  // type-checks under `strict` (indexing `{}` is an implicit-any error).
  const toolNames = new Set<string>();
  for (const run of sarifFile.runs || []) {
    const name = run.tool?.driver?.name;
    if (typeof name === "string" && name.length > 0) {
      toolNames.add(name);
    }
  }
  return [...toolNames];
}
/**
 * Loads the JSON contents of the file at `sarifFilePath`. No SARIF schema
 * validation is performed: the call throws only when the file cannot be read
 * or is not syntactically valid JSON.
 *
 * @param sarifFilePath Path of the file to load.
 * @returns The parsed JSON value, cast to a SARIF `Log`.
 */
export function readSarifFile(sarifFilePath: string): Partial<sarif.Log> {
  const contents = fs.readFileSync(sarifFilePath, "utf8");
  return JSON.parse(contents) as sarif.Log;
}
/**
 * Combines the SARIF files at the given paths into a single in-memory SARIF
 * log whose `runs` array is the concatenation of the runs of all inputs.
 *
 * @param sarifFiles Paths of the SARIF files to combine.
 * @param logger The logger to report progress to.
 * @returns The combined SARIF log.
 * @throws InvalidSarifUploadError when two inputs declare different SARIF versions.
 */
export function combineSarifFiles(
  sarifFiles: string[],
  logger: Logger,
): sarif.Log {
  logger.info(`Loading SARIF file(s)`);
  let version: sarif.Log.version | undefined = undefined;
  const runs: sarif.Run[] = [];
  for (const sarifFile of sarifFiles) {
    logger.debug(`Loading SARIF file: ${sarifFile}`);
    const sarifLog = readSarifFile(sarifFile);
    // Adopt the version of the first file we read; every later file must
    // agree with it.
    if (version === undefined) {
      version = sarifLog.version;
    } else if (version !== sarifLog.version) {
      throw new InvalidSarifUploadError(
        `Different SARIF versions encountered: ${version} and ${sarifLog.version}`,
      );
    }
    runs.push(...(sarifLog?.runs || []));
  }
  // Input files are not guaranteed to carry a version property; fall back to
  // the expected version if none was found.
  if (version === undefined) {
    version = "2.1.0";
  }
  return { version, runs };
}
/**
 * Determines whether every run in each of the given SARIF logs was produced
 * by the CodeQL tool driver.
 *
 * @param sarifLogs The list of SARIF logs to check.
 * @returns True only if all runs in all logs name "CodeQL" as their driver;
 *          a log without a `runs` array yields false.
 */
export function areAllRunsProducedByCodeQL(
  sarifLogs: Array<Partial<sarif.Log>>,
): boolean {
  const producedByCodeQL = (run: sarif.Run) =>
    run.tool?.driver?.name === "CodeQL";
  return sarifLogs.every((sarifLog) => sarifLog.runs?.every(producedByCodeQL));
}
/**
 * Computes the identity key that Code Scanning uses to assign this run to an
 * analysis category: selected tool driver fields plus the automation ID.
 */
function createRunKey(run: sarif.Run): RunKey {
  const driver = run.tool?.driver;
  return {
    name: driver?.name,
    fullName: driver?.fullName,
    version: driver?.version,
    semanticVersion: driver?.semanticVersion,
    guid: driver?.guid,
    automationId: run.automationDetails?.id,
  };
}
/**
 * Checks that no two runs across the given SARIF logs map to the same Code
 * Scanning analysis category (see `createRunKey`).
 *
 * @param sarifLogs The list of SARIF logs to check.
 * @returns False as soon as a duplicate run key is found; true otherwise.
 */
export function areAllRunsUnique(
  sarifLogs: Array<Partial<sarif.Log>>,
): boolean {
  const seenKeys = new Set<string>();
  for (const sarifLog of sarifLogs) {
    const runs = sarifLog.runs;
    if (runs === undefined) {
      continue;
    }
    for (const run of runs) {
      const serializedKey = JSON.stringify(createRunKey(run));
      if (seenKeys.has(serializedKey)) {
        // A second run with an identical key means the runs are not unique.
        return false;
      }
      seenKeys.add(serializedKey);
    }
  }
  return true;
}

View File

@@ -10,6 +10,7 @@ import * as analyses from "./analyses";
import { AnalysisKind, CodeQuality, CodeScanning } from "./analyses";
import * as api from "./api-client";
import { getRunnerLogger, Logger } from "./logging";
import * as sarif from "./sarif";
import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib";
import { UploadPayload } from "./upload-lib/types";
@@ -25,7 +26,7 @@ test("validateSarifFileSchema - valid", (t) => {
const inputFile = `${__dirname}/../src/testdata/valid-sarif.sarif`;
t.notThrows(() =>
uploadLib.validateSarifFileSchema(
uploadLib.readSarifFile(inputFile),
uploadLib.readSarifFileOrThrow(inputFile),
inputFile,
getRunnerLogger(true),
),
@@ -36,7 +37,7 @@ test("validateSarifFileSchema - invalid", (t) => {
const inputFile = `${__dirname}/../src/testdata/invalid-sarif.sarif`;
t.throws(() =>
uploadLib.validateSarifFileSchema(
uploadLib.readSarifFile(inputFile),
uploadLib.readSarifFileOrThrow(inputFile),
inputFile,
getRunnerLogger(true),
),
@@ -262,18 +263,23 @@ test("getGroupedSarifFilePaths - Other file", async (t) => {
});
test("populateRunAutomationDetails", (t) => {
let sarif = {
runs: [{}],
const tool = { driver: { name: "test tool" } };
let sarifLog: sarif.Log = {
version: "2.1.0",
runs: [{ tool }],
};
const analysisKey = ".github/workflows/codeql-analysis.yml:analyze";
let expectedSarif = {
runs: [{ automationDetails: { id: "language:javascript/os:linux/" } }],
let expectedSarif: sarif.Log = {
version: "2.1.0",
runs: [
{ tool, automationDetails: { id: "language:javascript/os:linux/" } },
],
};
// Category has priority over analysis_key/environment
let modifiedSarif = uploadLib.populateRunAutomationDetails(
sarif,
sarifLog,
"language:javascript/os:linux",
analysisKey,
'{"language": "other", "os": "other"}',
@@ -282,7 +288,7 @@ test("populateRunAutomationDetails", (t) => {
// It doesn't matter if the category has a slash at the end or not
modifiedSarif = uploadLib.populateRunAutomationDetails(
sarif,
sarifLog,
"language:javascript/os:linux/",
analysisKey,
"",
@@ -290,10 +296,16 @@ test("populateRunAutomationDetails", (t) => {
t.deepEqual(modifiedSarif, expectedSarif);
// check that the automation details doesn't get overwritten
sarif = { runs: [{ automationDetails: { id: "my_id" } }] };
expectedSarif = { runs: [{ automationDetails: { id: "my_id" } }] };
sarifLog = {
version: "2.1.0",
runs: [{ tool, automationDetails: { id: "my_id" } }],
};
expectedSarif = {
version: "2.1.0",
runs: [{ tool, automationDetails: { id: "my_id" } }],
};
modifiedSarif = uploadLib.populateRunAutomationDetails(
sarif,
sarifLog,
undefined,
analysisKey,
'{"os": "linux", "language": "javascript"}',
@@ -301,11 +313,16 @@ test("populateRunAutomationDetails", (t) => {
t.deepEqual(modifiedSarif, expectedSarif);
// check multiple runs
sarif = { runs: [{ automationDetails: { id: "my_id" } }, {}] };
sarifLog = {
version: "2.1.0",
runs: [{ tool, automationDetails: { id: "my_id" } }, { tool }],
};
expectedSarif = {
version: "2.1.0",
runs: [
{ automationDetails: { id: "my_id" } },
{ tool, automationDetails: { id: "my_id" } },
{
tool,
automationDetails: {
id: ".github/workflows/codeql-analysis.yml:analyze/language:javascript/os:linux/",
},
@@ -313,7 +330,7 @@ test("populateRunAutomationDetails", (t) => {
],
};
modifiedSarif = uploadLib.populateRunAutomationDetails(
sarif,
sarifLog,
undefined,
analysisKey,
'{"os": "linux", "language": "javascript"}',
@@ -515,20 +532,8 @@ test("validateUniqueCategory for automation details id and tool name", (t) => {
);
// Our category sanitization is not perfect. Here are some examples
// of where we see false clashes
t.notThrows(() =>
uploadLib.validateUniqueCategory(
createMockSarif("abc"),
CodeScanning.sentinelPrefix,
),
);
t.throws(() =>
uploadLib.validateUniqueCategory(
createMockSarif("abc", "_"),
CodeScanning.sentinelPrefix,
),
);
// of where we see false clashes because we replace some characters
// with `_` in `sanitize`.
t.notThrows(() =>
uploadLib.validateUniqueCategory(
createMockSarif("abc", "def__"),
@@ -537,7 +542,7 @@ test("validateUniqueCategory for automation details id and tool name", (t) => {
);
t.throws(() =>
uploadLib.validateUniqueCategory(
createMockSarif("abc_def"),
createMockSarif("abc_def", "_"),
CodeScanning.sentinelPrefix,
),
);
@@ -561,7 +566,10 @@ test("validateUniqueCategory for multiple runs", (t) => {
const sarif2 = createMockSarif("ghi", "jkl");
// duplicate categories are allowed within the same sarif file
const multiSarif = { runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]] };
const multiSarif: sarif.Log = {
version: "2.1.0",
runs: [sarif1.runs[0], sarif1.runs[0], sarif2.runs[0]],
};
t.notThrows(() =>
uploadLib.validateUniqueCategory(multiSarif, CodeScanning.sentinelPrefix),
);
@@ -600,7 +608,7 @@ test("accept results with invalid artifactLocation.uri value", (t) => {
const sarifFile = `${__dirname}/../src/testdata/with-invalid-uri.sarif`;
uploadLib.validateSarifFileSchema(
uploadLib.readSarifFile(sarifFile),
uploadLib.readSarifFileOrThrow(sarifFile),
sarifFile,
mockLogger,
);
@@ -891,8 +899,9 @@ test("shouldConsiderInvalidRequest returns correct recognises processing errors"
t.false(uploadLib.shouldConsiderInvalidRequest(error3));
});
function createMockSarif(id?: string, tool?: string) {
function createMockSarif(id?: string, tool?: string): sarif.Log {
return {
version: "2.1.0",
runs: [
{
automationDetails: {
@@ -900,7 +909,7 @@ function createMockSarif(id?: string, tool?: string) {
},
tool: {
driver: {
name: tool,
name: tool || "test tool",
},
},
},

View File

@@ -21,6 +21,13 @@ import * as gitUtils from "./git-utils";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import * as sarif from "./sarif";
import {
areAllRunsProducedByCodeQL,
areAllRunsUnique,
combineSarifFiles,
InvalidSarifUploadError,
} from "./sarif";
import { BasePayload, UploadPayload } from "./upload-lib/types";
import * as util from "./util";
import {
@@ -30,8 +37,6 @@ import {
GitHubVariant,
GitHubVersion,
satisfiesGHESVersion,
SarifFile,
SarifRun,
} from "./util";
const GENERIC_403_MSG =
@@ -39,94 +44,9 @@ const GENERIC_403_MSG =
const GENERIC_404_MSG =
"The CodeQL code scanning feature is forbidden on this repository.";
// Takes a list of paths to sarif files and combines them together,
// returning the contents of the combined sarif file.
// Throws `InvalidSarifUploadError` if the input files declare different
// SARIF versions.
function combineSarifFiles(sarifFiles: string[], logger: Logger): SarifFile {
  logger.info(`Loading SARIF file(s)`);
  // Accumulator for the combined log; `version` is adopted from the first
  // file that is read.
  const combinedSarif: SarifFile = {
    version: null,
    runs: [],
  };
  for (const sarifFile of sarifFiles) {
    logger.debug(`Loading SARIF file: ${sarifFile}`);
    const sarifObject = JSON.parse(
      fs.readFileSync(sarifFile, "utf8"),
    ) as SarifFile;
    // Check SARIF version: the first file determines the combined version;
    // all later files must match it.
    if (combinedSarif.version === null) {
      combinedSarif.version = sarifObject.version;
    } else if (combinedSarif.version !== sarifObject.version) {
      throw new InvalidSarifUploadError(
        `Different SARIF versions encountered: ${combinedSarif.version} and ${sarifObject.version}`,
      );
    }
    // Concatenate the runs from every input file.
    combinedSarif.runs.push(...sarifObject.runs);
  }
  return combinedSarif;
}
/**
 * Checks whether all the runs in the given SARIF files were produced by CodeQL.
 * @param sarifObjects The list of SARIF objects to check.
 * @returns True only if every run in every file names "CodeQL" as its tool
 *          driver; a file whose `runs` property is missing yields false.
 */
function areAllRunsProducedByCodeQL(sarifObjects: SarifFile[]): boolean {
  return sarifObjects.every((sarifObject) => {
    return sarifObject.runs?.every(
      (run) => run.tool?.driver?.name === "CodeQL",
    );
  });
}
/**
 * Describes a SARIF run (either uniquely or not uniquely) based on the
 * criteria used by Code Scanning to determine analysis categories.
 */
type SarifRunKey = {
  name: string | undefined;
  fullName: string | undefined;
  version: string | undefined;
  semanticVersion: string | undefined;
  guid: string | undefined;
  automationId: string | undefined;
};
// Builds the identity key that Code Scanning uses to distinguish analysis
// categories: selected tool driver fields plus the run's automation ID.
function createRunKey(run: SarifRun): SarifRunKey {
  return {
    name: run.tool?.driver?.name,
    fullName: run.tool?.driver?.fullName,
    version: run.tool?.driver?.version,
    semanticVersion: run.tool?.driver?.semanticVersion,
    guid: run.tool?.driver?.guid,
    automationId: run.automationDetails?.id,
  };
}
/**
 * Checks whether all runs in the given SARIF files are unique (based on the
 * criteria used by Code Scanning to determine analysis categories).
 * @param sarifObjects The list of SARIF objects to check.
 * @returns False as soon as two runs share the same key (see `createRunKey`);
 *          true otherwise.
 */
function areAllRunsUnique(sarifObjects: SarifFile[]): boolean {
  const keys = new Set<string>();
  for (const sarifObject of sarifObjects) {
    for (const run of sarifObject.runs) {
      const key = JSON.stringify(createRunKey(run));
      // If the key already exists, the runs are not unique.
      if (keys.has(key)) {
        return false;
      }
      keys.add(key);
    }
  }
  return true;
}
// Checks whether the deprecation warning for combining SARIF files should be shown.
export async function shouldShowCombineSarifFilesDeprecationWarning(
sarifObjects: util.SarifFile[],
sarifObjects: Array<Partial<sarif.Log>>,
githubVersion: GitHubVersion,
) {
// Do not show this warning on GHES versions before 3.14.0
@@ -146,7 +66,7 @@ export async function shouldShowCombineSarifFilesDeprecationWarning(
}
export async function throwIfCombineSarifFilesDisabled(
sarifObjects: util.SarifFile[],
sarifObjects: Array<Partial<sarif.Log>>,
githubVersion: GitHubVersion,
) {
if (!(await shouldDisableCombineSarifFiles(sarifObjects, githubVersion))) {
@@ -163,7 +83,7 @@ export async function throwIfCombineSarifFilesDisabled(
// Checks whether combining SARIF files should be disabled.
async function shouldDisableCombineSarifFiles(
sarifObjects: util.SarifFile[],
sarifObjects: Array<Partial<sarif.Log>>,
githubVersion: GitHubVersion,
) {
if (githubVersion.type === GitHubVariant.GHES) {
@@ -192,12 +112,10 @@ async function combineSarifFilesUsingCLI(
gitHubVersion: GitHubVersion,
features: FeatureEnablement,
logger: Logger,
): Promise<SarifFile> {
): Promise<Partial<sarif.Log>> {
logger.info("Combining SARIF files using the CodeQL CLI");
const sarifObjects = sarifFiles.map((sarifFile): SarifFile => {
return JSON.parse(fs.readFileSync(sarifFile, "utf8")) as SarifFile;
});
const sarifObjects = sarifFiles.map(sarif.readSarifFile);
const deprecationWarningMessage =
gitHubVersion.type === GitHubVariant.GHES
@@ -279,30 +197,30 @@ async function combineSarifFilesUsingCLI(
mergeRunsFromEqualCategory: true,
});
return JSON.parse(fs.readFileSync(outputFile, "utf8")) as SarifFile;
return sarif.readSarifFile(outputFile);
}
// Populates the run.automationDetails.id field using the analysis_key and environment
// and return an updated sarif file contents.
export function populateRunAutomationDetails(
sarif: SarifFile,
sarifFile: Partial<sarif.Log>,
category: string | undefined,
analysis_key: string,
environment: string | undefined,
): SarifFile {
): Partial<sarif.Log> {
const automationID = getAutomationID(category, analysis_key, environment);
if (automationID !== undefined) {
for (const run of sarif.runs || []) {
for (const run of sarifFile.runs || []) {
if (run.automationDetails === undefined) {
run.automationDetails = {
id: automationID,
};
}
}
return sarif;
return sarifFile;
}
return sarif;
return sarifFile;
}
function getAutomationID(
@@ -511,9 +429,9 @@ export async function getGroupedSarifFilePaths(
}
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif: string): number {
function countResultsInSarif(sarifLog: string): number {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
const parsedSarif = JSON.parse(sarifLog);
if (!Array.isArray(parsedSarif.runs)) {
throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array.");
}
@@ -529,9 +447,15 @@ function countResultsInSarif(sarif: string): number {
return numResults;
}
export function readSarifFile(sarifFilePath: string): SarifFile {
/** A thin wrapper around `readSarifFile` which wraps exceptions in `InvalidSarifUploadError`.
*
* @throws InvalidSarifUploadError If parsing the SARIF file as JSON failed.
*/
export function readSarifFileOrThrow(
sarifFilePath: string,
): Partial<sarif.Log> {
try {
return JSON.parse(fs.readFileSync(sarifFilePath, "utf8")) as SarifFile;
return sarif.readSarifFile(sarifFilePath);
} catch (e) {
throw new InvalidSarifUploadError(
`Invalid SARIF. JSON syntax error: ${getErrorMessage(e)}`,
@@ -542,26 +466,26 @@ export function readSarifFile(sarifFilePath: string): SarifFile {
// Validates the given SARIF object and throws an error if the SARIF object is invalid.
// The file path is only used in error messages to improve clarity.
export function validateSarifFileSchema(
sarif: SarifFile,
sarifLog: Partial<sarif.Log>,
sarifFilePath: string,
logger: Logger,
) {
): sarifLog is sarif.Log {
if (
areAllRunsProducedByCodeQL([sarif]) &&
areAllRunsProducedByCodeQL([sarifLog]) &&
// We want to validate CodeQL SARIF in testing environments.
!util.getTestingEnvironment()
) {
logger.debug(
`Skipping SARIF schema validation for ${sarifFilePath} as all runs are produced by CodeQL.`,
);
return;
return true;
}
logger.info(`Validating ${sarifFilePath}`);
// eslint-disable-next-line @typescript-eslint/no-require-imports
const schema = require("../src/sarif-schema-2.1.0.json") as jsonschema.Schema;
const result = new jsonschema.Validator().validate(sarif, schema);
const result = new jsonschema.Validator().validate(sarifLog, schema);
// Filter errors related to invalid URIs in the artifactLocation field as this
// is a breaking change. See https://github.com/github/codeql-action/issues/1703
const warningAttributes = ["uri-reference", "uri"];
@@ -603,6 +527,8 @@ export function validateSarifFileSchema(
)}`,
);
}
return true;
}
// buildPayload constructs a map ready to be uploaded to the API from the given
@@ -663,7 +589,7 @@ export function buildPayload(
}
export interface PostProcessingResults {
sarif: util.SarifFile;
sarif: Partial<sarif.Log>;
analysisKey: string;
environment: string;
}
@@ -693,17 +619,17 @@ export async function postProcessSarifFiles(
const gitHubVersion = await getGitHubVersion();
let sarif: SarifFile;
let sarifLog: Partial<sarif.Log>;
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
// Validate that the files we were asked to upload are all valid SARIF files
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
const parsedSarif = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(parsedSarif, sarifPath, logger);
}
sarif = await combineSarifFilesUsingCLI(
sarifLog = await combineSarifFilesUsingCLI(
sarifPaths,
gitHubVersion,
features,
@@ -711,26 +637,26 @@ export async function postProcessSarifFiles(
);
} else {
const sarifPath = sarifPaths[0];
sarif = readSarifFile(sarifPath);
validateSarifFileSchema(sarif, sarifPath, logger);
sarifLog = readSarifFileOrThrow(sarifPath);
validateSarifFileSchema(sarifLog, sarifPath, logger);
// Validate that there are no runs for the same category
await throwIfCombineSarifFilesDisabled([sarif], gitHubVersion);
await throwIfCombineSarifFilesDisabled([sarifLog], gitHubVersion);
}
sarif = filterAlertsByDiffRange(logger, sarif);
sarif = await fingerprints.addFingerprints(sarif, checkoutPath, logger);
sarifLog = filterAlertsByDiffRange(logger, sarifLog);
sarifLog = await fingerprints.addFingerprints(sarifLog, checkoutPath, logger);
const analysisKey = await api.getAnalysisKey();
const environment = actionsUtil.getRequiredInput("matrix");
sarif = populateRunAutomationDetails(
sarif,
sarifLog = populateRunAutomationDetails(
sarifLog,
category,
analysisKey,
environment,
);
return { sarif, analysisKey, environment };
return { sarif: sarifLog, analysisKey, environment };
}
/**
@@ -836,13 +762,13 @@ export async function uploadPostProcessedFiles(
): Promise<UploadResult> {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = util.getToolNames(sarif);
const sarifLog = postProcessingResults.sarif;
const toolNames = sarif.getToolNames(sarifLog);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
validateUniqueCategory(sarifLog, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const sarifPayload = JSON.stringify(sarifLog);
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
@@ -1085,14 +1011,14 @@ function handleProcessingResultForUnsuccessfulExecution(
}
export function validateUniqueCategory(
sarif: SarifFile,
sarifLog: Partial<sarif.Log>,
sentinelPrefix: string,
): void {
// duplicate categories are allowed in the same sarif file
// but not across multiple sarif files
const categories = {} as Record<string, { id?: string; tool?: string }>;
for (const run of sarif.runs) {
for (const run of sarifLog.runs || []) {
const id = run?.automationDetails?.id;
const tool = run.tool?.driver?.name;
const category = `${sanitize(id)}_${sanitize(tool)}`;
@@ -1127,20 +1053,22 @@ function sanitize(str?: string) {
return (str ?? "_").replace(/[^a-zA-Z0-9_]/g, "_").toLocaleUpperCase();
}
/**
 * An error that occurred due to an invalid SARIF upload request.
 *
 * Thrown for problems with the SARIF payload itself, such as JSON syntax
 * errors or mismatched SARIF versions across files being combined. Callers
 * use `instanceof` on this class to distinguish user/configuration problems
 * from internal failures.
 */
export class InvalidSarifUploadError extends Error {}
function filterAlertsByDiffRange(logger: Logger, sarif: SarifFile): SarifFile {
function filterAlertsByDiffRange(
logger: Logger,
sarifLog: Partial<sarif.Log>,
): Partial<sarif.Log> {
const diffRanges = readDiffRangesJsonFile(logger);
if (!diffRanges?.length) {
return sarif;
return sarifLog;
}
if (sarifLog.runs === undefined) {
return sarifLog;
}
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
for (const run of sarif.runs) {
for (const run of sarifLog.runs) {
if (run.results) {
run.results = run.results.filter((result) => {
const locations = [
@@ -1176,5 +1104,5 @@ function filterAlertsByDiffRange(logger: Logger, sarif: SarifFile): SarifFile {
}
}
return sarif;
return sarifLog;
}

View File

@@ -7,6 +7,7 @@ import { getGitHubVersion } from "./api-client";
import { initFeatures } from "./feature-flags";
import { Logger, getActionsLogger } from "./logging";
import { getRepositoryNwo } from "./repository";
import { InvalidSarifUploadError } from "./sarif";
import {
createStatusReportBase,
sendStatusReport,
@@ -141,7 +142,7 @@ async function run(startedAt: Date) {
} catch (unwrappedError) {
const error =
isThirdPartyAnalysis(ActionName.UploadSarif) &&
unwrappedError instanceof upload_lib.InvalidSarifUploadError
unwrappedError instanceof InvalidSarifUploadError
? new ConfigurationError(unwrappedError.message)
: wrapError(unwrappedError);
const message = error.message;

View File

@@ -33,7 +33,11 @@ function mockPostProcessSarifFiles() {
sinon.match.any,
analysisConfig,
)
.resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" });
.resolves({
sarif: { version: "2.1.0", runs: [] },
analysisKey: "",
environment: "",
});
}
return postProcessSarifFiles;

View File

@@ -10,20 +10,11 @@ import * as sinon from "sinon";
import * as api from "./api-client";
import { EnvVar } from "./environment";
import { getRunnerLogger } from "./logging";
import { getRecordingLogger, LoggedMessage, setupTests } from "./testing-utils";
import { setupTests } from "./testing-utils";
import * as util from "./util";
setupTests(test);
// Checks that getToolNames extracts the unique driver names from a
// checked-in SARIF fixture file.
test("getToolNames", (t) => {
  const input = fs.readFileSync(
    `${__dirname}/../src/testdata/tool-names.sarif`,
    "utf8",
  );
  const toolNames = util.getToolNames(JSON.parse(input) as util.SarifFile);
  t.deepEqual(toolNames, ["CodeQL command-line toolchain", "ESLint"]);
});
const GET_MEMORY_FLAG_TESTS = [
{
input: undefined,
@@ -368,67 +359,6 @@ test("waitForResultWithTimeLimit doesn't call callback if promise resolves", asy
t.deepEqual(result, 99);
});
/**
 * Builds a minimal CodeQL SARIF file containing a single run with one
 * tool-execution notification carrying the given locations.
 */
function createMockSarifWithNotification(
  locations: util.SarifLocation[],
): util.SarifFile {
  const invocation = {
    toolExecutionNotifications: [{ locations }],
  };
  const run = {
    tool: {
      driver: {
        name: "CodeQL",
      },
    },
    invocations: [invocation],
  };
  return { runs: [run] };
}
// A single reusable notification location for the tests below.
const stubLocation: util.SarifLocation = {
  physicalLocation: {
    artifactLocation: {
      uri: "file1",
    },
  },
};
// When every notification location is already unique, the SARIF should pass
// through unchanged and only a debug message should be logged.
test("fixInvalidNotifications leaves notifications with unique locations alone", (t) => {
  const messages: LoggedMessage[] = [];
  const result = util.fixInvalidNotifications(
    createMockSarifWithNotification([stubLocation]),
    getRecordingLogger(messages),
  );
  t.deepEqual(result, createMockSarifWithNotification([stubLocation]));
  t.is(messages.length, 1);
  t.deepEqual(messages[0], {
    type: "debug",
    message: "No duplicate locations found in SARIF notification objects.",
  });
});
// Duplicate locations should be collapsed to one, with an info message
// reporting how many were removed.
test("fixInvalidNotifications removes duplicate locations", (t) => {
  const messages: LoggedMessage[] = [];
  const result = util.fixInvalidNotifications(
    createMockSarifWithNotification([stubLocation, stubLocation]),
    getRecordingLogger(messages),
  );
  t.deepEqual(result, createMockSarifWithNotification([stubLocation]));
  t.is(messages.length, 1);
  t.deepEqual(messages[0], {
    type: "info",
    message: "Removed 1 duplicate locations from SARIF notification objects.",
  });
});
function formatGitHubVersion(version: util.GitHubVersion): string {
switch (version.type) {
case util.GitHubVariant.DOTCOM:

View File

@@ -55,78 +55,6 @@ const DEFAULT_RESERVED_RAM_SCALING_FACTOR = 0.05;
*/
const MINIMUM_CGROUP_MEMORY_LIMIT_BYTES = 1024 * 1024;
/** A minimal model of a SARIF log file — only the fields this codebase reads. */
export interface SarifFile {
  // The SARIF format version (e.g. "2.1.0"). May be null while multiple
  // files are being combined and no version has been seen yet.
  version?: string | null;
  runs: SarifRun[];
}

/** A single analysis run within a SARIF file. */
export interface SarifRun {
  tool?: {
    driver?: {
      guid?: string;
      name?: string;
      fullName?: string;
      semanticVersion?: string;
      version?: string;
    };
  };
  automationDetails?: {
    // The automation ID, used by Code Scanning as the analysis category.
    id?: string;
  };
  artifacts?: string[];
  invocations?: SarifInvocation[];
  results?: SarifResult[];
}

/** A tool invocation recorded within a SARIF run. */
export interface SarifInvocation {
  toolExecutionNotifications?: SarifNotification[];
}

/** A single result (alert) reported by a SARIF run. */
export interface SarifResult {
  ruleId?: string;
  rule?: {
    id?: string;
  };
  message?: {
    text?: string;
  };
  // The primary locations of the result.
  locations: Array<{
    physicalLocation: {
      artifactLocation: {
        uri: string;
      };
      region?: {
        startLine?: number;
      };
    };
  }>;
  // Additional locations related to the result.
  relatedLocations?: Array<{
    physicalLocation: {
      artifactLocation: {
        uri: string;
      };
      region?: {
        startLine?: number;
      };
    };
  }>;
  partialFingerprints: {
    primaryLocationLineHash?: string;
  };
}

/** A notification emitted during tool execution. */
export interface SarifNotification {
  locations?: SarifLocation[];
}

/** A location referenced by a SARIF notification. */
export interface SarifLocation {
  physicalLocation?: {
    artifactLocation?: {
      uri?: string;
    };
  };
}
/**
* Get the extra options for the codeql commands.
*/
@@ -146,25 +74,6 @@ export function getExtraOptionsEnvParam(): object {
}
}
/**
 * Get the array of all the tool names contained in the given sarif contents.
 *
 * Returns an array of unique string tool names, in the order in which each
 * name first appears in the runs.
 */
export function getToolNames(sarif: SarifFile): string[] {
  // Use a Set rather than a plain object as the deduplication accumulator:
  // it is type-safe under strict index-signature checking and preserves
  // first-seen insertion order, matching the previous behavior.
  const toolNames = new Set<string>();
  for (const run of sarif.runs || []) {
    const name = run.tool?.driver?.name;
    if (typeof name === "string" && name.length > 0) {
      toolNames.add(name);
    }
  }
  return [...toolNames];
}
// Creates a random temporary directory, runs the given body, and then deletes the directory.
// Mostly intended for use within tests.
export async function withTmpDir<T>(
@@ -984,80 +893,6 @@ export function parseMatrixInput(
return JSON.parse(matrixInput) as { [key: string]: string };
}
/**
 * Returns the given locations with exact duplicates removed, keeping the
 * first occurrence of each. Locations are compared by their JSON
 * serialization; the surviving elements are the original objects.
 */
function removeDuplicateLocations(locations: SarifLocation[]): SarifLocation[] {
  const seen = new Set<string>();
  const unique: SarifLocation[] = [];
  for (const location of locations) {
    const serialized = JSON.stringify(location);
    if (!seen.has(serialized)) {
      seen.add(serialized);
      unique.push(location);
    }
  }
  return unique;
}
/**
 * Removes duplicate locations from the tool-execution notifications of
 * CodeQL runs in the given SARIF file.
 *
 * Runs whose driver is not "CodeQL", and any run/invocation/notification
 * lacking the relevant array, are passed through untouched. The result is a
 * new object built with spreads and `map`; the input is not mutated.
 *
 * @param sarif The SARIF file contents to fix up.
 * @param logger Logger used to report how many duplicates were removed.
 * @returns The SARIF contents with duplicate notification locations removed.
 */
export function fixInvalidNotifications(
  sarif: SarifFile,
  logger: Logger,
): SarifFile {
  // Nothing to do if the file has no runs array at all.
  if (!Array.isArray(sarif.runs)) {
    return sarif;
  }
  // Ensure that the array of locations for each SARIF notification contains unique locations.
  // This is a workaround for a bug in the CodeQL CLI that causes duplicate locations to be
  // emitted in some cases.
  let numDuplicateLocationsRemoved = 0;
  const newSarif = {
    ...sarif,
    runs: sarif.runs.map((run) => {
      // Only CodeQL runs with an invocations array need fixing.
      if (
        run.tool?.driver?.name !== "CodeQL" ||
        !Array.isArray(run.invocations)
      ) {
        return run;
      }
      return {
        ...run,
        invocations: run.invocations.map((invocation) => {
          if (!Array.isArray(invocation.toolExecutionNotifications)) {
            return invocation;
          }
          return {
            ...invocation,
            toolExecutionNotifications:
              invocation.toolExecutionNotifications.map((notification) => {
                if (!Array.isArray(notification.locations)) {
                  return notification;
                }
                const newLocations = removeDuplicateLocations(
                  notification.locations,
                );
                // Track how many locations were dropped, for the log line below.
                numDuplicateLocationsRemoved +=
                  notification.locations.length - newLocations.length;
                return {
                  ...notification,
                  locations: newLocations,
                };
              }),
          };
        }),
      };
    }),
  };
  if (numDuplicateLocationsRemoved > 0) {
    logger.info(
      `Removed ${numDuplicateLocationsRemoved} duplicate locations from SARIF notification ` +
        "objects.",
    );
  } else {
    logger.debug("No duplicate locations found in SARIF notification objects.");
  }
  return newSarif;
}
/** Coerces an arbitrary thrown value into an `Error` instance. */
export function wrapError(error: unknown): Error {
  if (error instanceof Error) {
    return error;
  }
  return new Error(String(error));
}