Merge pull request #3167 from github/mbg/upload-sarif/find-then-filter

Find, then filter, SARIF files for `upload-sarif` Action
This commit is contained in:
Michael B. Gale
2025-10-02 11:51:47 +01:00
committed by GitHub
18 changed files with 545 additions and 337 deletions
+6
View File
@@ -146,6 +146,12 @@ export default [
"@typescript-eslint/prefer-regexp-exec": "off",
"@typescript-eslint/require-await": "off",
"@typescript-eslint/restrict-template-expressions": "off",
"@typescript-eslint/no-unused-vars": [
"error",
{
"argsIgnorePattern": "^_",
}
],
"func-style": "off",
},
},
+1 -1
View File
@@ -118671,7 +118671,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
if (fs5.existsSync(baseTempDir)) {
const outputDirs = fs5.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => f.endsWith(".sarif"));
const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => path5.extname(f) === ".sarif");
for (const sarifFile of sarifFiles) {
toUpload.push(path5.resolve(baseTempDir, outputDir, sarifFile));
}
+9 -8
View File
@@ -28149,11 +28149,11 @@ var require_out = __commonJS({
async.read(path20, getSettings(optionsOrSettingsOrCallback), callback);
}
exports2.stat = stat;
function statSync3(path20, optionsOrSettings) {
function statSync4(path20, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
return sync.read(path20, settings);
}
exports2.statSync = statSync3;
exports2.statSync = statSync4;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1.default) {
return settingsOrOptions;
@@ -90167,6 +90167,7 @@ var CodeScanning = {
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
sarifExtension: ".sarif",
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
};
var CodeQuality = {
@@ -90175,6 +90176,7 @@ var CodeQuality = {
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
sarifExtension: ".quality.sarif",
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
};
@@ -93839,7 +93841,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
logger.info(`Interpreting ${analysis.name} results for ${language}`);
let category = automationDetailsId;
if (analysis.kind === "code-quality" /* CodeQuality */) {
category = fixCodeQualityCategory(logger, automationDetailsId);
category = analysis.fixCategory(logger, automationDetailsId);
}
const sarifFile = path16.join(
sarifFolder,
@@ -95712,6 +95714,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
category = uploadTarget.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
@@ -96161,16 +96164,14 @@ async function run() {
core14.setOutput("sarif-id", uploadResult.sarifID);
}
if (isCodeQualityEnabled(config)) {
const analysis = CodeQuality;
const qualityUploadResult = await uploadFiles(
outputDir,
getRequiredInput("checkout_path"),
fixCodeQualityCategory(
logger,
getOptionalInput("category")
),
getOptionalInput("category"),
features,
logger,
CodeQuality
analysis
);
core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
}
+32 -2
View File
@@ -28149,11 +28149,11 @@ var require_out = __commonJS({
async.read(path19, getSettings(optionsOrSettingsOrCallback), callback);
}
exports2.stat = stat;
function statSync2(path19, optionsOrSettings) {
function statSync3(path19, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
return sync.read(path19, settings);
}
exports2.statSync = statSync2;
exports2.statSync = statSync3;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1.default) {
return settingsOrOptions;
@@ -128337,6 +128337,9 @@ function getWorkflowRunAttempt() {
function isSelfHostedRunner() {
return process.env.RUNNER_ENVIRONMENT === "self-hosted";
}
// Whether this workflow run was started by GitHub's default setup, which
// triggers workflows with the "dynamic" event name.
function isDefaultSetup() {
  const eventName = getWorkflowEventName();
  return eventName === "dynamic";
}
function prettyPrintInvocation(cmd, args) {
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
}
@@ -128402,6 +128405,30 @@ var restoreInputs = function() {
}
}
};
// Maps language identifiers as they may appear in a `/language:<lang>` category
// to the canonical identifiers expected for Code Quality uploads
// (e.g. "cpp", "c", and "c++" all map to "c-cpp"); used by fixCodeQualityCategory.
var qualityCategoryMapping = {
"c#": "csharp",
cpp: "c-cpp",
c: "c-cpp",
"c++": "c-cpp",
java: "java-kotlin",
javascript: "javascript-typescript",
typescript: "javascript-typescript",
kotlin: "java-kotlin"
};
/**
 * Rewrites a `/language:<lang>` category for Code Quality uploads on default
 * setup runs, replacing the language segment with its canonical identifier
 * from `qualityCategoryMapping`. Any other category (or `undefined`) is
 * returned unchanged.
 */
function fixCodeQualityCategory(logger, category) {
  const prefix = "/language:";
  if (category === void 0 || !isDefaultSetup() || !category.startsWith(prefix)) {
    return category;
  }
  const mappedLanguage = qualityCategoryMapping[category.substring(prefix.length)];
  if (!mappedLanguage) {
    return category;
  }
  const newCategory = `/language:${mappedLanguage}`;
  logger.info(
    `Adjusted category for Code Quality from '${category}' to '${newCategory}'.`
  );
  return newCategory;
}
// src/api-client.ts
var core5 = __toESM(require_core());
@@ -128822,6 +128849,7 @@ var CodeScanning = {
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
sarifExtension: ".sarif",
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
fixCategory: (_2, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
};
var CodeQuality = {
@@ -128830,6 +128858,7 @@ var CodeQuality = {
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
sarifExtension: ".quality.sarif",
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
};
@@ -133161,6 +133190,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
category = uploadTarget.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
+107 -10
View File
@@ -29446,11 +29446,11 @@ var require_out = __commonJS({
async.read(path15, getSettings(optionsOrSettingsOrCallback), callback);
}
exports2.stat = stat;
function statSync2(path15, optionsOrSettings) {
function statSync3(path15, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
return sync.read(path15, settings);
}
exports2.statSync = statSync2;
exports2.statSync = statSync3;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1.default) {
return settingsOrOptions;
@@ -84821,6 +84821,7 @@ __export(upload_lib_exports, {
InvalidSarifUploadError: () => InvalidSarifUploadError,
buildPayload: () => buildPayload,
findSarifFilesInDir: () => findSarifFilesInDir,
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
getSarifFilePaths: () => getSarifFilePaths,
populateRunAutomationDetails: () => populateRunAutomationDetails,
readSarifFile: () => readSarifFile,
@@ -88473,6 +88474,9 @@ function getWorkflowRunAttempt() {
}
return workflowRunAttempt;
}
// Whether this workflow run was started by GitHub's default setup, which
// triggers workflows with the "dynamic" event name.
function isDefaultSetup() {
  const eventName = getWorkflowEventName();
  return eventName === "dynamic";
}
function prettyPrintInvocation(cmd, args) {
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
}
@@ -88529,6 +88533,57 @@ async function runTool(cmd, args = [], opts = {}) {
}
return stdout;
}
// Maps language identifiers as they may appear in a `/language:<lang>` category
// to the canonical identifiers expected for Code Quality uploads
// (e.g. "cpp", "c", and "c++" all map to "c-cpp"); used by fixCodeQualityCategory.
var qualityCategoryMapping = {
"c#": "csharp",
cpp: "c-cpp",
c: "c-cpp",
"c++": "c-cpp",
java: "java-kotlin",
javascript: "javascript-typescript",
typescript: "javascript-typescript",
kotlin: "java-kotlin"
};
/**
 * Rewrites a `/language:<lang>` category for Code Quality uploads on default
 * setup runs, replacing the language segment with its canonical identifier
 * from `qualityCategoryMapping`. Any other category (or `undefined`) is
 * returned unchanged.
 */
function fixCodeQualityCategory(logger, category) {
  const prefix = "/language:";
  if (category === void 0 || !isDefaultSetup() || !category.startsWith(prefix)) {
    return category;
  }
  const mappedLanguage = qualityCategoryMapping[category.substring(prefix.length)];
  if (!mappedLanguage) {
    return category;
  }
  const newCategory = `/language:${mappedLanguage}`;
  logger.info(
    `Adjusted category for Code Quality from '${category}' to '${newCategory}'.`
  );
  return newCategory;
}
// src/analyses.ts
// Kinds of analyses this action can upload (compiled form of a TS string enum).
var AnalysisKind = /* @__PURE__ */ ((kinds) => {
  kinds["CodeScanning"] = "code-scanning";
  kinds["CodeQuality"] = "code-quality";
  return kinds;
})(AnalysisKind || {});
// Set of all analysis kind values, for membership checks.
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
// Upload configuration for Code Scanning analyses.
var CodeScanning = {
kind: "code-scanning" /* CodeScanning */,
name: "code scanning",
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
sarifExtension: ".sarif",
// Matches ".sarif" files that are not Code Quality files (".quality.sarif").
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
// Code Scanning categories are used as-is.
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
};
// Upload configuration for Code Quality analyses.
var CodeQuality = {
kind: "code-quality" /* CodeQuality */,
name: "code quality",
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
sarifExtension: ".quality.sarif",
// Matches only ".quality.sarif" files.
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
// Quality categories may need their language segment rewritten.
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
};
var SarifScanOrder = [CodeQuality, CodeScanning];
// src/api-client.ts
var core5 = __toESM(require_core());
@@ -88921,14 +88976,6 @@ function wrapCliConfigurationError(cliError) {
var fs7 = __toESM(require("fs"));
var path9 = __toESM(require("path"));
// src/analyses.ts
// Kinds of analyses this action can upload (compiled form of a TS string enum).
var AnalysisKind = /* @__PURE__ */ ((kinds) => {
  kinds["CodeScanning"] = "code-scanning";
  kinds["CodeQuality"] = "code-quality";
  return kinds;
})(AnalysisKind || {});
// Set of all analysis kind values, for membership checks.
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
// src/caching-utils.ts
var core6 = __toESM(require_core());
@@ -92391,6 +92438,54 @@ function getSarifFilePaths(sarifPath, isSarif) {
}
return sarifFiles;
}
/**
 * Resolves `sarifPath` to SARIF file paths grouped by analysis kind.
 *
 * If `sarifPath` is a directory, every ".sarif" file in it is assigned to the
 * first analysis in `SarifScanOrder` whose predicate matches it; files matched
 * by no analysis produce a warning. If `sarifPath` is a single file, it is
 * assigned to the first analysis that matches it (Code Scanning accepts any
 * file as a fallback).
 *
 * @throws {ConfigurationError} if `sarifPath` does not exist.
 */
async function getGroupedSarifFilePaths(logger, sarifPath) {
  const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false });
  if (stats === void 0) {
    throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
  }
  const results = {};
  if (!stats.isDirectory()) {
    // Single-file case: assign the file to the first matching analysis.
    for (const analysisConfig of SarifScanOrder) {
      const accepts = analysisConfig.kind === "code-scanning" /* CodeScanning */ || analysisConfig.sarifPredicate(sarifPath);
      if (accepts) {
        logger.debug(
          `Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`
        );
        results[analysisConfig.kind] = [sarifPath];
        break;
      }
    }
    return results;
  }
  // Directory case: find all SARIF files first, then partition them.
  let unassignedSarifFiles = findSarifFilesInDir(
    sarifPath,
    (name) => path14.extname(name) === ".sarif"
  );
  logger.debug(
    `Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`
  );
  for (const analysisConfig of SarifScanOrder) {
    const filesForCurrentAnalysis = unassignedSarifFiles.filter(
      analysisConfig.sarifPredicate
    );
    if (filesForCurrentAnalysis.length === 0) {
      logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
      continue;
    }
    logger.debug(
      `The following SARIF files are for ${analysisConfig.name}: ${filesForCurrentAnalysis.join(", ")}`
    );
    // Remove the claimed files so later analyses cannot claim them again.
    unassignedSarifFiles = unassignedSarifFiles.filter(
      (name) => !analysisConfig.sarifPredicate(name)
    );
    results[analysisConfig.kind] = filesForCurrentAnalysis;
  }
  if (unassignedSarifFiles.length !== 0) {
    logger.warning(
      `Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`
    );
  }
  return results;
}
function countResultsInSarif(sarif) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
@@ -92505,6 +92600,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
category = uploadTarget.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
@@ -92750,6 +92846,7 @@ function filterAlertsByDiffRange(logger, sarif) {
InvalidSarifUploadError,
buildPayload,
findSarifFilesInDir,
getGroupedSarifFilePaths,
getSarifFilePaths,
populateRunAutomationDetails,
readSarifFile,
+1 -1
View File
@@ -117619,7 +117619,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
if (fs.existsSync(baseTempDir)) {
const outputDirs = fs.readdirSync(baseTempDir);
for (const outputDir of outputDirs) {
const sarifFiles = fs.readdirSync(path.resolve(baseTempDir, outputDir)).filter((f) => f.endsWith(".sarif"));
const sarifFiles = fs.readdirSync(path.resolve(baseTempDir, outputDir)).filter((f) => path.extname(f) === ".sarif");
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
}
+153 -124
View File
@@ -185,7 +185,7 @@ var require_file_command = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0;
var crypto = __importStar4(require("crypto"));
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var os3 = __importStar4(require("os"));
var utils_1 = require_utils();
function issueFileCommand(command, message) {
@@ -193,10 +193,10 @@ var require_file_command = __commonJS({
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs16.existsSync(filePath)) {
if (!fs15.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs16.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
fs15.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
encoding: "utf8"
});
}
@@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({
var _a;
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0;
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var path16 = __importStar4(require("path"));
_a = fs16.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
_a = fs15.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
exports2.IS_WINDOWS = process.platform === "win32";
exports2.UV_FS_O_EXLOCK = 268435456;
exports2.READONLY = fs16.constants.O_RDONLY;
exports2.READONLY = fs15.constants.O_RDONLY;
function exists(fsPath) {
return __awaiter4(this, void 0, void 0, function* () {
try {
@@ -27907,8 +27907,8 @@ var require_utils7 = __commonJS({
exports2.array = array;
var errno = require_errno();
exports2.errno = errno;
var fs16 = require_fs();
exports2.fs = fs16;
var fs15 = require_fs();
exports2.fs = fs15;
var path16 = require_path();
exports2.path = path16;
var pattern = require_pattern();
@@ -28092,12 +28092,12 @@ var require_fs2 = __commonJS({
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0;
var fs16 = require("fs");
var fs15 = require("fs");
exports2.FILE_SYSTEM_ADAPTER = {
lstat: fs16.lstat,
stat: fs16.stat,
lstatSync: fs16.lstatSync,
statSync: fs16.statSync
lstat: fs15.lstat,
stat: fs15.stat,
lstatSync: fs15.lstatSync,
statSync: fs15.statSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === void 0) {
@@ -28114,12 +28114,12 @@ var require_settings = __commonJS({
"node_modules/@nodelib/fs.stat/out/settings.js"(exports2) {
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
var fs16 = require_fs2();
var fs15 = require_fs2();
var Settings = class {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
this.fs = fs16.createFileSystemAdapter(this._options.fs);
this.fs = fs15.createFileSystemAdapter(this._options.fs);
this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
}
@@ -28149,11 +28149,11 @@ var require_out = __commonJS({
async.read(path16, getSettings(optionsOrSettingsOrCallback), callback);
}
exports2.stat = stat;
function statSync2(path16, optionsOrSettings) {
function statSync3(path16, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
return sync.read(path16, settings);
}
exports2.statSync = statSync2;
exports2.statSync = statSync3;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1.default) {
return settingsOrOptions;
@@ -28274,8 +28274,8 @@ var require_utils8 = __commonJS({
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.fs = void 0;
var fs16 = require_fs3();
exports2.fs = fs16;
var fs15 = require_fs3();
exports2.fs = fs15;
}
});
@@ -28470,14 +28470,14 @@ var require_fs4 = __commonJS({
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0;
var fs16 = require("fs");
var fs15 = require("fs");
exports2.FILE_SYSTEM_ADAPTER = {
lstat: fs16.lstat,
stat: fs16.stat,
lstatSync: fs16.lstatSync,
statSync: fs16.statSync,
readdir: fs16.readdir,
readdirSync: fs16.readdirSync
lstat: fs15.lstat,
stat: fs15.stat,
lstatSync: fs15.lstatSync,
statSync: fs15.statSync,
readdir: fs15.readdir,
readdirSync: fs15.readdirSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === void 0) {
@@ -28496,12 +28496,12 @@ var require_settings2 = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
var path16 = require("path");
var fsStat = require_out();
var fs16 = require_fs4();
var fs15 = require_fs4();
var Settings = class {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
this.fs = fs16.createFileSystemAdapter(this._options.fs);
this.fs = fs15.createFileSystemAdapter(this._options.fs);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path16.sep);
this.stats = this._getValue(this._options.stats, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
@@ -29762,16 +29762,16 @@ var require_settings4 = __commonJS({
"use strict";
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
var fs16 = require("fs");
var fs15 = require("fs");
var os3 = require("os");
var CPU_COUNT = Math.max(os3.cpus().length, 1);
exports2.DEFAULT_FILE_SYSTEM_ADAPTER = {
lstat: fs16.lstat,
lstatSync: fs16.lstatSync,
stat: fs16.stat,
statSync: fs16.statSync,
readdir: fs16.readdir,
readdirSync: fs16.readdirSync
lstat: fs15.lstat,
lstatSync: fs15.lstatSync,
stat: fs15.stat,
statSync: fs15.statSync,
readdir: fs15.readdir,
readdirSync: fs15.readdirSync
};
var Settings = class {
constructor(_options = {}) {
@@ -35465,7 +35465,7 @@ var require_internal_globber = __commonJS({
Object.defineProperty(exports2, "__esModule", { value: true });
exports2.DefaultGlobber = void 0;
var core14 = __importStar4(require_core());
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var globOptionsHelper = __importStar4(require_internal_glob_options_helper());
var path16 = __importStar4(require("path"));
var patternHelper = __importStar4(require_internal_pattern_helper());
@@ -35517,7 +35517,7 @@ var require_internal_globber = __commonJS({
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
core14.debug(`Search path '${searchPath}'`);
try {
yield __await4(fs16.promises.lstat(searchPath));
yield __await4(fs15.promises.lstat(searchPath));
} catch (err) {
if (err.code === "ENOENT") {
continue;
@@ -35548,7 +35548,7 @@ var require_internal_globber = __commonJS({
continue;
}
const childLevel = item.level + 1;
const childItems = (yield __await4(fs16.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
const childItems = (yield __await4(fs15.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
stack.push(...childItems.reverse());
} else if (match & internal_match_kind_1.MatchKind.File) {
yield yield __await4(item.path);
@@ -35583,7 +35583,7 @@ var require_internal_globber = __commonJS({
let stats;
if (options.followSymbolicLinks) {
try {
stats = yield fs16.promises.stat(item.path);
stats = yield fs15.promises.stat(item.path);
} catch (err) {
if (err.code === "ENOENT") {
if (options.omitBrokenSymbolicLinks) {
@@ -35595,10 +35595,10 @@ var require_internal_globber = __commonJS({
throw err;
}
} else {
stats = yield fs16.promises.lstat(item.path);
stats = yield fs15.promises.lstat(item.path);
}
if (stats.isDirectory() && options.followSymbolicLinks) {
const realPath = yield fs16.promises.realpath(item.path);
const realPath = yield fs15.promises.realpath(item.path);
while (traversalChain.length >= item.level) {
traversalChain.pop();
}
@@ -36932,7 +36932,7 @@ var require_cacheUtils = __commonJS({
var glob = __importStar4(require_glob());
var io6 = __importStar4(require_io());
var crypto = __importStar4(require("crypto"));
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var path16 = __importStar4(require("path"));
var semver8 = __importStar4(require_semver3());
var util = __importStar4(require("util"));
@@ -36962,7 +36962,7 @@ var require_cacheUtils = __commonJS({
}
exports2.createTempDirectory = createTempDirectory;
function getArchiveFileSizeInBytes(filePath) {
return fs16.statSync(filePath).size;
return fs15.statSync(filePath).size;
}
exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
@@ -37002,7 +37002,7 @@ var require_cacheUtils = __commonJS({
exports2.resolvePaths = resolvePaths;
function unlinkFile(filePath) {
return __awaiter4(this, void 0, void 0, function* () {
return util.promisify(fs16.unlink)(filePath);
return util.promisify(fs15.unlink)(filePath);
});
}
exports2.unlinkFile = unlinkFile;
@@ -37047,7 +37047,7 @@ var require_cacheUtils = __commonJS({
exports2.getCacheFileName = getCacheFileName;
function getGnuTarPathOnWindows() {
return __awaiter4(this, void 0, void 0, function* () {
if (fs16.existsSync(constants_1.GnuTarPathOnWindows)) {
if (fs15.existsSync(constants_1.GnuTarPathOnWindows)) {
return constants_1.GnuTarPathOnWindows;
}
const versionOutput = yield getVersion("tar");
@@ -48820,7 +48820,7 @@ var require_dist7 = __commonJS({
var stream2 = require("stream");
var coreLro = require_dist6();
var events = require("events");
var fs16 = require("fs");
var fs15 = require("fs");
var util = require("util");
var buffer = require("buffer");
function _interopNamespaceDefault(e) {
@@ -48843,7 +48843,7 @@ var require_dist7 = __commonJS({
}
var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat);
var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient);
var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs16);
var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs15);
var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util);
var logger = logger$1.createClientLogger("storage-blob");
var BaseRequestPolicy = class {
@@ -72691,7 +72691,7 @@ var require_downloadUtils = __commonJS({
var http_client_1 = require_lib();
var storage_blob_1 = require_dist7();
var buffer = __importStar4(require("buffer"));
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var stream2 = __importStar4(require("stream"));
var util = __importStar4(require("util"));
var utils = __importStar4(require_cacheUtils());
@@ -72802,7 +72802,7 @@ var require_downloadUtils = __commonJS({
exports2.DownloadProgress = DownloadProgress;
function downloadCacheHttpClient(archiveLocation, archivePath) {
return __awaiter4(this, void 0, void 0, function* () {
const writeStream = fs16.createWriteStream(archivePath);
const writeStream = fs15.createWriteStream(archivePath);
const httpClient = new http_client_1.HttpClient("actions/cache");
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () {
return httpClient.get(archiveLocation);
@@ -72828,7 +72828,7 @@ var require_downloadUtils = __commonJS({
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter4(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs16.promises.open(archivePath, "w");
const archiveDescriptor = yield fs15.promises.open(archivePath, "w");
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
socketTimeout: options.timeoutInMs,
keepAlive: true
@@ -72945,7 +72945,7 @@ var require_downloadUtils = __commonJS({
} else {
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
const downloadProgress = new DownloadProgress(contentLength);
const fd = fs16.openSync(archivePath, "w");
const fd = fs15.openSync(archivePath, "w");
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
@@ -72963,12 +72963,12 @@ var require_downloadUtils = __commonJS({
controller.abort();
throw new Error("Aborting cache download as the download time exceeded the timeout.");
} else if (Buffer.isBuffer(result)) {
fs16.writeFileSync(fd, result);
fs15.writeFileSync(fd, result);
}
}
} finally {
downloadProgress.stopDisplayTimer();
fs16.closeSync(fd);
fs15.closeSync(fd);
}
}
});
@@ -73267,7 +73267,7 @@ var require_cacheHttpClient = __commonJS({
var core14 = __importStar4(require_core());
var http_client_1 = require_lib();
var auth_1 = require_auth();
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var url_1 = require("url");
var utils = __importStar4(require_cacheUtils());
var uploadUtils_1 = require_uploadUtils();
@@ -73405,7 +73405,7 @@ Other caches with similar key:`);
return __awaiter4(this, void 0, void 0, function* () {
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs16.openSync(archivePath, "r");
const fd = fs15.openSync(archivePath, "r");
const uploadOptions = (0, options_1.getUploadOptions)(options);
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
@@ -73419,7 +73419,7 @@ Other caches with similar key:`);
const start = offset;
const end = offset + chunkSize - 1;
offset += maxChunkSize;
yield uploadChunk(httpClient, resourceUrl, () => fs16.createReadStream(archivePath, {
yield uploadChunk(httpClient, resourceUrl, () => fs15.createReadStream(archivePath, {
fd,
start,
end,
@@ -73430,7 +73430,7 @@ Other caches with similar key:`);
}
})));
} finally {
fs16.closeSync(fd);
fs15.closeSync(fd);
}
return;
});
@@ -80643,7 +80643,7 @@ var require_manifest = __commonJS({
var core_1 = require_core();
var os3 = require("os");
var cp = require("child_process");
var fs16 = require("fs");
var fs15 = require("fs");
function _findMatch(versionSpec, stable, candidates, archFilter) {
return __awaiter4(this, void 0, void 0, function* () {
const platFilter = os3.platform();
@@ -80707,10 +80707,10 @@ var require_manifest = __commonJS({
const lsbReleaseFile = "/etc/lsb-release";
const osReleaseFile = "/etc/os-release";
let contents = "";
if (fs16.existsSync(lsbReleaseFile)) {
contents = fs16.readFileSync(lsbReleaseFile).toString();
} else if (fs16.existsSync(osReleaseFile)) {
contents = fs16.readFileSync(osReleaseFile).toString();
if (fs15.existsSync(lsbReleaseFile)) {
contents = fs15.readFileSync(lsbReleaseFile).toString();
} else if (fs15.existsSync(osReleaseFile)) {
contents = fs15.readFileSync(osReleaseFile).toString();
}
return contents;
}
@@ -80887,7 +80887,7 @@ var require_tool_cache = __commonJS({
var core14 = __importStar4(require_core());
var io6 = __importStar4(require_io());
var crypto = __importStar4(require("crypto"));
var fs16 = __importStar4(require("fs"));
var fs15 = __importStar4(require("fs"));
var mm = __importStar4(require_manifest());
var os3 = __importStar4(require("os"));
var path16 = __importStar4(require("path"));
@@ -80934,7 +80934,7 @@ var require_tool_cache = __commonJS({
exports2.downloadTool = downloadTool2;
function downloadToolAttempt(url2, dest, auth, headers) {
return __awaiter4(this, void 0, void 0, function* () {
if (fs16.existsSync(dest)) {
if (fs15.existsSync(dest)) {
throw new Error(`Destination file path ${dest} already exists`);
}
const http = new httpm.HttpClient(userAgent, [], {
@@ -80958,7 +80958,7 @@ var require_tool_cache = __commonJS({
const readStream = responseMessageFactory();
let succeeded = false;
try {
yield pipeline(readStream, fs16.createWriteStream(dest));
yield pipeline(readStream, fs15.createWriteStream(dest));
core14.debug("download complete");
succeeded = true;
return dest;
@@ -81170,11 +81170,11 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source dir: ${sourceDir}`);
if (!fs16.statSync(sourceDir).isDirectory()) {
if (!fs15.statSync(sourceDir).isDirectory()) {
throw new Error("sourceDir is not a directory");
}
const destPath = yield _createToolPath(tool, version, arch2);
for (const itemName of fs16.readdirSync(sourceDir)) {
for (const itemName of fs15.readdirSync(sourceDir)) {
const s = path16.join(sourceDir, itemName);
yield io6.cp(s, destPath, { recursive: true });
}
@@ -81189,7 +81189,7 @@ var require_tool_cache = __commonJS({
arch2 = arch2 || os3.arch();
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
core14.debug(`source file: ${sourceFile}`);
if (!fs16.statSync(sourceFile).isFile()) {
if (!fs15.statSync(sourceFile).isFile()) {
throw new Error("sourceFile is not a file");
}
const destFolder = yield _createToolPath(tool, version, arch2);
@@ -81219,7 +81219,7 @@ var require_tool_cache = __commonJS({
versionSpec = semver8.clean(versionSpec) || "";
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
core14.debug(`checking cache: ${cachePath}`);
if (fs16.existsSync(cachePath) && fs16.existsSync(`${cachePath}.complete`)) {
if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) {
core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
toolPath = cachePath;
} else {
@@ -81233,12 +81233,12 @@ var require_tool_cache = __commonJS({
const versions = [];
arch2 = arch2 || os3.arch();
const toolPath = path16.join(_getCacheDirectory(), toolName);
if (fs16.existsSync(toolPath)) {
const children = fs16.readdirSync(toolPath);
if (fs15.existsSync(toolPath)) {
const children = fs15.readdirSync(toolPath);
for (const child of children) {
if (isExplicitVersion(child)) {
const fullPath = path16.join(toolPath, child, arch2 || "");
if (fs16.existsSync(fullPath) && fs16.existsSync(`${fullPath}.complete`)) {
if (fs15.existsSync(fullPath) && fs15.existsSync(`${fullPath}.complete`)) {
versions.push(child);
}
}
@@ -81312,7 +81312,7 @@ var require_tool_cache = __commonJS({
function _completeToolPath(tool, version, arch2) {
const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || "");
const markerPath = `${folderPath}.complete`;
fs16.writeFileSync(markerPath, "");
fs15.writeFileSync(markerPath, "");
core14.debug("finished caching tool");
}
function isExplicitVersion(versionSpec) {
@@ -85706,21 +85706,21 @@ async function getFolderSize(itemPath, options) {
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
async function core(rootItemPath, options = {}, returnType = {}) {
const fs16 = options.fs || await import("node:fs/promises");
const fs15 = options.fs || await import("node:fs/promises");
let folderSize = 0n;
const foundInos = /* @__PURE__ */ new Set();
const errors = [];
await processItem(rootItemPath);
async function processItem(itemPath) {
if (options.ignore?.test(itemPath)) return;
const stats = returnType.strict ? await fs16.lstat(itemPath, { bigint: true }) : await fs16.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2));
const stats = returnType.strict ? await fs15.lstat(itemPath, { bigint: true }) : await fs15.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2));
if (typeof stats !== "object") return;
if (!foundInos.has(stats.ino)) {
foundInos.add(stats.ino);
folderSize += stats.size;
}
if (stats.isDirectory()) {
const directoryItems = returnType.strict ? await fs16.readdir(itemPath) : await fs16.readdir(itemPath).catch((error2) => errors.push(error2));
const directoryItems = returnType.strict ? await fs15.readdir(itemPath) : await fs15.readdir(itemPath).catch((error2) => errors.push(error2));
if (typeof directoryItems !== "object") return;
await Promise.all(
directoryItems.map(
@@ -88590,6 +88590,11 @@ async function asyncSome(array, predicate) {
const results = await Promise.all(array.map(predicate));
return results.some((result) => result);
}
// Collects the [key, value] entries of `object` whose value is defined,
// preserving the insertion order of `Object.entries`.
function unsafeEntriesInvariant(object) {
  const definedEntries = [];
  for (const entry of Object.entries(object)) {
    if (entry[1] !== void 0) {
      definedEntries.push(entry);
    }
  }
  return definedEntries;
}
// src/actions-util.ts
var pkg = require_package();
@@ -88770,6 +88775,7 @@ var CodeScanning = {
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
sarifExtension: ".sarif",
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
};
var CodeQuality = {
@@ -88778,8 +88784,18 @@ var CodeQuality = {
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
sarifExtension: ".quality.sarif",
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
};
// Maps an analysis kind string to its analysis configuration object.
// Returns `undefined` for any value outside the known kinds, matching
// the fall-through behavior of the original `switch`.
function getAnalysisConfig(kind) {
  if (kind === "code-scanning" /* CodeScanning */) {
    return CodeScanning;
  }
  if (kind === "code-quality" /* CodeQuality */) {
    return CodeQuality;
  }
}
var SarifScanOrder = [CodeQuality, CodeScanning];
// src/api-client.ts
var core5 = __toESM(require_core());
@@ -93074,6 +93090,54 @@ function findSarifFilesInDir(sarifPath, isSarif) {
walkSarifFiles(sarifPath);
return sarifFiles;
}
// Finds SARIF files under `sarifPath` and groups them by analysis kind,
// assigning each file to the first analysis in `SarifScanOrder` whose
// predicate claims it. Throws a ConfigurationError if the path is missing.
async function getGroupedSarifFilePaths(logger, sarifPath) {
  const stats = fs14.statSync(sarifPath, { throwIfNoEntry: false });
  if (stats === void 0) {
    throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
  }
  const results = {};
  if (!stats.isDirectory()) {
    // Single file: Code Scanning acts as the catch-all and accepts any
    // explicitly named file; other analyses must match their predicate.
    for (const analysisConfig of SarifScanOrder) {
      const claimsFile = analysisConfig.kind === "code-scanning" /* CodeScanning */ || analysisConfig.sarifPredicate(sarifPath);
      if (claimsFile) {
        logger.debug(
          `Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`
        );
        results[analysisConfig.kind] = [sarifPath];
        break;
      }
    }
    return results;
  }
  // Directory: gather every `.sarif` file, then let each analysis claim its
  // files in scan order (most specific extension first).
  let unassignedSarifFiles = findSarifFilesInDir(
    sarifPath,
    (name) => path15.extname(name) === ".sarif"
  );
  logger.debug(
    `Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`
  );
  for (const analysisConfig of SarifScanOrder) {
    // Partition the unassigned files in a single pass.
    const claimed = [];
    const unclaimed = [];
    for (const file of unassignedSarifFiles) {
      if (analysisConfig.sarifPredicate(file)) {
        claimed.push(file);
      } else {
        unclaimed.push(file);
      }
    }
    if (claimed.length > 0) {
      logger.debug(
        `The following SARIF files are for ${analysisConfig.name}: ${claimed.join(", ")}`
      );
      unassignedSarifFiles = unclaimed;
      results[analysisConfig.kind] = claimed;
    } else {
      logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
    }
  }
  if (unassignedSarifFiles.length !== 0) {
    logger.warning(
      `Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`
    );
  }
  return results;
}
function countResultsInSarif(sarif) {
let numResults = 0;
const parsedSarif = JSON.parse(sarif);
@@ -93174,6 +93238,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
category = uploadTarget.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
@@ -93416,61 +93481,25 @@ function filterAlertsByDiffRange(logger, sarif) {
}
// src/upload-sarif.ts
var fs15 = __toESM(require("fs"));
async function findAndUpload(logger, features, sarifPath, pathStats, checkoutPath, analysis, category) {
let sarifFiles;
if (pathStats.isDirectory()) {
sarifFiles = findSarifFilesInDir(
sarifPath,
analysis.sarifPredicate
);
} else if (pathStats.isFile() && (analysis.sarifPredicate(sarifPath) || analysis.kind === "code-scanning" /* CodeScanning */ && !CodeQuality.sarifPredicate(sarifPath))) {
sarifFiles = [sarifPath];
} else {
return void 0;
}
if (sarifFiles.length !== 0) {
return await uploadSpecifiedFiles(
async function uploadSarif(logger, features, checkoutPath, sarifPath, category) {
const sarifGroups = await getGroupedSarifFilePaths(
logger,
sarifPath
);
const uploadResults = {};
for (const [analysisKind, sarifFiles] of unsafeEntriesInvariant(
sarifGroups
)) {
const analysisConfig = getAnalysisConfig(analysisKind);
uploadResults[analysisKind] = await uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
logger,
analysis
analysisConfig
);
}
return void 0;
}
async function uploadSarif(logger, features, checkoutPath, sarifPath, category) {
const pathStats = fs15.lstatSync(sarifPath, { throwIfNoEntry: false });
if (pathStats === void 0) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
}
const uploadResults = {};
const uploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
CodeScanning,
category
);
if (uploadResult !== void 0) {
uploadResults["code-scanning" /* CodeScanning */] = uploadResult;
}
const qualityUploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
CodeQuality,
fixCodeQualityCategory(logger, category)
);
if (qualityUploadResult !== void 0) {
uploadResults["code-quality" /* CodeQuality */] = qualityUploadResult;
}
return uploadResults;
}
+29
View File
@@ -1,3 +1,5 @@
import { fixCodeQualityCategory } from "./actions-util";
import { Logger } from "./logging";
import { ConfigurationError } from "./util";
export enum AnalysisKind {
@@ -61,6 +63,8 @@ export interface AnalysisConfig {
/** A predicate on filenames to decide whether a SARIF file
* belongs to this kind of analysis. */
sarifPredicate: (name: string) => boolean;
/** Analysis-specific adjustment of the category. */
fixCategory: (logger: Logger, category?: string) => string | undefined;
/** A prefix for environment variables used to track the uniqueness of SARIF uploads. */
sentinelPrefix: string;
}
@@ -74,6 +78,7 @@ export const CodeScanning: AnalysisConfig = {
sarifPredicate: (name) =>
name.endsWith(CodeScanning.sarifExtension) &&
!CodeQuality.sarifPredicate(name),
fixCategory: (_, category) => category,
sentinelPrefix: "CODEQL_UPLOAD_SARIF_",
};
@@ -84,5 +89,29 @@ export const CodeQuality: AnalysisConfig = {
target: SARIF_UPLOAD_ENDPOINT.CODE_QUALITY,
sarifExtension: ".quality.sarif",
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
fixCategory: fixCodeQualityCategory,
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_",
};
/**
 * Gets the `AnalysisConfig` corresponding to `kind`.
 * @param kind The analysis kind to get the `AnalysisConfig` for.
 * @returns The `AnalysisConfig` corresponding to `kind`.
 */
export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
  // A `switch` (rather than a lookup object) keeps this function total:
  // the compiler verifies that every member of `AnalysisKind` has a case,
  // and will report an error here if a new member is ever added without one.
  switch (kind) {
    case AnalysisKind.CodeQuality:
      return CodeQuality;
    case AnalysisKind.CodeScanning:
      return CodeScanning;
  }
}
// Since we have overlapping extensions (i.e. ".sarif" includes ".quality.sarif"),
// we want to scan a folder containing SARIF files in an order that finds the more
// specific extensions first. This constant defines an array in the order of analyis
// configurations with more specific extensions to less specific extensions.
export const SarifScanOrder = [CodeQuality, CodeScanning];
+3 -5
View File
@@ -356,16 +356,14 @@ async function run() {
}
if (isCodeQualityEnabled(config)) {
const analysis = analyses.CodeQuality;
const qualityUploadResult = await uploadLib.uploadFiles(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.fixCodeQualityCategory(
logger,
actionsUtil.getOptionalInput("category"),
),
actionsUtil.getOptionalInput("category"),
features,
logger,
analyses.CodeQuality,
analysis,
);
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
}
+1 -1
View File
@@ -334,7 +334,7 @@ test("resolveQuerySuiteAlias", (t) => {
for (const suite of defaultSuites) {
const resolved = resolveQuerySuiteAlias(KnownLanguage.go, suite);
t.assert(
resolved.endsWith(".qls"),
path.extname(resolved) === ".qls",
"Resolved default suite doesn't end in .qls",
);
t.assert(
+1 -2
View File
@@ -7,7 +7,6 @@ import * as del from "del";
import * as yaml from "js-yaml";
import {
fixCodeQualityCategory,
getRequiredInput,
getTemporaryDirectory,
PullRequestBranches,
@@ -781,7 +780,7 @@ export async function runQueries(
// accepted by the Code Quality backend.
let category = automationDetailsId;
if (analysis.kind === analyses.AnalysisKind.CodeQuality) {
category = fixCodeQualityCategory(logger, automationDetailsId);
category = analysis.fixCategory(logger, automationDetailsId);
}
const sarifFile = path.join(
-1
View File
@@ -153,7 +153,6 @@ const packSpecPrettyPrintingMacro = test.macro({
title: (
_providedTitle: string | undefined,
packStr: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_packObj: dbConfig.Pack,
) => `Prettyprint pack spec: '${packStr}'`,
});
+1 -1
View File
@@ -59,7 +59,7 @@ export async function uploadCombinedSarifArtifacts(
for (const outputDir of outputDirs) {
const sarifFiles = fs
.readdirSync(path.resolve(baseTempDir, outputDir))
.filter((f) => f.endsWith(".sarif"));
.filter((f) => path.extname(f) === ".sarif");
for (const sarifFile of sarifFiles) {
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
+81 -11
View File
@@ -3,7 +3,7 @@ import * as path from "path";
import test from "ava";
import { CodeQuality, CodeScanning } from "./analyses";
import { AnalysisKind, CodeQuality, CodeScanning } from "./analyses";
import { getRunnerLogger, Logger } from "./logging";
import { setupTests } from "./testing-utils";
import * as uploadLib from "./upload-lib";
@@ -127,27 +127,97 @@ test("finding SARIF files", async (t) => {
fs.writeFileSync(path.join(tmpDir, "a.quality.sarif"), "");
fs.writeFileSync(path.join(tmpDir, "dir1", "b.quality.sarif"), "");
const expectedSarifFiles = [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
];
const sarifFiles = uploadLib.findSarifFilesInDir(
tmpDir,
CodeScanning.sarifPredicate,
);
t.deepEqual(sarifFiles, [
path.join(tmpDir, "a.sarif"),
path.join(tmpDir, "b.sarif"),
path.join(tmpDir, "dir1", "d.sarif"),
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
]);
t.deepEqual(sarifFiles, expectedSarifFiles);
const expectedQualitySarifFiles = [
path.join(tmpDir, "a.quality.sarif"),
path.join(tmpDir, "dir1", "b.quality.sarif"),
];
const qualitySarifFiles = uploadLib.findSarifFilesInDir(
tmpDir,
CodeQuality.sarifPredicate,
);
t.deepEqual(qualitySarifFiles, [
path.join(tmpDir, "a.quality.sarif"),
path.join(tmpDir, "dir1", "b.quality.sarif"),
]);
t.deepEqual(qualitySarifFiles, expectedQualitySarifFiles);
const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
getRunnerLogger(true),
tmpDir,
);
t.not(groupedSarifFiles, undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
t.deepEqual(
groupedSarifFiles[AnalysisKind.CodeScanning],
expectedSarifFiles,
);
t.deepEqual(
groupedSarifFiles[AnalysisKind.CodeQuality],
expectedQualitySarifFiles,
);
});
});
// A lone `.quality.sarif` file should be grouped under Code Quality only;
// no Code Scanning group should be produced for it.
test("getGroupedSarifFilePaths - Code Quality file", async (t) => {
  await withTmpDir(async (tmpDir) => {
    // Create an empty file with the Code Quality SARIF extension.
    const sarifPath = path.join(tmpDir, "a.quality.sarif");
    fs.writeFileSync(sarifPath, "");
    const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
      getRunnerLogger(true),
      sarifPath,
    );
    // The file must appear in the Code Quality group and nowhere else.
    t.not(groupedSarifFiles, undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.CodeQuality], [sarifPath]);
  });
});
// A lone `.sarif` file should be grouped under Code Scanning only;
// no Code Quality group should be produced for it.
test("getGroupedSarifFilePaths - Code Scanning file", async (t) => {
  await withTmpDir(async (tmpDir) => {
    // Create an empty file with the plain Code Scanning SARIF extension.
    const sarifPath = path.join(tmpDir, "a.sarif");
    fs.writeFileSync(sarifPath, "");
    const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
      getRunnerLogger(true),
      sarifPath,
    );
    // The file must appear in the Code Scanning group and nowhere else.
    t.not(groupedSarifFiles, undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
  });
});
// An explicitly named file that matches no SARIF extension (here `.json`) is
// still assigned to Code Scanning, which acts as the catch-all for single
// file paths in `getGroupedSarifFilePaths`.
test("getGroupedSarifFilePaths - Other file", async (t) => {
  await withTmpDir(async (tmpDir) => {
    // Create an empty file whose extension matches neither analysis predicate.
    const sarifPath = path.join(tmpDir, "a.json");
    fs.writeFileSync(sarifPath, "");
    const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
      getRunnerLogger(true),
      sarifPath,
    );
    // The file falls through to the Code Scanning group only.
    t.not(groupedSarifFiles, undefined);
    t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
    t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
    t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
  });
});
+74
View File
@@ -459,6 +459,79 @@ export function getSarifFilePaths(
return sarifFiles;
}
type GroupedSarifFiles = Partial<Record<analyses.AnalysisKind, string[]>>;
/**
 * Finds SARIF files in `sarifPath`, and groups them by analysis kind, following `SarifScanOrder`.
 *
 * @param logger The logger to use.
 * @param sarifPath The path of a file or directory to recursively scan for SARIF files.
 * @returns The `.sarif` files found in `sarifPath`, grouped by analysis kind.
 */
export async function getGroupedSarifFilePaths(
  logger: Logger,
  sarifPath: string,
): Promise<GroupedSarifFiles> {
  const stats = fs.statSync(sarifPath, { throwIfNoEntry: false });
  if (stats === undefined) {
    // This is always a configuration error, even for first-party runs.
    throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
  }
  const results: GroupedSarifFiles = {};
  if (!stats.isDirectory()) {
    // `sarifPath` is a single file: assign it to the first analysis in scan
    // order that claims it. Code Scanning acts as the catch-all, accepting
    // any explicitly named file regardless of its extension.
    for (const analysisConfig of analyses.SarifScanOrder) {
      if (
        analysisConfig.kind === analyses.AnalysisKind.CodeScanning ||
        analysisConfig.sarifPredicate(sarifPath)
      ) {
        logger.debug(
          `Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`,
        );
        results[analysisConfig.kind] = [sarifPath];
        break;
      }
    }
    return results;
  }
  // `sarifPath` is a directory: find every `.sarif` file in it, then let each
  // analysis (in `SarifScanOrder`, most specific extension first) claim its
  // files from the pool of files not yet assigned to an earlier analysis.
  let unassignedSarifFiles = findSarifFilesInDir(
    sarifPath,
    (name) => path.extname(name) === ".sarif",
  );
  logger.debug(
    `Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`,
  );
  for (const analysisConfig of analyses.SarifScanOrder) {
    // Partition the unassigned files in a single pass: those claimed by this
    // analysis, and those left over for the remaining analyses.
    const claimed: string[] = [];
    const unclaimed: string[] = [];
    for (const file of unassignedSarifFiles) {
      if (analysisConfig.sarifPredicate(file)) {
        claimed.push(file);
      } else {
        unclaimed.push(file);
      }
    }
    if (claimed.length > 0) {
      logger.debug(
        `The following SARIF files are for ${analysisConfig.name}: ${claimed.join(", ")}`,
      );
      unassignedSarifFiles = unclaimed;
      results[analysisConfig.kind] = claimed;
    } else {
      logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
    }
  }
  if (unassignedSarifFiles.length !== 0) {
    logger.warning(
      `Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`,
    );
  }
  return results;
}
// Counts the number of results in the given SARIF file
function countResultsInSarif(sarif: string): number {
let numResults = 0;
@@ -655,6 +728,7 @@ export async function uploadSpecifiedFiles(
const gitHubVersion = await getGitHubVersion();
let sarif: SarifFile;
category = uploadTarget.fixCategory(logger, category);
if (sarifPaths.length > 1) {
// Validate that the files we were asked to upload are all valid SARIF files
+5 -82
View File
@@ -4,87 +4,16 @@ import * as path from "path";
import test, { ExecutionContext } from "ava";
import * as sinon from "sinon";
import {
AnalysisConfig,
AnalysisKind,
CodeQuality,
CodeScanning,
} from "./analyses";
import { AnalysisKind, getAnalysisConfig } from "./analyses";
import { getRunnerLogger } from "./logging";
import { createFeatures, setupTests } from "./testing-utils";
import { UploadResult } from "./upload-lib";
import * as uploadLib from "./upload-lib";
import { findAndUpload, uploadSarif } from "./upload-sarif";
import { uploadSarif } from "./upload-sarif";
import * as util from "./util";
setupTests(test);
const findAndUploadMacro = test.macro({
exec: async (
t: ExecutionContext<unknown>,
sarifFiles: string[],
analysis: AnalysisConfig,
sarifPath: (tempDir: string) => string = (tempDir) => tempDir,
expectedResult: UploadResult | undefined,
) => {
await util.withTmpDir(async (tempDir) => {
sinon.stub(uploadLib, "uploadSpecifiedFiles").resolves(expectedResult);
const logger = getRunnerLogger(true);
const features = createFeatures([]);
for (const sarifFile of sarifFiles) {
fs.writeFileSync(path.join(tempDir, sarifFile), "");
}
const stats = fs.statSync(sarifPath(tempDir));
const actual = await findAndUpload(
logger,
features,
sarifPath(tempDir),
stats,
"",
analysis,
);
t.deepEqual(actual, expectedResult);
});
},
title: (providedTitle = "") => `findAndUpload - ${providedTitle}`,
});
test(
"no matching files",
findAndUploadMacro,
["test.json"],
CodeScanning,
undefined,
undefined,
);
test(
"matching files for Code Scanning with directory path",
findAndUploadMacro,
["test.sarif"],
CodeScanning,
undefined,
{
statusReport: {},
sarifID: "some-id",
},
);
test(
"matching files for Code Scanning with file path",
findAndUploadMacro,
["test.sarif"],
CodeScanning,
(tempDir) => path.join(tempDir, "test.sarif"),
{
statusReport: {},
sarifID: "some-id",
},
);
interface UploadSarifExpectedResult {
uploadResult?: UploadResult;
expectedFiles?: string[];
@@ -117,9 +46,7 @@ const uploadSarifMacro = test.macro({
sinon.match.any,
features,
logger,
analysisKind === AnalysisKind.CodeScanning
? CodeScanning
: CodeQuality,
getAnalysisConfig(analysisKind),
)
.resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
}
@@ -146,9 +73,7 @@ const uploadSarifMacro = test.macro({
sinon.match.any,
features,
logger,
analysisKind === AnalysisKind.CodeScanning
? CodeScanning
: CodeQuality,
getAnalysisConfig(analysisKind),
),
);
} else {
@@ -164,9 +89,7 @@ const uploadSarifMacro = test.macro({
sinon.match.any,
features,
logger,
analysisKind === AnalysisKind.CodeScanning
? CodeScanning
: CodeQuality,
getAnalysisConfig(analysisKind),
),
`uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`,
);
+17 -88
View File
@@ -1,65 +1,8 @@
import * as fs from "fs";
import * as actionsUtil from "./actions-util";
import * as analyses from "./analyses";
import { FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
import * as upload_lib from "./upload-lib";
import { ConfigurationError } from "./util";
/**
 * Searches `sarifPath` for SARIF files that belong to the given `analysis`.
 * If any are found, they are uploaded to the appropriate endpoint for that `analysis`.
 *
 * @param logger The logger to use.
 * @param features Information about FFs.
 * @param sarifPath The path to a SARIF file or directory containing SARIF files.
 * @param pathStats Information about `sarifPath`.
 * @param checkoutPath The checkout path.
 * @param analysis The configuration of the analysis we should upload SARIF files for.
 * @param category The SARIF category to use for the upload.
 * @returns The result of uploading the SARIF file(s) or `undefined` if there are none.
 */
export async function findAndUpload(
  logger: Logger,
  features: FeatureEnablement,
  sarifPath: string,
  pathStats: fs.Stats,
  checkoutPath: string,
  analysis: analyses.AnalysisConfig,
  category?: string,
): Promise<upload_lib.UploadResult | undefined> {
  let sarifFiles: string[];
  if (pathStats.isDirectory()) {
    // Recursively collect the files in the directory that match this analysis.
    sarifFiles = upload_lib.findSarifFilesInDir(
      sarifPath,
      analysis.sarifPredicate,
    );
  } else {
    // A single file must either match this analysis's predicate, or — for
    // Code Scanning only — be any file that is not a Code Quality file.
    const matchesAnalysis =
      pathStats.isFile() &&
      (analysis.sarifPredicate(sarifPath) ||
        (analysis.kind === analyses.AnalysisKind.CodeScanning &&
          !analyses.CodeQuality.sarifPredicate(sarifPath)));
    if (!matchesAnalysis) {
      return undefined;
    }
    sarifFiles = [sarifPath];
  }
  if (sarifFiles.length === 0) {
    return undefined;
  }
  return await upload_lib.uploadSpecifiedFiles(
    sarifFiles,
    checkoutPath,
    category,
    features,
    logger,
    analysis,
  );
}
import { unsafeEntriesInvariant } from "./util";
// Maps analysis kinds to SARIF IDs.
export type UploadSarifResults = Partial<
@@ -84,38 +27,24 @@ export async function uploadSarif(
sarifPath: string,
category?: string,
): Promise<UploadSarifResults> {
const pathStats = fs.lstatSync(sarifPath, { throwIfNoEntry: false });
if (pathStats === undefined) {
throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
}
const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
logger,
sarifPath,
);
const uploadResults: UploadSarifResults = {};
const uploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
analyses.CodeScanning,
category,
);
if (uploadResult !== undefined) {
uploadResults[analyses.AnalysisKind.CodeScanning] = uploadResult;
}
// If there are `.quality.sarif` files in `sarifPath`, then upload those to the code quality service.
const qualityUploadResult = await findAndUpload(
logger,
features,
sarifPath,
pathStats,
checkoutPath,
analyses.CodeQuality,
actionsUtil.fixCodeQualityCategory(logger, category),
);
if (qualityUploadResult !== undefined) {
uploadResults[analyses.AnalysisKind.CodeQuality] = qualityUploadResult;
for (const [analysisKind, sarifFiles] of unsafeEntriesInvariant(
sarifGroups,
)) {
const analysisConfig = analyses.getAnalysisConfig(analysisKind);
uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
logger,
analysisConfig,
);
}
return uploadResults;
+24
View File
@@ -1287,3 +1287,27 @@ export async function asyncSome<T>(
/** Type guard that narrows `value` to `T` when it is neither `null` nor `undefined`. */
export function isDefined<T>(value: T | null | undefined): value is T {
  // Loose inequality against `null` rejects exactly `null` and `undefined`.
  return value != null;
}
/**
 * A typed wrapper around `Object.keys` that reports the result as `Array<keyof T>`.
 *
 * Unsound if `object` was upcast to `T` from a subtype with extra properties:
 * the returned array may then contain keys that `keyof T` does not describe.
 */
export function unsafeKeysInvariant<T extends Record<string, any>>(
  object: T,
): Array<keyof T> {
  const keys = Object.keys(object);
  return keys as Array<keyof T>;
}
/**
 * A typed wrapper around `Object.entries` that drops entries whose value is
 * `undefined` and reports the keys as `keyof T`.
 *
 * Unsound if `object` was upcast to `T` from a subtype with extra properties:
 * the result may then contain keys that `keyof T` does not describe.
 */
export function unsafeEntriesInvariant<T extends Record<string, any>>(
  object: T,
): Array<[keyof T, Exclude<T[keyof T], undefined>]> {
  const definedEntries = Object.entries(object).filter(
    ([, value]) => value !== undefined,
  );
  return definedEntries as Array<[keyof T, Exclude<T[keyof T], undefined>]>;
}