mirror of
https://github.com/github/codeql-action.git
synced 2026-04-27 17:39:15 +00:00
Merge pull request #3172 from github/update-v3.30.6-10feb5d2a
Merge main into releases/v3
This commit is contained in:
+1
-1
@@ -89,7 +89,7 @@ jobs:
|
||||
ref: refs/heads/main
|
||||
sha: 5e235361806c361d4d3f8859e3c897658025a9a2
|
||||
- name: Check output from `upload-sarif` step
|
||||
if: fromJSON(steps.upload-sarif.outputs.sarif-ids)[0].analysis != 'code-quality'
|
||||
if: '!(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)'
|
||||
run: exit 1
|
||||
env:
|
||||
CODEQL_ACTION_TEST_MODE: true
|
||||
|
||||
@@ -1,99 +0,0 @@
|
||||
name: Update dependency proxy release assets
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "The tag of CodeQL Bundle release that contains the proxy binaries as release assets"
|
||||
type: string
|
||||
required: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
update:
|
||||
name: Update code and create PR
|
||||
timeout-minutes: 15
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write # needed to push the updated files
|
||||
pull-requests: write # needed to create the PR
|
||||
env:
|
||||
RELEASE_TAG: ${{ inputs.tag }}
|
||||
steps:
|
||||
- name: Check release tag format
|
||||
id: checks
|
||||
run: |
|
||||
if ! [[ $RELEASE_TAG =~ ^codeql-bundle-v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
||||
echo "Invalid release tag: expected a CodeQL bundle tag in the 'codeql-bundle-vM.N.P' format."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "target_branch=dependency-proxy/$RELEASE_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check that the release exists
|
||||
env:
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
run: |
|
||||
(gh release view --repo "$GITHUB_REPOSITORY" --json "assets" "$RELEASE_TAG" && echo "Release found.") || exit 1
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v5
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0 # ensure we have all tags and can push commits
|
||||
ref: main
|
||||
|
||||
- name: Update git config
|
||||
run: |
|
||||
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --global user.name "github-actions[bot]"
|
||||
|
||||
- name: Update release tag and version
|
||||
run: |
|
||||
NOW=$(date +"%Y%m%d%H%M%S") # only used to make sure we don't fetch stale binaries from the toolcache
|
||||
sed -i "s|https://github.com/github/codeql-action/releases/download/codeql-bundle-v[0-9.]\+/|https://github.com/github/codeql-action/releases/download/$RELEASE_TAG/|g" ./src/start-proxy-action.ts
|
||||
sed -i "s/\"v2.0.[0-9]\+\"/\"v2.0.$NOW\"/g" ./src/start-proxy-action.ts
|
||||
|
||||
- name: Compile TypeScript and commit changes
|
||||
env:
|
||||
TARGET_BRANCH: ${{ steps.checks.outputs.target_branch }}
|
||||
run: |
|
||||
set -exu
|
||||
git checkout -b "$TARGET_BRANCH"
|
||||
|
||||
npm run build
|
||||
git add ./src/start-proxy-action.ts
|
||||
git add ./lib
|
||||
git commit -m "Update release used by \`start-proxy\` action"
|
||||
|
||||
- name: Push changes and open PR
|
||||
env:
|
||||
GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
|
||||
TARGET_BRANCH: ${{ steps.checks.outputs.target_branch }}
|
||||
PR_FLAG: ${{ (github.event_name == 'workflow_dispatch' && '--draft') || '--dry-run' }}
|
||||
run: |
|
||||
set -exu
|
||||
pr_title="Update release used by \`start-proxy\` to \`$RELEASE_TAG\`"
|
||||
pr_body=$(cat << EOF
|
||||
This PR updates the \`start-proxy\` action to use the private registry proxy binaries that
|
||||
are attached as release assets to the \`$RELEASE_TAG\` release.
|
||||
|
||||
|
||||
Please do the following before merging:
|
||||
|
||||
- [ ] Verify that the changes to the code are correct.
|
||||
- [ ] Mark the PR as ready for review to trigger the CI.
|
||||
EOF
|
||||
)
|
||||
|
||||
git push origin "$TARGET_BRANCH"
|
||||
gh pr create \
|
||||
--head "$TARGET_BRANCH" \
|
||||
--base "main" \
|
||||
--title "${pr_title}" \
|
||||
--body "${pr_body}" \
|
||||
$PR_FLAG
|
||||
@@ -2,6 +2,10 @@
|
||||
|
||||
See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
|
||||
|
||||
## 3.30.6 - 02 Oct 2025
|
||||
|
||||
- Update default CodeQL bundle version to 2.23.2. [#3168](https://github.com/github/codeql-action/pull/3168)
|
||||
|
||||
## 3.30.5 - 26 Sep 2025
|
||||
|
||||
- We fixed a bug that was introduced in `3.30.4` with `upload-sarif` which resulted in files without a `.sarif` extension not getting uploaded. [#3160](https://github.com/github/codeql-action/pull/3160)
|
||||
|
||||
@@ -146,6 +146,12 @@ export default [
|
||||
"@typescript-eslint/prefer-regexp-exec": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
"@typescript-eslint/restrict-template-expressions": "off",
|
||||
"@typescript-eslint/no-unused-vars": [
|
||||
"error",
|
||||
{
|
||||
"argsIgnorePattern": "^_",
|
||||
}
|
||||
],
|
||||
"func-style": "off",
|
||||
},
|
||||
},
|
||||
|
||||
Generated
+3
-3
@@ -26438,7 +26438,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -28542,7 +28542,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -118671,7 +118671,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
|
||||
if (fs5.existsSync(baseTempDir)) {
|
||||
const outputDirs = fs5.readdirSync(baseTempDir);
|
||||
for (const outputDir of outputDirs) {
|
||||
const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => f.endsWith(".sarif"));
|
||||
const sarifFiles = fs5.readdirSync(path5.resolve(baseTempDir, outputDir)).filter((f) => path5.extname(f) === ".sarif");
|
||||
for (const sarifFile of sarifFiles) {
|
||||
toUpload.push(path5.resolve(baseTempDir, outputDir, sarifFile));
|
||||
}
|
||||
|
||||
Generated
+39
-15
@@ -28149,11 +28149,11 @@ var require_out = __commonJS({
|
||||
async.read(path20, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports2.stat = stat;
|
||||
function statSync3(path20, optionsOrSettings) {
|
||||
function statSync4(path20, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path20, settings);
|
||||
}
|
||||
exports2.statSync = statSync3;
|
||||
exports2.statSync = statSync4;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
@@ -32287,7 +32287,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -34391,7 +34391,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -90167,6 +90167,7 @@ var CodeScanning = {
|
||||
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
|
||||
sarifExtension: ".sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
|
||||
};
|
||||
var CodeQuality = {
|
||||
@@ -90175,6 +90176,7 @@ var CodeQuality = {
|
||||
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
|
||||
};
|
||||
|
||||
@@ -90634,8 +90636,8 @@ var path8 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var crypto = __toESM(require("crypto"));
|
||||
@@ -93338,6 +93340,7 @@ async function makeGlobber(patterns) {
|
||||
return glob.create(patterns.join("\n"));
|
||||
}
|
||||
async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
const status = [];
|
||||
for (const language of config.languages) {
|
||||
const cacheConfig = getDefaultCacheConfig()[language];
|
||||
if (cacheConfig === void 0) {
|
||||
@@ -93348,6 +93351,7 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
}
|
||||
const globber = await makeGlobber(cacheConfig.hash);
|
||||
if ((await globber.glob()).length === 0) {
|
||||
status.push({ language, result: "no-hash" /* NoHash */ });
|
||||
logger.info(
|
||||
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`
|
||||
);
|
||||
@@ -93355,6 +93359,7 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
}
|
||||
const size = await getTotalCacheSize(cacheConfig.paths, logger, true);
|
||||
if (size === 0) {
|
||||
status.push({ language, result: "empty" /* Empty */ });
|
||||
logger.info(
|
||||
`Skipping upload of dependency cache for ${language} since it is empty.`
|
||||
);
|
||||
@@ -93365,18 +93370,28 @@ async function uploadDependencyCaches(config, logger, minimizeJavaJars) {
|
||||
`Uploading cache of size ${size} for ${language} with key ${key}...`
|
||||
);
|
||||
try {
|
||||
const start = performance.now();
|
||||
await actionsCache3.saveCache(cacheConfig.paths, key);
|
||||
const upload_duration_ms = Math.round(performance.now() - start);
|
||||
status.push({
|
||||
language,
|
||||
result: "stored" /* Stored */,
|
||||
upload_size_bytes: Math.round(size),
|
||||
upload_duration_ms
|
||||
});
|
||||
} catch (error2) {
|
||||
if (error2 instanceof actionsCache3.ReserveCacheError) {
|
||||
logger.info(
|
||||
`Not uploading cache for ${language}, because ${key} is already in use.`
|
||||
);
|
||||
logger.debug(error2.message);
|
||||
status.push({ language, result: "duplicate" /* Duplicate */ });
|
||||
} else {
|
||||
throw error2;
|
||||
}
|
||||
}
|
||||
}
|
||||
return status;
|
||||
}
|
||||
async function cacheKey2(language, cacheConfig, minimizeJavaJars = false) {
|
||||
const hash2 = await glob.hashFiles(cacheConfig.hash.join("\n"));
|
||||
@@ -93826,7 +93841,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||
logger.info(`Interpreting ${analysis.name} results for ${language}`);
|
||||
let category = automationDetailsId;
|
||||
if (analysis.kind === "code-quality" /* CodeQuality */) {
|
||||
category = fixCodeQualityCategory(logger, automationDetailsId);
|
||||
category = analysis.fixCategory(logger, automationDetailsId);
|
||||
}
|
||||
const sarifFile = path16.join(
|
||||
sarifFolder,
|
||||
@@ -95699,6 +95714,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
let sarif;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
if (sarifPaths.length > 1) {
|
||||
for (const sarifPath of sarifPaths) {
|
||||
const parsedSarif = readSarifFile(sarifPath);
|
||||
@@ -95941,7 +95957,7 @@ function filterAlertsByDiffRange(logger, sarif) {
|
||||
}
|
||||
|
||||
// src/analyze-action.ts
|
||||
async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, trapCacheCleanup, logger) {
|
||||
async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, trapCacheCleanup, dependencyCacheResults, logger) {
|
||||
const status = getActionsStatus(error2, stats?.analyze_failure_language);
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
"finish" /* Analyze */,
|
||||
@@ -95958,7 +95974,8 @@ async function sendStatusReport2(startedAt, config, stats, error2, trapCacheUplo
|
||||
...statusReportBase,
|
||||
...stats || {},
|
||||
...dbCreationTimings || {},
|
||||
...trapCacheCleanup || {}
|
||||
...trapCacheCleanup || {},
|
||||
dependency_caching_upload_results: dependencyCacheResults
|
||||
};
|
||||
if (config && didUploadTrapCaches) {
|
||||
const trapCacheUploadStatusReport = {
|
||||
@@ -96039,6 +96056,7 @@ async function run() {
|
||||
let trapCacheUploadTime = void 0;
|
||||
let dbCreationTimings = void 0;
|
||||
let didUploadTrapCaches = false;
|
||||
let dependencyCacheResults;
|
||||
initializeEnvironment(getActionVersion());
|
||||
persistInputs();
|
||||
const logger = getActionsLogger();
|
||||
@@ -96146,16 +96164,14 @@ async function run() {
|
||||
core14.setOutput("sarif-id", uploadResult.sarifID);
|
||||
}
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
const analysis = CodeQuality;
|
||||
const qualityUploadResult = await uploadFiles(
|
||||
outputDir,
|
||||
getRequiredInput("checkout_path"),
|
||||
fixCodeQualityCategory(
|
||||
logger,
|
||||
getOptionalInput("category")
|
||||
),
|
||||
getOptionalInput("category"),
|
||||
features,
|
||||
logger,
|
||||
CodeQuality
|
||||
analysis
|
||||
);
|
||||
core14.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
|
||||
}
|
||||
@@ -96177,7 +96193,11 @@ async function run() {
|
||||
"java_minimize_dependency_jars" /* JavaMinimizeDependencyJars */,
|
||||
codeql
|
||||
);
|
||||
await uploadDependencyCaches(config, logger, minimizeJavaJars);
|
||||
dependencyCacheResults = await uploadDependencyCaches(
|
||||
config,
|
||||
logger,
|
||||
minimizeJavaJars
|
||||
);
|
||||
}
|
||||
if (isInTestMode()) {
|
||||
logger.debug("In test mode. Waiting for processing is disabled.");
|
||||
@@ -96208,6 +96228,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
return;
|
||||
@@ -96225,6 +96246,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
} else if (runStats) {
|
||||
@@ -96237,6 +96259,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
} else {
|
||||
@@ -96249,6 +96272,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger
|
||||
);
|
||||
}
|
||||
|
||||
Generated
+4
-4
@@ -26438,7 +26438,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -28542,7 +28542,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -78295,8 +78295,8 @@ var path3 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var fs2 = __toESM(require("fs"));
|
||||
|
||||
+4
-4
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-v2.23.1",
|
||||
"cliVersion": "2.23.1",
|
||||
"priorBundleVersion": "codeql-bundle-v2.23.0",
|
||||
"priorCliVersion": "2.23.0"
|
||||
"bundleVersion": "codeql-bundle-v2.23.2",
|
||||
"cliVersion": "2.23.2",
|
||||
"priorBundleVersion": "codeql-bundle-v2.23.1",
|
||||
"priorCliVersion": "2.23.1"
|
||||
}
|
||||
|
||||
Generated
+73
-10
@@ -28149,11 +28149,11 @@ var require_out = __commonJS({
|
||||
async.read(path19, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports2.stat = stat;
|
||||
function statSync2(path19, optionsOrSettings) {
|
||||
function statSync3(path19, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path19, settings);
|
||||
}
|
||||
exports2.statSync = statSync2;
|
||||
exports2.statSync = statSync3;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
@@ -32287,7 +32287,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -34391,7 +34391,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -128337,6 +128337,9 @@ function getWorkflowRunAttempt() {
|
||||
function isSelfHostedRunner() {
|
||||
return process.env.RUNNER_ENVIRONMENT === "self-hosted";
|
||||
}
|
||||
function isDefaultSetup() {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
}
|
||||
function prettyPrintInvocation(cmd, args) {
|
||||
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
|
||||
}
|
||||
@@ -128402,6 +128405,30 @@ var restoreInputs = function() {
|
||||
}
|
||||
}
|
||||
};
|
||||
var qualityCategoryMapping = {
|
||||
"c#": "csharp",
|
||||
cpp: "c-cpp",
|
||||
c: "c-cpp",
|
||||
"c++": "c-cpp",
|
||||
java: "java-kotlin",
|
||||
javascript: "javascript-typescript",
|
||||
typescript: "javascript-typescript",
|
||||
kotlin: "java-kotlin"
|
||||
};
|
||||
function fixCodeQualityCategory(logger, category) {
|
||||
if (category !== void 0 && isDefaultSetup() && category.startsWith("/language:")) {
|
||||
const language = category.substring("/language:".length);
|
||||
const mappedLanguage = qualityCategoryMapping[language];
|
||||
if (mappedLanguage) {
|
||||
const newCategory = `/language:${mappedLanguage}`;
|
||||
logger.info(
|
||||
`Adjusted category for Code Quality from '${category}' to '${newCategory}'.`
|
||||
);
|
||||
return newCategory;
|
||||
}
|
||||
}
|
||||
return category;
|
||||
}
|
||||
|
||||
// src/api-client.ts
|
||||
var core5 = __toESM(require_core());
|
||||
@@ -128534,6 +128561,18 @@ function computeAutomationID(analysis_key, environment) {
|
||||
}
|
||||
return automationID;
|
||||
}
|
||||
async function listActionsCaches(key, ref) {
|
||||
const repositoryNwo = getRepositoryNwo();
|
||||
return await getApiClient().paginate(
|
||||
"GET /repos/{owner}/{repo}/actions/caches",
|
||||
{
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
key,
|
||||
ref
|
||||
}
|
||||
);
|
||||
}
|
||||
function wrapApiConfigurationError(e) {
|
||||
if (isHTTPError(e)) {
|
||||
if (e.message.includes("API rate limit exceeded for installation") || e.message.includes("commit not found") || e.message.includes("Resource not accessible by integration") || /ref .* not found in this repository/.test(e.message)) {
|
||||
@@ -128547,6 +128586,9 @@ function wrapApiConfigurationError(e) {
|
||||
return e;
|
||||
}
|
||||
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
// src/codeql.ts
|
||||
var fs13 = __toESM(require("fs"));
|
||||
var path13 = __toESM(require("path"));
|
||||
@@ -128807,6 +128849,7 @@ var CodeScanning = {
|
||||
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
|
||||
sarifExtension: ".sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_2, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
|
||||
};
|
||||
var CodeQuality = {
|
||||
@@ -128815,12 +128858,10 @@ var CodeQuality = {
|
||||
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
|
||||
};
|
||||
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
// src/config/db-config.ts
|
||||
var semver2 = __toESM(require_semver2());
|
||||
var PACK_IDENTIFIER_PATTERN = (function() {
|
||||
@@ -128840,8 +128881,8 @@ var path8 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var fs6 = __toESM(require("fs"));
|
||||
@@ -131169,6 +131210,22 @@ var core11 = __toESM(require_core());
|
||||
// src/dependency-caching.ts
|
||||
var actionsCache3 = __toESM(require_cache3());
|
||||
var glob = __toESM(require_glob3());
|
||||
var CODEQL_DEPENDENCY_CACHE_PREFIX = "codeql-dependencies";
|
||||
async function getDependencyCacheUsage(logger) {
|
||||
try {
|
||||
const caches = await listActionsCaches(CODEQL_DEPENDENCY_CACHE_PREFIX);
|
||||
const totalSize = caches.reduce(
|
||||
(acc, cache) => acc + (cache.size_in_bytes ?? 0),
|
||||
0
|
||||
);
|
||||
return { count: caches.length, size_bytes: totalSize };
|
||||
} catch (err) {
|
||||
logger.warning(
|
||||
`Unable to retrieve information about dependency cache usage: ${getErrorMessage(err)}`
|
||||
);
|
||||
}
|
||||
return void 0;
|
||||
}
|
||||
|
||||
// src/analyze.ts
|
||||
function dbIsFinalized(config, language, logger) {
|
||||
@@ -133133,6 +133190,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
let sarif;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
if (sarifPaths.length > 1) {
|
||||
for (const sarifPath of sarifPaths) {
|
||||
const parsedSarif = readSarifFile(sarifPath);
|
||||
@@ -133718,6 +133776,7 @@ async function runWrapper() {
|
||||
const startedAt = /* @__PURE__ */ new Date();
|
||||
let config;
|
||||
let uploadFailedSarifResult;
|
||||
let dependencyCachingUsage;
|
||||
try {
|
||||
restoreInputs();
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
@@ -133745,6 +133804,9 @@ async function runWrapper() {
|
||||
features,
|
||||
logger
|
||||
);
|
||||
if (await isAnalyzingDefaultBranch() && config.dependencyCachingEnabled !== "none" /* None */) {
|
||||
dependencyCachingUsage = await getDependencyCacheUsage(logger);
|
||||
}
|
||||
}
|
||||
} catch (unwrappedError) {
|
||||
const error2 = wrapError(unwrappedError);
|
||||
@@ -133778,7 +133840,8 @@ async function runWrapper() {
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...uploadFailedSarifResult,
|
||||
job_status: getFinalJobStatus()
|
||||
job_status: getFinalJobStatus(),
|
||||
dependency_caching_usage: dependencyCachingUsage
|
||||
};
|
||||
logger.info("Sending status report for init-post step.");
|
||||
await sendStatusReport(statusReport);
|
||||
|
||||
Generated
+21
-11
@@ -32287,7 +32287,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -34391,7 +34391,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -86563,8 +86563,8 @@ var path9 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var crypto = __toESM(require("crypto"));
|
||||
@@ -88184,7 +88184,7 @@ async function makeGlobber(patterns) {
|
||||
return glob.create(patterns.join("\n"));
|
||||
}
|
||||
async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
|
||||
const restoredCaches = [];
|
||||
const status = [];
|
||||
for (const language of languages) {
|
||||
const cacheConfig = getDefaultCacheConfig()[language];
|
||||
if (cacheConfig === void 0) {
|
||||
@@ -88195,6 +88195,7 @@ async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
|
||||
}
|
||||
const globber = await makeGlobber(cacheConfig.hash);
|
||||
if ((await globber.glob()).length === 0) {
|
||||
status.push({ language, hit_kind: "no-hash" /* NoHash */ });
|
||||
logger.info(
|
||||
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`
|
||||
);
|
||||
@@ -88209,19 +88210,23 @@ async function downloadDependencyCaches(languages, logger, minimizeJavaJars) {
|
||||
", "
|
||||
)}`
|
||||
);
|
||||
const start = performance.now();
|
||||
const hitKey = await actionsCache3.restoreCache(
|
||||
cacheConfig.paths,
|
||||
primaryKey,
|
||||
restoreKeys
|
||||
);
|
||||
const download_duration_ms = Math.round(performance.now() - start);
|
||||
if (hitKey !== void 0) {
|
||||
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
|
||||
restoredCaches.push(language);
|
||||
const hit_kind = hitKey === primaryKey ? "exact" /* Exact */ : "partial" /* Partial */;
|
||||
status.push({ language, hit_kind, download_duration_ms });
|
||||
} else {
|
||||
status.push({ language, hit_kind: "miss" /* Miss */ });
|
||||
logger.info(`No suitable cache found for ${language}.`);
|
||||
}
|
||||
}
|
||||
return restoredCaches;
|
||||
return status;
|
||||
}
|
||||
async function cacheKey2(language, cacheConfig, minimizeJavaJars = false) {
|
||||
const hash = await glob.hashFiles(cacheConfig.hash.join("\n"));
|
||||
@@ -90324,7 +90329,7 @@ async function sendStatusReport(statusReport) {
|
||||
);
|
||||
}
|
||||
}
|
||||
async function createInitWithConfigStatusReport(config, initStatusReport, configFile, totalCacheSize, overlayBaseDatabaseStats) {
|
||||
async function createInitWithConfigStatusReport(config, initStatusReport, configFile, totalCacheSize, overlayBaseDatabaseStats, dependencyCachingResults) {
|
||||
const languages = config.languages.join(",");
|
||||
const paths = (config.originalUserInput.paths || []).join(",");
|
||||
const pathsIgnore = (config.originalUserInput["paths-ignore"] || []).join(
|
||||
@@ -90361,6 +90366,7 @@ async function createInitWithConfigStatusReport(config, initStatusReport, config
|
||||
trap_cache_download_duration_ms: Math.round(config.trapCacheDownloadTime),
|
||||
overlay_base_database_download_size_bytes: overlayBaseDatabaseStats?.databaseSizeBytes,
|
||||
overlay_base_database_download_duration_ms: overlayBaseDatabaseStats?.databaseDownloadDurationMs,
|
||||
dependency_caching_restore_results: dependencyCachingResults,
|
||||
query_filters: JSON.stringify(
|
||||
config.originalUserInput["query-filters"] ?? []
|
||||
),
|
||||
@@ -90543,7 +90549,7 @@ async function getWorkflowAbsolutePath(logger) {
|
||||
}
|
||||
|
||||
// src/init-action.ts
|
||||
async function sendCompletedStatusReport(startedAt, config, configFile, toolsDownloadStatusReport, toolsFeatureFlagsValid, toolsSource, toolsVersion, overlayBaseDatabaseStats, logger, error2) {
|
||||
async function sendCompletedStatusReport(startedAt, config, configFile, toolsDownloadStatusReport, toolsFeatureFlagsValid, toolsSource, toolsVersion, overlayBaseDatabaseStats, dependencyCachingResults, logger, error2) {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
"init" /* Init */,
|
||||
getActionsStatus(error2),
|
||||
@@ -90580,7 +90586,8 @@ async function sendCompletedStatusReport(startedAt, config, configFile, toolsDow
|
||||
Math.round(
|
||||
await getTotalCacheSize(Object.values(config.trapCaches), logger)
|
||||
),
|
||||
overlayBaseDatabaseStats
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults
|
||||
);
|
||||
await sendStatusReport({
|
||||
...initWithConfigStatusReport,
|
||||
@@ -90744,6 +90751,7 @@ async function run() {
|
||||
return;
|
||||
}
|
||||
let overlayBaseDatabaseStats;
|
||||
let dependencyCachingResults;
|
||||
try {
|
||||
if (config.overlayDatabaseMode === "overlay" /* Overlay */ && config.useOverlayDatabaseCaching) {
|
||||
overlayBaseDatabaseStats = await downloadOverlayBaseDatabaseFromCache(
|
||||
@@ -90888,7 +90896,7 @@ exec ${goBinaryPath} "$@"`
|
||||
codeql
|
||||
);
|
||||
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
|
||||
await downloadDependencyCaches(
|
||||
dependencyCachingResults = await downloadDependencyCaches(
|
||||
config.languages,
|
||||
logger,
|
||||
minimizeJavaJars
|
||||
@@ -90993,6 +91001,7 @@ exec ${goBinaryPath} "$@"`
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
logger,
|
||||
error2
|
||||
);
|
||||
@@ -91010,6 +91019,7 @@ exec ${goBinaryPath} "$@"`
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
logger
|
||||
);
|
||||
}
|
||||
|
||||
Generated
+2
-2
@@ -26438,7 +26438,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -28542,7 +28542,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
|
||||
Generated
+2
-2
@@ -26438,7 +26438,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -28542,7 +28542,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
|
||||
Generated
+46855
-677
File diff suppressed because it is too large
Load Diff
Generated
+111
-14
@@ -29446,11 +29446,11 @@ var require_out = __commonJS({
|
||||
async.read(path15, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports2.stat = stat;
|
||||
function statSync2(path15, optionsOrSettings) {
|
||||
function statSync3(path15, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path15, settings);
|
||||
}
|
||||
exports2.statSync = statSync2;
|
||||
exports2.statSync = statSync3;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
@@ -33584,7 +33584,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -35688,7 +35688,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -84821,6 +84821,7 @@ __export(upload_lib_exports, {
|
||||
InvalidSarifUploadError: () => InvalidSarifUploadError,
|
||||
buildPayload: () => buildPayload,
|
||||
findSarifFilesInDir: () => findSarifFilesInDir,
|
||||
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
|
||||
getSarifFilePaths: () => getSarifFilePaths,
|
||||
populateRunAutomationDetails: () => populateRunAutomationDetails,
|
||||
readSarifFile: () => readSarifFile,
|
||||
@@ -88473,6 +88474,9 @@ function getWorkflowRunAttempt() {
|
||||
}
|
||||
return workflowRunAttempt;
|
||||
}
|
||||
function isDefaultSetup() {
|
||||
return getWorkflowEventName() === "dynamic";
|
||||
}
|
||||
function prettyPrintInvocation(cmd, args) {
|
||||
return [cmd, ...args].map((x) => x.includes(" ") ? `'${x}'` : x).join(" ");
|
||||
}
|
||||
@@ -88529,6 +88533,57 @@ async function runTool(cmd, args = [], opts = {}) {
|
||||
}
|
||||
return stdout;
|
||||
}
|
||||
var qualityCategoryMapping = {
|
||||
"c#": "csharp",
|
||||
cpp: "c-cpp",
|
||||
c: "c-cpp",
|
||||
"c++": "c-cpp",
|
||||
java: "java-kotlin",
|
||||
javascript: "javascript-typescript",
|
||||
typescript: "javascript-typescript",
|
||||
kotlin: "java-kotlin"
|
||||
};
|
||||
function fixCodeQualityCategory(logger, category) {
|
||||
if (category !== void 0 && isDefaultSetup() && category.startsWith("/language:")) {
|
||||
const language = category.substring("/language:".length);
|
||||
const mappedLanguage = qualityCategoryMapping[language];
|
||||
if (mappedLanguage) {
|
||||
const newCategory = `/language:${mappedLanguage}`;
|
||||
logger.info(
|
||||
`Adjusted category for Code Quality from '${category}' to '${newCategory}'.`
|
||||
);
|
||||
return newCategory;
|
||||
}
|
||||
}
|
||||
return category;
|
||||
}
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
AnalysisKind2["CodeScanning"] = "code-scanning";
|
||||
AnalysisKind2["CodeQuality"] = "code-quality";
|
||||
return AnalysisKind2;
|
||||
})(AnalysisKind || {});
|
||||
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
|
||||
var CodeScanning = {
|
||||
kind: "code-scanning" /* CodeScanning */,
|
||||
name: "code scanning",
|
||||
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
|
||||
sarifExtension: ".sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
|
||||
};
|
||||
var CodeQuality = {
|
||||
kind: "code-quality" /* CodeQuality */,
|
||||
name: "code quality",
|
||||
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
|
||||
};
|
||||
var SarifScanOrder = [CodeQuality, CodeScanning];
|
||||
|
||||
// src/api-client.ts
|
||||
var core5 = __toESM(require_core());
|
||||
@@ -88921,14 +88976,6 @@ function wrapCliConfigurationError(cliError) {
|
||||
var fs7 = __toESM(require("fs"));
|
||||
var path9 = __toESM(require("path"));
|
||||
|
||||
// src/analyses.ts
|
||||
var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => {
|
||||
AnalysisKind2["CodeScanning"] = "code-scanning";
|
||||
AnalysisKind2["CodeQuality"] = "code-quality";
|
||||
return AnalysisKind2;
|
||||
})(AnalysisKind || {});
|
||||
var supportedAnalysisKinds = new Set(Object.values(AnalysisKind));
|
||||
|
||||
// src/caching-utils.ts
|
||||
var core6 = __toESM(require_core());
|
||||
|
||||
@@ -88949,8 +88996,8 @@ var path8 = __toESM(require("path"));
|
||||
var semver4 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var fs5 = __toESM(require("fs"));
|
||||
@@ -92391,6 +92438,54 @@ function getSarifFilePaths(sarifPath, isSarif) {
|
||||
}
|
||||
return sarifFiles;
|
||||
}
|
||||
async function getGroupedSarifFilePaths(logger, sarifPath) {
|
||||
const stats = fs13.statSync(sarifPath, { throwIfNoEntry: false });
|
||||
if (stats === void 0) {
|
||||
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
|
||||
}
|
||||
const results = {};
|
||||
if (stats.isDirectory()) {
|
||||
let unassignedSarifFiles = findSarifFilesInDir(
|
||||
sarifPath,
|
||||
(name) => path14.extname(name) === ".sarif"
|
||||
);
|
||||
logger.debug(
|
||||
`Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`
|
||||
);
|
||||
for (const analysisConfig of SarifScanOrder) {
|
||||
const filesForCurrentAnalysis = unassignedSarifFiles.filter(
|
||||
analysisConfig.sarifPredicate
|
||||
);
|
||||
if (filesForCurrentAnalysis.length > 0) {
|
||||
logger.debug(
|
||||
`The following SARIF files are for ${analysisConfig.name}: ${filesForCurrentAnalysis.join(", ")}`
|
||||
);
|
||||
unassignedSarifFiles = unassignedSarifFiles.filter(
|
||||
(name) => !analysisConfig.sarifPredicate(name)
|
||||
);
|
||||
results[analysisConfig.kind] = filesForCurrentAnalysis;
|
||||
} else {
|
||||
logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
|
||||
}
|
||||
}
|
||||
if (unassignedSarifFiles.length !== 0) {
|
||||
logger.warning(
|
||||
`Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
for (const analysisConfig of SarifScanOrder) {
|
||||
if (analysisConfig.kind === "code-scanning" /* CodeScanning */ || analysisConfig.sarifPredicate(sarifPath)) {
|
||||
logger.debug(
|
||||
`Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`
|
||||
);
|
||||
results[analysisConfig.kind] = [sarifPath];
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
function countResultsInSarif(sarif) {
|
||||
let numResults = 0;
|
||||
const parsedSarif = JSON.parse(sarif);
|
||||
@@ -92505,6 +92600,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
let sarif;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
if (sarifPaths.length > 1) {
|
||||
for (const sarifPath of sarifPaths) {
|
||||
const parsedSarif = readSarifFile(sarifPath);
|
||||
@@ -92750,6 +92846,7 @@ function filterAlertsByDiffRange(logger, sarif) {
|
||||
InvalidSarifUploadError,
|
||||
buildPayload,
|
||||
findSarifFilesInDir,
|
||||
getGroupedSarifFilePaths,
|
||||
getSarifFilePaths,
|
||||
populateRunAutomationDetails,
|
||||
readSarifFile,
|
||||
|
||||
Generated
+3
-3
@@ -26438,7 +26438,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -106366,7 +106366,7 @@ var require_brace_expansion3 = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -117619,7 +117619,7 @@ async function uploadCombinedSarifArtifacts(logger, gitHubVariant, codeQlVersion
|
||||
if (fs.existsSync(baseTempDir)) {
|
||||
const outputDirs = fs.readdirSync(baseTempDir);
|
||||
for (const outputDir of outputDirs) {
|
||||
const sarifFiles = fs.readdirSync(path.resolve(baseTempDir, outputDir)).filter((f) => f.endsWith(".sarif"));
|
||||
const sarifFiles = fs.readdirSync(path.resolve(baseTempDir, outputDir)).filter((f) => path.extname(f) === ".sarif");
|
||||
for (const sarifFile of sarifFiles) {
|
||||
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
|
||||
}
|
||||
|
||||
Generated
+174
-131
@@ -185,7 +185,7 @@ var require_file_command = __commonJS({
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0;
|
||||
var crypto = __importStar4(require("crypto"));
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var os3 = __importStar4(require("os"));
|
||||
var utils_1 = require_utils();
|
||||
function issueFileCommand(command, message) {
|
||||
@@ -193,10 +193,10 @@ var require_file_command = __commonJS({
|
||||
if (!filePath) {
|
||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||||
}
|
||||
if (!fs16.existsSync(filePath)) {
|
||||
if (!fs15.existsSync(filePath)) {
|
||||
throw new Error(`Missing file at path: ${filePath}`);
|
||||
}
|
||||
fs16.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
|
||||
fs15.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os3.EOL}`, {
|
||||
encoding: "utf8"
|
||||
});
|
||||
}
|
||||
@@ -18513,12 +18513,12 @@ var require_io_util = __commonJS({
|
||||
var _a;
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0;
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var path16 = __importStar4(require("path"));
|
||||
_a = fs16.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
|
||||
_a = fs15.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink;
|
||||
exports2.IS_WINDOWS = process.platform === "win32";
|
||||
exports2.UV_FS_O_EXLOCK = 268435456;
|
||||
exports2.READONLY = fs16.constants.O_RDONLY;
|
||||
exports2.READONLY = fs15.constants.O_RDONLY;
|
||||
function exists(fsPath) {
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
try {
|
||||
@@ -27907,8 +27907,8 @@ var require_utils7 = __commonJS({
|
||||
exports2.array = array;
|
||||
var errno = require_errno();
|
||||
exports2.errno = errno;
|
||||
var fs16 = require_fs();
|
||||
exports2.fs = fs16;
|
||||
var fs15 = require_fs();
|
||||
exports2.fs = fs15;
|
||||
var path16 = require_path();
|
||||
exports2.path = path16;
|
||||
var pattern = require_pattern();
|
||||
@@ -28092,12 +28092,12 @@ var require_fs2 = __commonJS({
|
||||
"use strict";
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0;
|
||||
var fs16 = require("fs");
|
||||
var fs15 = require("fs");
|
||||
exports2.FILE_SYSTEM_ADAPTER = {
|
||||
lstat: fs16.lstat,
|
||||
stat: fs16.stat,
|
||||
lstatSync: fs16.lstatSync,
|
||||
statSync: fs16.statSync
|
||||
lstat: fs15.lstat,
|
||||
stat: fs15.stat,
|
||||
lstatSync: fs15.lstatSync,
|
||||
statSync: fs15.statSync
|
||||
};
|
||||
function createFileSystemAdapter(fsMethods) {
|
||||
if (fsMethods === void 0) {
|
||||
@@ -28114,12 +28114,12 @@ var require_settings = __commonJS({
|
||||
"node_modules/@nodelib/fs.stat/out/settings.js"(exports2) {
|
||||
"use strict";
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
var fs16 = require_fs2();
|
||||
var fs15 = require_fs2();
|
||||
var Settings = class {
|
||||
constructor(_options = {}) {
|
||||
this._options = _options;
|
||||
this.followSymbolicLink = this._getValue(this._options.followSymbolicLink, true);
|
||||
this.fs = fs16.createFileSystemAdapter(this._options.fs);
|
||||
this.fs = fs15.createFileSystemAdapter(this._options.fs);
|
||||
this.markSymbolicLink = this._getValue(this._options.markSymbolicLink, false);
|
||||
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
|
||||
}
|
||||
@@ -28149,11 +28149,11 @@ var require_out = __commonJS({
|
||||
async.read(path16, getSettings(optionsOrSettingsOrCallback), callback);
|
||||
}
|
||||
exports2.stat = stat;
|
||||
function statSync2(path16, optionsOrSettings) {
|
||||
function statSync3(path16, optionsOrSettings) {
|
||||
const settings = getSettings(optionsOrSettings);
|
||||
return sync.read(path16, settings);
|
||||
}
|
||||
exports2.statSync = statSync2;
|
||||
exports2.statSync = statSync3;
|
||||
function getSettings(settingsOrOptions = {}) {
|
||||
if (settingsOrOptions instanceof settings_1.default) {
|
||||
return settingsOrOptions;
|
||||
@@ -28274,8 +28274,8 @@ var require_utils8 = __commonJS({
|
||||
"use strict";
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.fs = void 0;
|
||||
var fs16 = require_fs3();
|
||||
exports2.fs = fs16;
|
||||
var fs15 = require_fs3();
|
||||
exports2.fs = fs15;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -28470,14 +28470,14 @@ var require_fs4 = __commonJS({
|
||||
"use strict";
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.createFileSystemAdapter = exports2.FILE_SYSTEM_ADAPTER = void 0;
|
||||
var fs16 = require("fs");
|
||||
var fs15 = require("fs");
|
||||
exports2.FILE_SYSTEM_ADAPTER = {
|
||||
lstat: fs16.lstat,
|
||||
stat: fs16.stat,
|
||||
lstatSync: fs16.lstatSync,
|
||||
statSync: fs16.statSync,
|
||||
readdir: fs16.readdir,
|
||||
readdirSync: fs16.readdirSync
|
||||
lstat: fs15.lstat,
|
||||
stat: fs15.stat,
|
||||
lstatSync: fs15.lstatSync,
|
||||
statSync: fs15.statSync,
|
||||
readdir: fs15.readdir,
|
||||
readdirSync: fs15.readdirSync
|
||||
};
|
||||
function createFileSystemAdapter(fsMethods) {
|
||||
if (fsMethods === void 0) {
|
||||
@@ -28496,12 +28496,12 @@ var require_settings2 = __commonJS({
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
var path16 = require("path");
|
||||
var fsStat = require_out();
|
||||
var fs16 = require_fs4();
|
||||
var fs15 = require_fs4();
|
||||
var Settings = class {
|
||||
constructor(_options = {}) {
|
||||
this._options = _options;
|
||||
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
|
||||
this.fs = fs16.createFileSystemAdapter(this._options.fs);
|
||||
this.fs = fs15.createFileSystemAdapter(this._options.fs);
|
||||
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path16.sep);
|
||||
this.stats = this._getValue(this._options.stats, false);
|
||||
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
|
||||
@@ -29762,16 +29762,16 @@ var require_settings4 = __commonJS({
|
||||
"use strict";
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.DEFAULT_FILE_SYSTEM_ADAPTER = void 0;
|
||||
var fs16 = require("fs");
|
||||
var fs15 = require("fs");
|
||||
var os3 = require("os");
|
||||
var CPU_COUNT = Math.max(os3.cpus().length, 1);
|
||||
exports2.DEFAULT_FILE_SYSTEM_ADAPTER = {
|
||||
lstat: fs16.lstat,
|
||||
lstatSync: fs16.lstatSync,
|
||||
stat: fs16.stat,
|
||||
statSync: fs16.statSync,
|
||||
readdir: fs16.readdir,
|
||||
readdirSync: fs16.readdirSync
|
||||
lstat: fs15.lstat,
|
||||
lstatSync: fs15.lstatSync,
|
||||
stat: fs15.stat,
|
||||
statSync: fs15.statSync,
|
||||
readdir: fs15.readdir,
|
||||
readdirSync: fs15.readdirSync
|
||||
};
|
||||
var Settings = class {
|
||||
constructor(_options = {}) {
|
||||
@@ -32287,7 +32287,7 @@ var require_package = __commonJS({
|
||||
"package.json"(exports2, module2) {
|
||||
module2.exports = {
|
||||
name: "codeql",
|
||||
version: "3.30.5",
|
||||
version: "3.30.6",
|
||||
private: true,
|
||||
description: "CodeQL action",
|
||||
scripts: {
|
||||
@@ -34391,7 +34391,7 @@ var require_brace_expansion = __commonJS({
|
||||
var isSequence = isNumericSequence || isAlphaSequence;
|
||||
var isOptions = m.body.indexOf(",") >= 0;
|
||||
if (!isSequence && !isOptions) {
|
||||
if (m.post.match(/,.*\}/)) {
|
||||
if (m.post.match(/,(?!,).*\}/)) {
|
||||
str2 = m.pre + "{" + m.body + escClose + m.post;
|
||||
return expand(str2);
|
||||
}
|
||||
@@ -35465,7 +35465,7 @@ var require_internal_globber = __commonJS({
|
||||
Object.defineProperty(exports2, "__esModule", { value: true });
|
||||
exports2.DefaultGlobber = void 0;
|
||||
var core14 = __importStar4(require_core());
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var globOptionsHelper = __importStar4(require_internal_glob_options_helper());
|
||||
var path16 = __importStar4(require("path"));
|
||||
var patternHelper = __importStar4(require_internal_pattern_helper());
|
||||
@@ -35517,7 +35517,7 @@ var require_internal_globber = __commonJS({
|
||||
for (const searchPath of patternHelper.getSearchPaths(patterns)) {
|
||||
core14.debug(`Search path '${searchPath}'`);
|
||||
try {
|
||||
yield __await4(fs16.promises.lstat(searchPath));
|
||||
yield __await4(fs15.promises.lstat(searchPath));
|
||||
} catch (err) {
|
||||
if (err.code === "ENOENT") {
|
||||
continue;
|
||||
@@ -35548,7 +35548,7 @@ var require_internal_globber = __commonJS({
|
||||
continue;
|
||||
}
|
||||
const childLevel = item.level + 1;
|
||||
const childItems = (yield __await4(fs16.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
|
||||
const childItems = (yield __await4(fs15.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path16.join(item.path, x), childLevel));
|
||||
stack.push(...childItems.reverse());
|
||||
} else if (match & internal_match_kind_1.MatchKind.File) {
|
||||
yield yield __await4(item.path);
|
||||
@@ -35583,7 +35583,7 @@ var require_internal_globber = __commonJS({
|
||||
let stats;
|
||||
if (options.followSymbolicLinks) {
|
||||
try {
|
||||
stats = yield fs16.promises.stat(item.path);
|
||||
stats = yield fs15.promises.stat(item.path);
|
||||
} catch (err) {
|
||||
if (err.code === "ENOENT") {
|
||||
if (options.omitBrokenSymbolicLinks) {
|
||||
@@ -35595,10 +35595,10 @@ var require_internal_globber = __commonJS({
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
stats = yield fs16.promises.lstat(item.path);
|
||||
stats = yield fs15.promises.lstat(item.path);
|
||||
}
|
||||
if (stats.isDirectory() && options.followSymbolicLinks) {
|
||||
const realPath = yield fs16.promises.realpath(item.path);
|
||||
const realPath = yield fs15.promises.realpath(item.path);
|
||||
while (traversalChain.length >= item.level) {
|
||||
traversalChain.pop();
|
||||
}
|
||||
@@ -36932,7 +36932,7 @@ var require_cacheUtils = __commonJS({
|
||||
var glob = __importStar4(require_glob());
|
||||
var io6 = __importStar4(require_io());
|
||||
var crypto = __importStar4(require("crypto"));
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var path16 = __importStar4(require("path"));
|
||||
var semver8 = __importStar4(require_semver3());
|
||||
var util = __importStar4(require("util"));
|
||||
@@ -36962,7 +36962,7 @@ var require_cacheUtils = __commonJS({
|
||||
}
|
||||
exports2.createTempDirectory = createTempDirectory;
|
||||
function getArchiveFileSizeInBytes(filePath) {
|
||||
return fs16.statSync(filePath).size;
|
||||
return fs15.statSync(filePath).size;
|
||||
}
|
||||
exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
|
||||
function resolvePaths(patterns) {
|
||||
@@ -37002,7 +37002,7 @@ var require_cacheUtils = __commonJS({
|
||||
exports2.resolvePaths = resolvePaths;
|
||||
function unlinkFile(filePath) {
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
return util.promisify(fs16.unlink)(filePath);
|
||||
return util.promisify(fs15.unlink)(filePath);
|
||||
});
|
||||
}
|
||||
exports2.unlinkFile = unlinkFile;
|
||||
@@ -37047,7 +37047,7 @@ var require_cacheUtils = __commonJS({
|
||||
exports2.getCacheFileName = getCacheFileName;
|
||||
function getGnuTarPathOnWindows() {
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
if (fs16.existsSync(constants_1.GnuTarPathOnWindows)) {
|
||||
if (fs15.existsSync(constants_1.GnuTarPathOnWindows)) {
|
||||
return constants_1.GnuTarPathOnWindows;
|
||||
}
|
||||
const versionOutput = yield getVersion("tar");
|
||||
@@ -48820,7 +48820,7 @@ var require_dist7 = __commonJS({
|
||||
var stream2 = require("stream");
|
||||
var coreLro = require_dist6();
|
||||
var events = require("events");
|
||||
var fs16 = require("fs");
|
||||
var fs15 = require("fs");
|
||||
var util = require("util");
|
||||
var buffer = require("buffer");
|
||||
function _interopNamespaceDefault(e) {
|
||||
@@ -48843,7 +48843,7 @@ var require_dist7 = __commonJS({
|
||||
}
|
||||
var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat);
|
||||
var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient);
|
||||
var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs16);
|
||||
var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs15);
|
||||
var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util);
|
||||
var logger = logger$1.createClientLogger("storage-blob");
|
||||
var BaseRequestPolicy = class {
|
||||
@@ -72691,7 +72691,7 @@ var require_downloadUtils = __commonJS({
|
||||
var http_client_1 = require_lib();
|
||||
var storage_blob_1 = require_dist7();
|
||||
var buffer = __importStar4(require("buffer"));
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var stream2 = __importStar4(require("stream"));
|
||||
var util = __importStar4(require("util"));
|
||||
var utils = __importStar4(require_cacheUtils());
|
||||
@@ -72802,7 +72802,7 @@ var require_downloadUtils = __commonJS({
|
||||
exports2.DownloadProgress = DownloadProgress;
|
||||
function downloadCacheHttpClient(archiveLocation, archivePath) {
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
const writeStream = fs16.createWriteStream(archivePath);
|
||||
const writeStream = fs15.createWriteStream(archivePath);
|
||||
const httpClient = new http_client_1.HttpClient("actions/cache");
|
||||
const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter4(this, void 0, void 0, function* () {
|
||||
return httpClient.get(archiveLocation);
|
||||
@@ -72828,7 +72828,7 @@ var require_downloadUtils = __commonJS({
|
||||
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
|
||||
var _a;
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
const archiveDescriptor = yield fs16.promises.open(archivePath, "w");
|
||||
const archiveDescriptor = yield fs15.promises.open(archivePath, "w");
|
||||
const httpClient = new http_client_1.HttpClient("actions/cache", void 0, {
|
||||
socketTimeout: options.timeoutInMs,
|
||||
keepAlive: true
|
||||
@@ -72945,7 +72945,7 @@ var require_downloadUtils = __commonJS({
|
||||
} else {
|
||||
const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH);
|
||||
const downloadProgress = new DownloadProgress(contentLength);
|
||||
const fd = fs16.openSync(archivePath, "w");
|
||||
const fd = fs15.openSync(archivePath, "w");
|
||||
try {
|
||||
downloadProgress.startDisplayTimer();
|
||||
const controller = new abort_controller_1.AbortController();
|
||||
@@ -72963,12 +72963,12 @@ var require_downloadUtils = __commonJS({
|
||||
controller.abort();
|
||||
throw new Error("Aborting cache download as the download time exceeded the timeout.");
|
||||
} else if (Buffer.isBuffer(result)) {
|
||||
fs16.writeFileSync(fd, result);
|
||||
fs15.writeFileSync(fd, result);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
downloadProgress.stopDisplayTimer();
|
||||
fs16.closeSync(fd);
|
||||
fs15.closeSync(fd);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -73267,7 +73267,7 @@ var require_cacheHttpClient = __commonJS({
|
||||
var core14 = __importStar4(require_core());
|
||||
var http_client_1 = require_lib();
|
||||
var auth_1 = require_auth();
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var url_1 = require("url");
|
||||
var utils = __importStar4(require_cacheUtils());
|
||||
var uploadUtils_1 = require_uploadUtils();
|
||||
@@ -73405,7 +73405,7 @@ Other caches with similar key:`);
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
|
||||
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
|
||||
const fd = fs16.openSync(archivePath, "r");
|
||||
const fd = fs15.openSync(archivePath, "r");
|
||||
const uploadOptions = (0, options_1.getUploadOptions)(options);
|
||||
const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency);
|
||||
const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize);
|
||||
@@ -73419,7 +73419,7 @@ Other caches with similar key:`);
|
||||
const start = offset;
|
||||
const end = offset + chunkSize - 1;
|
||||
offset += maxChunkSize;
|
||||
yield uploadChunk(httpClient, resourceUrl, () => fs16.createReadStream(archivePath, {
|
||||
yield uploadChunk(httpClient, resourceUrl, () => fs15.createReadStream(archivePath, {
|
||||
fd,
|
||||
start,
|
||||
end,
|
||||
@@ -73430,7 +73430,7 @@ Other caches with similar key:`);
|
||||
}
|
||||
})));
|
||||
} finally {
|
||||
fs16.closeSync(fd);
|
||||
fs15.closeSync(fd);
|
||||
}
|
||||
return;
|
||||
});
|
||||
@@ -80643,7 +80643,7 @@ var require_manifest = __commonJS({
|
||||
var core_1 = require_core();
|
||||
var os3 = require("os");
|
||||
var cp = require("child_process");
|
||||
var fs16 = require("fs");
|
||||
var fs15 = require("fs");
|
||||
function _findMatch(versionSpec, stable, candidates, archFilter) {
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
const platFilter = os3.platform();
|
||||
@@ -80707,10 +80707,10 @@ var require_manifest = __commonJS({
|
||||
const lsbReleaseFile = "/etc/lsb-release";
|
||||
const osReleaseFile = "/etc/os-release";
|
||||
let contents = "";
|
||||
if (fs16.existsSync(lsbReleaseFile)) {
|
||||
contents = fs16.readFileSync(lsbReleaseFile).toString();
|
||||
} else if (fs16.existsSync(osReleaseFile)) {
|
||||
contents = fs16.readFileSync(osReleaseFile).toString();
|
||||
if (fs15.existsSync(lsbReleaseFile)) {
|
||||
contents = fs15.readFileSync(lsbReleaseFile).toString();
|
||||
} else if (fs15.existsSync(osReleaseFile)) {
|
||||
contents = fs15.readFileSync(osReleaseFile).toString();
|
||||
}
|
||||
return contents;
|
||||
}
|
||||
@@ -80887,7 +80887,7 @@ var require_tool_cache = __commonJS({
|
||||
var core14 = __importStar4(require_core());
|
||||
var io6 = __importStar4(require_io());
|
||||
var crypto = __importStar4(require("crypto"));
|
||||
var fs16 = __importStar4(require("fs"));
|
||||
var fs15 = __importStar4(require("fs"));
|
||||
var mm = __importStar4(require_manifest());
|
||||
var os3 = __importStar4(require("os"));
|
||||
var path16 = __importStar4(require("path"));
|
||||
@@ -80934,7 +80934,7 @@ var require_tool_cache = __commonJS({
|
||||
exports2.downloadTool = downloadTool2;
|
||||
function downloadToolAttempt(url2, dest, auth, headers) {
|
||||
return __awaiter4(this, void 0, void 0, function* () {
|
||||
if (fs16.existsSync(dest)) {
|
||||
if (fs15.existsSync(dest)) {
|
||||
throw new Error(`Destination file path ${dest} already exists`);
|
||||
}
|
||||
const http = new httpm.HttpClient(userAgent, [], {
|
||||
@@ -80958,7 +80958,7 @@ var require_tool_cache = __commonJS({
|
||||
const readStream = responseMessageFactory();
|
||||
let succeeded = false;
|
||||
try {
|
||||
yield pipeline(readStream, fs16.createWriteStream(dest));
|
||||
yield pipeline(readStream, fs15.createWriteStream(dest));
|
||||
core14.debug("download complete");
|
||||
succeeded = true;
|
||||
return dest;
|
||||
@@ -81170,11 +81170,11 @@ var require_tool_cache = __commonJS({
|
||||
arch2 = arch2 || os3.arch();
|
||||
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
|
||||
core14.debug(`source dir: ${sourceDir}`);
|
||||
if (!fs16.statSync(sourceDir).isDirectory()) {
|
||||
if (!fs15.statSync(sourceDir).isDirectory()) {
|
||||
throw new Error("sourceDir is not a directory");
|
||||
}
|
||||
const destPath = yield _createToolPath(tool, version, arch2);
|
||||
for (const itemName of fs16.readdirSync(sourceDir)) {
|
||||
for (const itemName of fs15.readdirSync(sourceDir)) {
|
||||
const s = path16.join(sourceDir, itemName);
|
||||
yield io6.cp(s, destPath, { recursive: true });
|
||||
}
|
||||
@@ -81189,7 +81189,7 @@ var require_tool_cache = __commonJS({
|
||||
arch2 = arch2 || os3.arch();
|
||||
core14.debug(`Caching tool ${tool} ${version} ${arch2}`);
|
||||
core14.debug(`source file: ${sourceFile}`);
|
||||
if (!fs16.statSync(sourceFile).isFile()) {
|
||||
if (!fs15.statSync(sourceFile).isFile()) {
|
||||
throw new Error("sourceFile is not a file");
|
||||
}
|
||||
const destFolder = yield _createToolPath(tool, version, arch2);
|
||||
@@ -81219,7 +81219,7 @@ var require_tool_cache = __commonJS({
|
||||
versionSpec = semver8.clean(versionSpec) || "";
|
||||
const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2);
|
||||
core14.debug(`checking cache: ${cachePath}`);
|
||||
if (fs16.existsSync(cachePath) && fs16.existsSync(`${cachePath}.complete`)) {
|
||||
if (fs15.existsSync(cachePath) && fs15.existsSync(`${cachePath}.complete`)) {
|
||||
core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`);
|
||||
toolPath = cachePath;
|
||||
} else {
|
||||
@@ -81233,12 +81233,12 @@ var require_tool_cache = __commonJS({
|
||||
const versions = [];
|
||||
arch2 = arch2 || os3.arch();
|
||||
const toolPath = path16.join(_getCacheDirectory(), toolName);
|
||||
if (fs16.existsSync(toolPath)) {
|
||||
const children = fs16.readdirSync(toolPath);
|
||||
if (fs15.existsSync(toolPath)) {
|
||||
const children = fs15.readdirSync(toolPath);
|
||||
for (const child of children) {
|
||||
if (isExplicitVersion(child)) {
|
||||
const fullPath = path16.join(toolPath, child, arch2 || "");
|
||||
if (fs16.existsSync(fullPath) && fs16.existsSync(`${fullPath}.complete`)) {
|
||||
if (fs15.existsSync(fullPath) && fs15.existsSync(`${fullPath}.complete`)) {
|
||||
versions.push(child);
|
||||
}
|
||||
}
|
||||
@@ -81312,7 +81312,7 @@ var require_tool_cache = __commonJS({
|
||||
function _completeToolPath(tool, version, arch2) {
|
||||
const folderPath = path16.join(_getCacheDirectory(), tool, semver8.clean(version) || version, arch2 || "");
|
||||
const markerPath = `${folderPath}.complete`;
|
||||
fs16.writeFileSync(markerPath, "");
|
||||
fs15.writeFileSync(markerPath, "");
|
||||
core14.debug("finished caching tool");
|
||||
}
|
||||
function isExplicitVersion(versionSpec) {
|
||||
@@ -84816,7 +84816,6 @@ var require_sarif_schema_2_1_0 = __commonJS({
|
||||
});
|
||||
|
||||
// src/upload-sarif-action.ts
|
||||
var fs15 = __toESM(require("fs"));
|
||||
var core13 = __toESM(require_core());
|
||||
|
||||
// src/actions-util.ts
|
||||
@@ -85707,21 +85706,21 @@ async function getFolderSize(itemPath, options) {
|
||||
getFolderSize.loose = async (itemPath, options) => await core(itemPath, options);
|
||||
getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true });
|
||||
async function core(rootItemPath, options = {}, returnType = {}) {
|
||||
const fs16 = options.fs || await import("node:fs/promises");
|
||||
const fs15 = options.fs || await import("node:fs/promises");
|
||||
let folderSize = 0n;
|
||||
const foundInos = /* @__PURE__ */ new Set();
|
||||
const errors = [];
|
||||
await processItem(rootItemPath);
|
||||
async function processItem(itemPath) {
|
||||
if (options.ignore?.test(itemPath)) return;
|
||||
const stats = returnType.strict ? await fs16.lstat(itemPath, { bigint: true }) : await fs16.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2));
|
||||
const stats = returnType.strict ? await fs15.lstat(itemPath, { bigint: true }) : await fs15.lstat(itemPath, { bigint: true }).catch((error2) => errors.push(error2));
|
||||
if (typeof stats !== "object") return;
|
||||
if (!foundInos.has(stats.ino)) {
|
||||
foundInos.add(stats.ino);
|
||||
folderSize += stats.size;
|
||||
}
|
||||
if (stats.isDirectory()) {
|
||||
const directoryItems = returnType.strict ? await fs16.readdir(itemPath) : await fs16.readdir(itemPath).catch((error2) => errors.push(error2));
|
||||
const directoryItems = returnType.strict ? await fs15.readdir(itemPath) : await fs15.readdir(itemPath).catch((error2) => errors.push(error2));
|
||||
if (typeof directoryItems !== "object") return;
|
||||
await Promise.all(
|
||||
directoryItems.map(
|
||||
@@ -88591,6 +88590,11 @@ async function asyncSome(array, predicate) {
|
||||
const results = await Promise.all(array.map(predicate));
|
||||
return results.some((result) => result);
|
||||
}
|
||||
function unsafeEntriesInvariant(object) {
|
||||
return Object.entries(object).filter(
|
||||
([_, val2]) => val2 !== void 0
|
||||
);
|
||||
}
|
||||
|
||||
// src/actions-util.ts
|
||||
var pkg = require_package();
|
||||
@@ -88771,6 +88775,7 @@ var CodeScanning = {
|
||||
target: "PUT /repos/:owner/:repo/code-scanning/analysis" /* CODE_SCANNING */,
|
||||
sarifExtension: ".sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeScanning.sarifExtension) && !CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_"
|
||||
};
|
||||
var CodeQuality = {
|
||||
@@ -88779,8 +88784,18 @@ var CodeQuality = {
|
||||
target: "PUT /repos/:owner/:repo/code-quality/analysis" /* CODE_QUALITY */,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_"
|
||||
};
|
||||
function getAnalysisConfig(kind) {
|
||||
switch (kind) {
|
||||
case "code-scanning" /* CodeScanning */:
|
||||
return CodeScanning;
|
||||
case "code-quality" /* CodeQuality */:
|
||||
return CodeQuality;
|
||||
}
|
||||
}
|
||||
var SarifScanOrder = [CodeQuality, CodeScanning];
|
||||
|
||||
// src/api-client.ts
|
||||
var core5 = __toESM(require_core());
|
||||
@@ -88932,8 +88947,8 @@ var path8 = __toESM(require("path"));
|
||||
var semver3 = __toESM(require_semver2());
|
||||
|
||||
// src/defaults.json
|
||||
var bundleVersion = "codeql-bundle-v2.23.1";
|
||||
var cliVersion = "2.23.1";
|
||||
var bundleVersion = "codeql-bundle-v2.23.2";
|
||||
var cliVersion = "2.23.2";
|
||||
|
||||
// src/overlay-database-utils.ts
|
||||
var fs5 = __toESM(require("fs"));
|
||||
@@ -93075,6 +93090,54 @@ function findSarifFilesInDir(sarifPath, isSarif) {
|
||||
walkSarifFiles(sarifPath);
|
||||
return sarifFiles;
|
||||
}
|
||||
async function getGroupedSarifFilePaths(logger, sarifPath) {
|
||||
const stats = fs14.statSync(sarifPath, { throwIfNoEntry: false });
|
||||
if (stats === void 0) {
|
||||
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
|
||||
}
|
||||
const results = {};
|
||||
if (stats.isDirectory()) {
|
||||
let unassignedSarifFiles = findSarifFilesInDir(
|
||||
sarifPath,
|
||||
(name) => path15.extname(name) === ".sarif"
|
||||
);
|
||||
logger.debug(
|
||||
`Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`
|
||||
);
|
||||
for (const analysisConfig of SarifScanOrder) {
|
||||
const filesForCurrentAnalysis = unassignedSarifFiles.filter(
|
||||
analysisConfig.sarifPredicate
|
||||
);
|
||||
if (filesForCurrentAnalysis.length > 0) {
|
||||
logger.debug(
|
||||
`The following SARIF files are for ${analysisConfig.name}: ${filesForCurrentAnalysis.join(", ")}`
|
||||
);
|
||||
unassignedSarifFiles = unassignedSarifFiles.filter(
|
||||
(name) => !analysisConfig.sarifPredicate(name)
|
||||
);
|
||||
results[analysisConfig.kind] = filesForCurrentAnalysis;
|
||||
} else {
|
||||
logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
|
||||
}
|
||||
}
|
||||
if (unassignedSarifFiles.length !== 0) {
|
||||
logger.warning(
|
||||
`Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
for (const analysisConfig of SarifScanOrder) {
|
||||
if (analysisConfig.kind === "code-scanning" /* CodeScanning */ || analysisConfig.sarifPredicate(sarifPath)) {
|
||||
logger.debug(
|
||||
`Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`
|
||||
);
|
||||
results[analysisConfig.kind] = [sarifPath];
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
function countResultsInSarif(sarif) {
|
||||
let numResults = 0;
|
||||
const parsedSarif = JSON.parse(sarif);
|
||||
@@ -93175,6 +93238,7 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
|
||||
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
let sarif;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
if (sarifPaths.length > 1) {
|
||||
for (const sarifPath of sarifPaths) {
|
||||
const parsedSarif = readSarifFile(sarifPath);
|
||||
@@ -93416,31 +93480,30 @@ function filterAlertsByDiffRange(logger, sarif) {
|
||||
return sarif;
|
||||
}
|
||||
|
||||
// src/upload-sarif-action.ts
|
||||
async function findAndUpload(logger, features, sarifPath, pathStats, checkoutPath, analysis, category) {
|
||||
let sarifFiles;
|
||||
if (pathStats.isDirectory()) {
|
||||
sarifFiles = findSarifFilesInDir(
|
||||
sarifPath,
|
||||
analysis.sarifPredicate
|
||||
);
|
||||
} else if (pathStats.isFile() && (analysis.sarifPredicate(sarifPath) || analysis.kind === "code-scanning" /* CodeScanning */ && !CodeQuality.sarifPredicate(sarifPath))) {
|
||||
sarifFiles = [sarifPath];
|
||||
} else {
|
||||
return void 0;
|
||||
}
|
||||
if (sarifFiles.length !== 0) {
|
||||
return await uploadSpecifiedFiles(
|
||||
// src/upload-sarif.ts
|
||||
async function uploadSarif(logger, features, checkoutPath, sarifPath, category) {
|
||||
const sarifGroups = await getGroupedSarifFilePaths(
|
||||
logger,
|
||||
sarifPath
|
||||
);
|
||||
const uploadResults = {};
|
||||
for (const [analysisKind, sarifFiles] of unsafeEntriesInvariant(
|
||||
sarifGroups
|
||||
)) {
|
||||
const analysisConfig = getAnalysisConfig(analysisKind);
|
||||
uploadResults[analysisKind] = await uploadSpecifiedFiles(
|
||||
sarifFiles,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analysis
|
||||
analysisConfig
|
||||
);
|
||||
}
|
||||
return void 0;
|
||||
return uploadResults;
|
||||
}
|
||||
|
||||
// src/upload-sarif-action.ts
|
||||
async function sendSuccessStatusReport(startedAt, uploadStats, logger) {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
"upload-sarif" /* UploadSarif */,
|
||||
@@ -93487,57 +93550,37 @@ async function run() {
|
||||
const sarifPath = getRequiredInput("sarif_file");
|
||||
const checkoutPath = getRequiredInput("checkout_path");
|
||||
const category = getOptionalInput("category");
|
||||
const pathStats = fs15.lstatSync(sarifPath, { throwIfNoEntry: false });
|
||||
if (pathStats === void 0) {
|
||||
throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
|
||||
}
|
||||
const sarifIds = [];
|
||||
const uploadResult = await findAndUpload(
|
||||
const uploadResults = await uploadSarif(
|
||||
logger,
|
||||
features,
|
||||
sarifPath,
|
||||
pathStats,
|
||||
checkoutPath,
|
||||
CodeScanning,
|
||||
sarifPath,
|
||||
category
|
||||
);
|
||||
if (uploadResult !== void 0) {
|
||||
core13.setOutput("sarif-id", uploadResult.sarifID);
|
||||
sarifIds.push({
|
||||
analysis: "code-scanning" /* CodeScanning */,
|
||||
id: uploadResult.sarifID
|
||||
});
|
||||
if (Object.keys(uploadResults).length === 0) {
|
||||
throw new ConfigurationError(
|
||||
`No SARIF files found to upload in "${sarifPath}".`
|
||||
);
|
||||
}
|
||||
const qualityUploadResult = await findAndUpload(
|
||||
logger,
|
||||
features,
|
||||
sarifPath,
|
||||
pathStats,
|
||||
checkoutPath,
|
||||
CodeQuality,
|
||||
fixCodeQualityCategory(logger, category)
|
||||
);
|
||||
if (qualityUploadResult !== void 0) {
|
||||
sarifIds.push({
|
||||
analysis: "code-quality" /* CodeQuality */,
|
||||
id: qualityUploadResult.sarifID
|
||||
});
|
||||
const codeScanningResult = uploadResults["code-scanning" /* CodeScanning */];
|
||||
if (codeScanningResult !== void 0) {
|
||||
core13.setOutput("sarif-id", codeScanningResult.sarifID);
|
||||
}
|
||||
core13.setOutput("sarif-ids", JSON.stringify(sarifIds));
|
||||
core13.setOutput("sarif-ids", JSON.stringify(uploadResults));
|
||||
if (isInTestMode()) {
|
||||
core13.debug("In test mode. Waiting for processing is disabled.");
|
||||
} else if (getRequiredInput("wait-for-processing") === "true") {
|
||||
if (uploadResult !== void 0) {
|
||||
if (codeScanningResult !== void 0) {
|
||||
await waitForProcessing(
|
||||
getRepositoryNwo(),
|
||||
uploadResult.sarifID,
|
||||
codeScanningResult.sarifID,
|
||||
logger
|
||||
);
|
||||
}
|
||||
}
|
||||
await sendSuccessStatusReport(
|
||||
startedAt,
|
||||
uploadResult?.statusReport || {},
|
||||
codeScanningResult?.statusReport || {},
|
||||
logger
|
||||
);
|
||||
} catch (unwrappedError) {
|
||||
|
||||
Generated
+5
-3
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "3.30.5",
|
||||
"version": "3.30.6",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "codeql",
|
||||
"version": "3.30.5",
|
||||
"version": "3.30.6",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/artifact": "^2.3.1",
|
||||
@@ -4200,7 +4200,9 @@
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "3.30.5",
|
||||
"version": "3.30.6",
|
||||
"private": true,
|
||||
"description": "CodeQL action",
|
||||
"scripts": {
|
||||
|
||||
@@ -22,5 +22,5 @@ steps:
|
||||
ref: 'refs/heads/main'
|
||||
sha: '5e235361806c361d4d3f8859e3c897658025a9a2'
|
||||
- name: "Check output from `upload-sarif` step"
|
||||
if: fromJSON(steps.upload-sarif.outputs.sarif-ids)[0].analysis != 'code-quality'
|
||||
if: '!(fromJSON(steps.upload-sarif.outputs.sarif-ids).code-quality)'
|
||||
run: exit 1
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { fixCodeQualityCategory } from "./actions-util";
|
||||
import { Logger } from "./logging";
|
||||
import { ConfigurationError } from "./util";
|
||||
|
||||
export enum AnalysisKind {
|
||||
@@ -61,6 +63,8 @@ export interface AnalysisConfig {
|
||||
/** A predicate on filenames to decide whether a SARIF file
|
||||
* belongs to this kind of analysis. */
|
||||
sarifPredicate: (name: string) => boolean;
|
||||
/** Analysis-specific adjustment of the category. */
|
||||
fixCategory: (logger: Logger, category?: string) => string | undefined;
|
||||
/** A prefix for environment variables used to track the uniqueness of SARIF uploads. */
|
||||
sentinelPrefix: string;
|
||||
}
|
||||
@@ -74,6 +78,7 @@ export const CodeScanning: AnalysisConfig = {
|
||||
sarifPredicate: (name) =>
|
||||
name.endsWith(CodeScanning.sarifExtension) &&
|
||||
!CodeQuality.sarifPredicate(name),
|
||||
fixCategory: (_, category) => category,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_SARIF_",
|
||||
};
|
||||
|
||||
@@ -84,5 +89,29 @@ export const CodeQuality: AnalysisConfig = {
|
||||
target: SARIF_UPLOAD_ENDPOINT.CODE_QUALITY,
|
||||
sarifExtension: ".quality.sarif",
|
||||
sarifPredicate: (name) => name.endsWith(CodeQuality.sarifExtension),
|
||||
fixCategory: fixCodeQualityCategory,
|
||||
sentinelPrefix: "CODEQL_UPLOAD_QUALITY_SARIF_",
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets the `AnalysisConfig` corresponding to `kind`.
|
||||
* @param kind The analysis kind to get the `AnalysisConfig` for.
|
||||
* @returns The `AnalysisConfig` corresponding to `kind`.
|
||||
*/
|
||||
export function getAnalysisConfig(kind: AnalysisKind): AnalysisConfig {
|
||||
// Using a switch statement here accomplishes two things:
|
||||
// 1. The type checker believes us that we have a case for every `AnalysisKind`.
|
||||
// 2. If we ever add another member to `AnalysisKind`, the type checker will alert us that we have to add a case.
|
||||
switch (kind) {
|
||||
case AnalysisKind.CodeScanning:
|
||||
return CodeScanning;
|
||||
case AnalysisKind.CodeQuality:
|
||||
return CodeQuality;
|
||||
}
|
||||
}
|
||||
|
||||
// Since we have overlapping extensions (i.e. ".sarif" includes ".quality.sarif"),
|
||||
// we want to scan a folder containing SARIF files in an order that finds the more
|
||||
// specific extensions first. This constant defines an array in the order of analyis
|
||||
// configurations with more specific extensions to less specific extensions.
|
||||
export const SarifScanOrder = [CodeQuality, CodeScanning];
|
||||
|
||||
+25
-8
@@ -26,7 +26,10 @@ import {
|
||||
isCodeScanningEnabled,
|
||||
} from "./config-utils";
|
||||
import { uploadDatabases } from "./database-upload";
|
||||
import { uploadDependencyCaches } from "./dependency-caching";
|
||||
import {
|
||||
DependencyCacheUploadStatusReport,
|
||||
uploadDependencyCaches,
|
||||
} from "./dependency-caching";
|
||||
import { getDiffInformedAnalysisBranches } from "./diff-informed-analysis-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Feature, Features } from "./feature-flags";
|
||||
@@ -55,10 +58,15 @@ interface AnalysisStatusReport
|
||||
extends uploadLib.UploadStatusReport,
|
||||
QueriesStatusReport {}
|
||||
|
||||
interface DependencyCachingUploadStatusReport {
|
||||
dependency_caching_upload_results?: DependencyCacheUploadStatusReport;
|
||||
}
|
||||
|
||||
interface FinishStatusReport
|
||||
extends StatusReportBase,
|
||||
DatabaseCreationTimings,
|
||||
AnalysisStatusReport {}
|
||||
AnalysisStatusReport,
|
||||
DependencyCachingUploadStatusReport {}
|
||||
|
||||
interface FinishWithTrapUploadStatusReport extends FinishStatusReport {
|
||||
/** Size of TRAP caches that we uploaded, in bytes. */
|
||||
@@ -76,6 +84,7 @@ async function sendStatusReport(
|
||||
dbCreationTimings: DatabaseCreationTimings | undefined,
|
||||
didUploadTrapCaches: boolean,
|
||||
trapCacheCleanup: TrapCacheCleanupStatusReport | undefined,
|
||||
dependencyCacheResults: DependencyCacheUploadStatusReport | undefined,
|
||||
logger: Logger,
|
||||
) {
|
||||
const status = getActionsStatus(error, stats?.analyze_failure_language);
|
||||
@@ -95,6 +104,7 @@ async function sendStatusReport(
|
||||
...(stats || {}),
|
||||
...(dbCreationTimings || {}),
|
||||
...(trapCacheCleanup || {}),
|
||||
dependency_caching_upload_results: dependencyCacheResults,
|
||||
};
|
||||
if (config && didUploadTrapCaches) {
|
||||
const trapCacheUploadStatusReport: FinishWithTrapUploadStatusReport = {
|
||||
@@ -209,6 +219,7 @@ async function run() {
|
||||
let trapCacheUploadTime: number | undefined = undefined;
|
||||
let dbCreationTimings: DatabaseCreationTimings | undefined = undefined;
|
||||
let didUploadTrapCaches = false;
|
||||
let dependencyCacheResults: DependencyCacheUploadStatusReport | undefined;
|
||||
util.initializeEnvironment(actionsUtil.getActionVersion());
|
||||
|
||||
// Make inputs accessible in the `post` step, details at
|
||||
@@ -345,16 +356,14 @@ async function run() {
|
||||
}
|
||||
|
||||
if (isCodeQualityEnabled(config)) {
|
||||
const analysis = analyses.CodeQuality;
|
||||
const qualityUploadResult = await uploadLib.uploadFiles(
|
||||
outputDir,
|
||||
actionsUtil.getRequiredInput("checkout_path"),
|
||||
actionsUtil.fixCodeQualityCategory(
|
||||
logger,
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
),
|
||||
actionsUtil.getOptionalInput("category"),
|
||||
features,
|
||||
logger,
|
||||
analyses.CodeQuality,
|
||||
analysis,
|
||||
);
|
||||
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
|
||||
}
|
||||
@@ -388,7 +397,11 @@ async function run() {
|
||||
Feature.JavaMinimizeDependencyJars,
|
||||
codeql,
|
||||
);
|
||||
await uploadDependencyCaches(config, logger, minimizeJavaJars);
|
||||
dependencyCacheResults = await uploadDependencyCaches(
|
||||
config,
|
||||
logger,
|
||||
minimizeJavaJars,
|
||||
);
|
||||
}
|
||||
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
@@ -431,6 +444,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
return;
|
||||
@@ -449,6 +463,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
} else if (runStats) {
|
||||
@@ -461,6 +476,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
} else {
|
||||
@@ -473,6 +489,7 @@ async function run() {
|
||||
dbCreationTimings,
|
||||
didUploadTrapCaches,
|
||||
trapCacheCleanupTelemetry,
|
||||
dependencyCacheResults,
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
+1
-1
@@ -334,7 +334,7 @@ test("resolveQuerySuiteAlias", (t) => {
|
||||
for (const suite of defaultSuites) {
|
||||
const resolved = resolveQuerySuiteAlias(KnownLanguage.go, suite);
|
||||
t.assert(
|
||||
resolved.endsWith(".qls"),
|
||||
path.extname(resolved) === ".qls",
|
||||
"Resolved default suite doesn't end in .qls",
|
||||
);
|
||||
t.assert(
|
||||
|
||||
+1
-2
@@ -7,7 +7,6 @@ import * as del from "del";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import {
|
||||
fixCodeQualityCategory,
|
||||
getRequiredInput,
|
||||
getTemporaryDirectory,
|
||||
PullRequestBranches,
|
||||
@@ -781,7 +780,7 @@ export async function runQueries(
|
||||
// accepted by the Code Quality backend.
|
||||
let category = automationDetailsId;
|
||||
if (analysis.kind === analyses.AnalysisKind.CodeQuality) {
|
||||
category = fixCodeQualityCategory(logger, automationDetailsId);
|
||||
category = analysis.fixCategory(logger, automationDetailsId);
|
||||
}
|
||||
|
||||
const sarifFile = path.join(
|
||||
|
||||
+1
-1
@@ -245,7 +245,7 @@ export interface ActionsCacheItem {
|
||||
/** List all Actions cache entries matching the provided key and ref. */
|
||||
export async function listActionsCaches(
|
||||
key: string,
|
||||
ref: string,
|
||||
ref?: string,
|
||||
): Promise<ActionsCacheItem[]> {
|
||||
const repositoryNwo = getRepositoryNwo();
|
||||
|
||||
|
||||
@@ -153,7 +153,6 @@ const packSpecPrettyPrintingMacro = test.macro({
|
||||
title: (
|
||||
_providedTitle: string | undefined,
|
||||
packStr: string,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
_packObj: dbConfig.Pack,
|
||||
) => `Prettyprint pack spec: '${packStr}'`,
|
||||
});
|
||||
|
||||
@@ -59,7 +59,7 @@ export async function uploadCombinedSarifArtifacts(
|
||||
for (const outputDir of outputDirs) {
|
||||
const sarifFiles = fs
|
||||
.readdirSync(path.resolve(baseTempDir, outputDir))
|
||||
.filter((f) => f.endsWith(".sarif"));
|
||||
.filter((f) => path.extname(f) === ".sarif");
|
||||
|
||||
for (const sarifFile of sarifFiles) {
|
||||
toUpload.push(path.resolve(baseTempDir, outputDir, sarifFile));
|
||||
|
||||
+4
-4
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-v2.23.1",
|
||||
"cliVersion": "2.23.1",
|
||||
"priorBundleVersion": "codeql-bundle-v2.23.0",
|
||||
"priorCliVersion": "2.23.0"
|
||||
"bundleVersion": "codeql-bundle-v2.23.2",
|
||||
"cliVersion": "2.23.2",
|
||||
"priorBundleVersion": "codeql-bundle-v2.23.1",
|
||||
"priorCliVersion": "2.23.1"
|
||||
}
|
||||
|
||||
+108
-7
@@ -5,12 +5,13 @@ import * as actionsCache from "@actions/cache";
|
||||
import * as glob from "@actions/glob";
|
||||
|
||||
import { getTemporaryDirectory } from "./actions-util";
|
||||
import { listActionsCaches } from "./api-client";
|
||||
import { getTotalCacheSize } from "./caching-utils";
|
||||
import { Config } from "./config-utils";
|
||||
import { EnvVar } from "./environment";
|
||||
import { KnownLanguage, Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import { getRequiredEnvParam } from "./util";
|
||||
import { getErrorMessage, getRequiredEnvParam } from "./util";
|
||||
|
||||
/**
|
||||
* Caching configuration for a particular language.
|
||||
@@ -84,20 +85,42 @@ async function makeGlobber(patterns: string[]): Promise<glob.Globber> {
|
||||
return glob.create(patterns.join("\n"));
|
||||
}
|
||||
|
||||
/** Enumerates possible outcomes for cache hits. */
|
||||
export enum CacheHitKind {
|
||||
/** We were unable to calculate a hash for the key. */
|
||||
NoHash = "no-hash",
|
||||
/** No cache was found. */
|
||||
Miss = "miss",
|
||||
/** The primary cache key matched. */
|
||||
Exact = "exact",
|
||||
/** A restore key matched. */
|
||||
Partial = "partial",
|
||||
}
|
||||
|
||||
/** Represents results of trying to restore a dependency cache for a language. */
|
||||
export interface DependencyCacheRestoreStatus {
|
||||
language: Language;
|
||||
hit_kind: CacheHitKind;
|
||||
download_duration_ms?: number;
|
||||
}
|
||||
|
||||
/** An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration. */
|
||||
export type DependencyCacheRestoreStatusReport = DependencyCacheRestoreStatus[];
|
||||
|
||||
/**
|
||||
* Attempts to restore dependency caches for the languages being analyzed.
|
||||
*
|
||||
* @param languages The languages being analyzed.
|
||||
* @param logger A logger to record some informational messages to.
|
||||
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
|
||||
* @returns A list of languages for which dependency caches were restored.
|
||||
* @returns An array of `DependencyCacheRestoreStatus` objects for each analysed language with a caching configuration.
|
||||
*/
|
||||
export async function downloadDependencyCaches(
|
||||
languages: Language[],
|
||||
logger: Logger,
|
||||
minimizeJavaJars: boolean,
|
||||
): Promise<Language[]> {
|
||||
const restoredCaches: Language[] = [];
|
||||
): Promise<DependencyCacheRestoreStatusReport> {
|
||||
const status: DependencyCacheRestoreStatusReport = [];
|
||||
|
||||
for (const language of languages) {
|
||||
const cacheConfig = getDefaultCacheConfig()[language];
|
||||
@@ -114,6 +137,7 @@ export async function downloadDependencyCaches(
|
||||
const globber = await makeGlobber(cacheConfig.hash);
|
||||
|
||||
if ((await globber.glob()).length === 0) {
|
||||
status.push({ language, hit_kind: CacheHitKind.NoHash });
|
||||
logger.info(
|
||||
`Skipping download of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
|
||||
);
|
||||
@@ -131,35 +155,66 @@ export async function downloadDependencyCaches(
|
||||
)}`,
|
||||
);
|
||||
|
||||
const start = performance.now();
|
||||
const hitKey = await actionsCache.restoreCache(
|
||||
cacheConfig.paths,
|
||||
primaryKey,
|
||||
restoreKeys,
|
||||
);
|
||||
const download_duration_ms = Math.round(performance.now() - start);
|
||||
|
||||
if (hitKey !== undefined) {
|
||||
logger.info(`Cache hit on key ${hitKey} for ${language}.`);
|
||||
restoredCaches.push(language);
|
||||
const hit_kind =
|
||||
hitKey === primaryKey ? CacheHitKind.Exact : CacheHitKind.Partial;
|
||||
status.push({ language, hit_kind, download_duration_ms });
|
||||
} else {
|
||||
status.push({ language, hit_kind: CacheHitKind.Miss });
|
||||
logger.info(`No suitable cache found for ${language}.`);
|
||||
}
|
||||
}
|
||||
|
||||
return restoredCaches;
|
||||
return status;
|
||||
}
|
||||
|
||||
/** Enumerates possible outcomes for storing caches. */
|
||||
export enum CacheStoreResult {
|
||||
/** We were unable to calculate a hash for the key. */
|
||||
NoHash = "no-hash",
|
||||
/** There is nothing to store in the cache. */
|
||||
Empty = "empty",
|
||||
/** There already exists a cache with the key we are trying to store. */
|
||||
Duplicate = "duplicate",
|
||||
/** The cache was stored successfully. */
|
||||
Stored = "stored",
|
||||
}
|
||||
|
||||
/** Represents results of trying to upload a dependency cache for a language. */
|
||||
export interface DependencyCacheUploadStatus {
|
||||
language: Language;
|
||||
result: CacheStoreResult;
|
||||
upload_size_bytes?: number;
|
||||
upload_duration_ms?: number;
|
||||
}
|
||||
|
||||
/** An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration. */
|
||||
export type DependencyCacheUploadStatusReport = DependencyCacheUploadStatus[];
|
||||
|
||||
/**
|
||||
* Attempts to store caches for the languages that were analyzed.
|
||||
*
|
||||
* @param config The configuration for this workflow.
|
||||
* @param logger A logger to record some informational messages to.
|
||||
* @param minimizeJavaJars Whether the Java extractor should rewrite downloaded JARs to minimize their size.
|
||||
*
|
||||
* @returns An array of `DependencyCacheUploadStatus` objects for each analysed language with a caching configuration.
|
||||
*/
|
||||
export async function uploadDependencyCaches(
|
||||
config: Config,
|
||||
logger: Logger,
|
||||
minimizeJavaJars: boolean,
|
||||
): Promise<void> {
|
||||
): Promise<DependencyCacheUploadStatusReport> {
|
||||
const status: DependencyCacheUploadStatusReport = [];
|
||||
for (const language of config.languages) {
|
||||
const cacheConfig = getDefaultCacheConfig()[language];
|
||||
|
||||
@@ -175,6 +230,7 @@ export async function uploadDependencyCaches(
|
||||
const globber = await makeGlobber(cacheConfig.hash);
|
||||
|
||||
if ((await globber.glob()).length === 0) {
|
||||
status.push({ language, result: CacheStoreResult.NoHash });
|
||||
logger.info(
|
||||
`Skipping upload of dependency cache for ${language} as we cannot calculate a hash for the cache key.`,
|
||||
);
|
||||
@@ -195,6 +251,7 @@ export async function uploadDependencyCaches(
|
||||
|
||||
// Skip uploading an empty cache.
|
||||
if (size === 0) {
|
||||
status.push({ language, result: CacheStoreResult.Empty });
|
||||
logger.info(
|
||||
`Skipping upload of dependency cache for ${language} since it is empty.`,
|
||||
);
|
||||
@@ -208,7 +265,16 @@ export async function uploadDependencyCaches(
|
||||
);
|
||||
|
||||
try {
|
||||
const start = performance.now();
|
||||
await actionsCache.saveCache(cacheConfig.paths, key);
|
||||
const upload_duration_ms = Math.round(performance.now() - start);
|
||||
|
||||
status.push({
|
||||
language,
|
||||
result: CacheStoreResult.Stored,
|
||||
upload_size_bytes: Math.round(size),
|
||||
upload_duration_ms,
|
||||
});
|
||||
} catch (error) {
|
||||
// `ReserveCacheError` indicates that the cache key is already in use, which means that a
|
||||
// cache with that key already exists or is in the process of being uploaded by another
|
||||
@@ -218,12 +284,16 @@ export async function uploadDependencyCaches(
|
||||
`Not uploading cache for ${language}, because ${key} is already in use.`,
|
||||
);
|
||||
logger.debug(error.message);
|
||||
|
||||
status.push({ language, result: CacheStoreResult.Duplicate });
|
||||
} else {
|
||||
// Propagate other errors upwards.
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return status;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -270,3 +340,34 @@ async function cachePrefix(
|
||||
|
||||
return `${prefix}-${CODEQL_DEPENDENCY_CACHE_VERSION}-${runnerOs}-${language}-`;
|
||||
}
|
||||
|
||||
/** Represents information about our overall cache usage for CodeQL dependency caches. */
|
||||
export interface DependencyCachingUsageReport {
|
||||
count: number;
|
||||
size_bytes: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tries to determine the overall cache usage for CodeQL dependencies caches.
|
||||
*
|
||||
* @param logger The logger to log errors to.
|
||||
* @returns Returns the overall cache usage for CodeQL dependencies caches, or `undefined` if we couldn't determine it.
|
||||
*/
|
||||
export async function getDependencyCacheUsage(
|
||||
logger: Logger,
|
||||
): Promise<DependencyCachingUsageReport | undefined> {
|
||||
try {
|
||||
const caches = await listActionsCaches(CODEQL_DEPENDENCY_CACHE_PREFIX);
|
||||
const totalSize = caches.reduce(
|
||||
(acc, cache) => acc + (cache.size_in_bytes ?? 0),
|
||||
0,
|
||||
);
|
||||
return { count: caches.length, size_bytes: totalSize };
|
||||
} catch (err) {
|
||||
logger.warning(
|
||||
`Unable to retrieve information about dependency cache usage: ${getErrorMessage(err)}`,
|
||||
);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import { CodeScanning } from "./analyses";
|
||||
import { getApiClient } from "./api-client";
|
||||
import { CodeQL, getCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import * as dependencyCaching from "./dependency-caching";
|
||||
import { EnvVar } from "./environment";
|
||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||
import { Logger } from "./logging";
|
||||
@@ -45,6 +46,10 @@ export interface JobStatusReport {
|
||||
job_status: JobStatus;
|
||||
}
|
||||
|
||||
export interface DependencyCachingUsageReport {
|
||||
dependency_caching_usage?: dependencyCaching.DependencyCachingUsageReport;
|
||||
}
|
||||
|
||||
function createFailedUploadFailedSarifResult(
|
||||
error: unknown,
|
||||
): UploadFailedSarifResult {
|
||||
|
||||
+21
-1
@@ -12,10 +12,16 @@ import {
|
||||
printDebugLogs,
|
||||
} from "./actions-util";
|
||||
import { getGitHubVersion } from "./api-client";
|
||||
import { CachingKind } from "./caching-utils";
|
||||
import { getCodeQL } from "./codeql";
|
||||
import { Config, getConfig } from "./config-utils";
|
||||
import * as debugArtifacts from "./debug-artifacts";
|
||||
import {
|
||||
DependencyCachingUsageReport,
|
||||
getDependencyCacheUsage,
|
||||
} from "./dependency-caching";
|
||||
import { Features } from "./feature-flags";
|
||||
import * as gitUtils from "./git-utils";
|
||||
import * as initActionPostHelper from "./init-action-post-helper";
|
||||
import { getActionsLogger } from "./logging";
|
||||
import { getRepositoryNwo } from "./repository";
|
||||
@@ -32,7 +38,8 @@ import { checkDiskUsage, checkGitHubVersionInRange, wrapError } from "./util";
|
||||
interface InitPostStatusReport
|
||||
extends StatusReportBase,
|
||||
initActionPostHelper.UploadFailedSarifResult,
|
||||
initActionPostHelper.JobStatusReport {}
|
||||
initActionPostHelper.JobStatusReport,
|
||||
initActionPostHelper.DependencyCachingUsageReport {}
|
||||
|
||||
async function runWrapper() {
|
||||
const logger = getActionsLogger();
|
||||
@@ -41,6 +48,7 @@ async function runWrapper() {
|
||||
let uploadFailedSarifResult:
|
||||
| initActionPostHelper.UploadFailedSarifResult
|
||||
| undefined;
|
||||
let dependencyCachingUsage: DependencyCachingUsageReport | undefined;
|
||||
try {
|
||||
// Restore inputs from `init` Action.
|
||||
restoreInputs();
|
||||
@@ -73,6 +81,17 @@ async function runWrapper() {
|
||||
features,
|
||||
logger,
|
||||
);
|
||||
|
||||
// If we are analysing the default branch and some kind of caching is enabled,
|
||||
// then try to determine our overall cache usage for dependency caches. We only
|
||||
// do this under these circumstances to avoid slowing down analyses for PRs
|
||||
// and where caching may not be enabled.
|
||||
if (
|
||||
(await gitUtils.isAnalyzingDefaultBranch()) &&
|
||||
config.dependencyCachingEnabled !== CachingKind.None
|
||||
) {
|
||||
dependencyCachingUsage = await getDependencyCacheUsage(logger);
|
||||
}
|
||||
}
|
||||
} catch (unwrappedError) {
|
||||
const error = wrapError(unwrappedError);
|
||||
@@ -109,6 +128,7 @@ async function runWrapper() {
|
||||
...statusReportBase,
|
||||
...uploadFailedSarifResult,
|
||||
job_status: initActionPostHelper.getFinalJobStatus(),
|
||||
dependency_caching_usage: dependencyCachingUsage,
|
||||
};
|
||||
logger.info("Sending status report for init-post step.");
|
||||
await sendStatusReport(statusReport);
|
||||
|
||||
+10
-2
@@ -23,7 +23,10 @@ import {
|
||||
} from "./caching-utils";
|
||||
import { CodeQL } from "./codeql";
|
||||
import * as configUtils from "./config-utils";
|
||||
import { downloadDependencyCaches } from "./dependency-caching";
|
||||
import {
|
||||
DependencyCacheRestoreStatusReport,
|
||||
downloadDependencyCaches,
|
||||
} from "./dependency-caching";
|
||||
import {
|
||||
addDiagnostic,
|
||||
flushDiagnostics,
|
||||
@@ -102,6 +105,7 @@ async function sendCompletedStatusReport(
|
||||
toolsSource: ToolsSource,
|
||||
toolsVersion: string,
|
||||
overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined,
|
||||
dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined,
|
||||
logger: Logger,
|
||||
error?: Error,
|
||||
) {
|
||||
@@ -151,6 +155,7 @@ async function sendCompletedStatusReport(
|
||||
await getTotalCacheSize(Object.values(config.trapCaches), logger),
|
||||
),
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
);
|
||||
await sendStatusReport({
|
||||
...initWithConfigStatusReport,
|
||||
@@ -351,6 +356,7 @@ async function run() {
|
||||
}
|
||||
|
||||
let overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined;
|
||||
let dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined;
|
||||
try {
|
||||
if (
|
||||
config.overlayDatabaseMode === OverlayDatabaseMode.Overlay &&
|
||||
@@ -562,7 +568,7 @@ async function run() {
|
||||
codeql,
|
||||
);
|
||||
if (shouldRestoreCache(config.dependencyCachingEnabled)) {
|
||||
await downloadDependencyCaches(
|
||||
dependencyCachingResults = await downloadDependencyCaches(
|
||||
config.languages,
|
||||
logger,
|
||||
minimizeJavaJars,
|
||||
@@ -714,6 +720,7 @@ async function run() {
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
logger,
|
||||
error,
|
||||
);
|
||||
@@ -736,6 +743,7 @@ async function run() {
|
||||
toolsSource,
|
||||
toolsVersion,
|
||||
overlayBaseDatabaseStats,
|
||||
dependencyCachingResults,
|
||||
logger,
|
||||
);
|
||||
}
|
||||
|
||||
+139
-80
@@ -14,6 +14,13 @@ import {
|
||||
getDownloadUrl,
|
||||
UPDATEJOB_PROXY,
|
||||
} from "./start-proxy";
|
||||
import {
|
||||
ActionName,
|
||||
createStatusReportBase,
|
||||
getActionsStatus,
|
||||
sendStatusReport,
|
||||
StatusReportBase,
|
||||
} from "./status-report";
|
||||
import * as util from "./util";
|
||||
|
||||
const KEY_SIZE = 2048;
|
||||
@@ -83,46 +90,102 @@ function generateCertificateAuthority(): CertificateAuthority {
|
||||
return { cert: pem, key };
|
||||
}
|
||||
|
||||
interface StartProxyStatus extends StatusReportBase {
|
||||
// A comma-separated list of registry types which are configured for CodeQL.
|
||||
// This only includes registry types we support, not all that are configured.
|
||||
registry_types: string;
|
||||
}
|
||||
|
||||
async function sendSuccessStatusReport(
|
||||
startedAt: Date,
|
||||
registry_types: string[],
|
||||
logger: Logger,
|
||||
) {
|
||||
const statusReportBase = await createStatusReportBase(
|
||||
ActionName.StartProxy,
|
||||
"success",
|
||||
startedAt,
|
||||
undefined,
|
||||
await util.checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
if (statusReportBase !== undefined) {
|
||||
const statusReport: StartProxyStatus = {
|
||||
...statusReportBase,
|
||||
registry_types: registry_types.join(","),
|
||||
};
|
||||
await sendStatusReport(statusReport);
|
||||
}
|
||||
}
|
||||
|
||||
async function runWrapper() {
|
||||
const startedAt = new Date();
|
||||
|
||||
// Make inputs accessible in the `post` step.
|
||||
actionsUtil.persistInputs();
|
||||
|
||||
const logger = getActionsLogger();
|
||||
|
||||
// Setup logging for the proxy
|
||||
const tempDir = actionsUtil.getTemporaryDirectory();
|
||||
const proxyLogFilePath = path.resolve(tempDir, "proxy.log");
|
||||
core.saveState("proxy-log-file", proxyLogFilePath);
|
||||
try {
|
||||
// Setup logging for the proxy
|
||||
const tempDir = actionsUtil.getTemporaryDirectory();
|
||||
const proxyLogFilePath = path.resolve(tempDir, "proxy.log");
|
||||
core.saveState("proxy-log-file", proxyLogFilePath);
|
||||
|
||||
// Get the configuration options
|
||||
const credentials = getCredentials(
|
||||
logger,
|
||||
actionsUtil.getOptionalInput("registry_secrets"),
|
||||
actionsUtil.getOptionalInput("registries_credentials"),
|
||||
actionsUtil.getOptionalInput("language"),
|
||||
);
|
||||
// Get the configuration options
|
||||
const credentials = getCredentials(
|
||||
logger,
|
||||
actionsUtil.getOptionalInput("registry_secrets"),
|
||||
actionsUtil.getOptionalInput("registries_credentials"),
|
||||
actionsUtil.getOptionalInput("language"),
|
||||
);
|
||||
|
||||
if (credentials.length === 0) {
|
||||
logger.info("No credentials found, skipping proxy setup.");
|
||||
return;
|
||||
if (credentials.length === 0) {
|
||||
logger.info("No credentials found, skipping proxy setup.");
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`Credentials loaded for the following registries:\n ${credentials
|
||||
.map((c) => credentialToStr(c))
|
||||
.join("\n")}`,
|
||||
);
|
||||
|
||||
const ca = generateCertificateAuthority();
|
||||
|
||||
const proxyConfig: ProxyConfig = {
|
||||
all_credentials: credentials,
|
||||
ca,
|
||||
};
|
||||
|
||||
// Start the Proxy
|
||||
const proxyBin = await getProxyBinaryPath(logger);
|
||||
await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);
|
||||
|
||||
// Report success if we have reached this point.
|
||||
await sendSuccessStatusReport(
|
||||
startedAt,
|
||||
proxyConfig.all_credentials.map((c) => c.type),
|
||||
logger,
|
||||
);
|
||||
} catch (unwrappedError) {
|
||||
const error = util.wrapError(unwrappedError);
|
||||
core.setFailed(`start-proxy action failed: ${error.message}`);
|
||||
|
||||
// We skip sending the error message and stack trace here to avoid the possibility
|
||||
// of leaking any sensitive information into the telemetry.
|
||||
const errorStatusReportBase = await createStatusReportBase(
|
||||
ActionName.StartProxy,
|
||||
getActionsStatus(error),
|
||||
startedAt,
|
||||
undefined,
|
||||
await util.checkDiskUsage(logger),
|
||||
logger,
|
||||
);
|
||||
if (errorStatusReportBase !== undefined) {
|
||||
await sendStatusReport(errorStatusReportBase);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`Credentials loaded for the following registries:\n ${credentials
|
||||
.map((c) => credentialToStr(c))
|
||||
.join("\n")}`,
|
||||
);
|
||||
|
||||
const ca = generateCertificateAuthority();
|
||||
|
||||
const proxyConfig: ProxyConfig = {
|
||||
all_credentials: credentials,
|
||||
ca,
|
||||
};
|
||||
|
||||
// Start the Proxy
|
||||
const proxyBin = await getProxyBinaryPath(logger);
|
||||
await startProxy(proxyBin, proxyConfig, proxyLogFilePath, logger);
|
||||
}
|
||||
|
||||
async function startProxy(
|
||||
@@ -133,57 +196,53 @@ async function startProxy(
|
||||
) {
|
||||
const host = "127.0.0.1";
|
||||
let port = 49152;
|
||||
try {
|
||||
let subprocess: ChildProcess | undefined = undefined;
|
||||
let tries = 5;
|
||||
let subprocessError: Error | undefined = undefined;
|
||||
while (tries-- > 0 && !subprocess && !subprocessError) {
|
||||
subprocess = spawn(
|
||||
binPath,
|
||||
["-addr", `${host}:${port}`, "-config", "-", "-logfile", logFilePath],
|
||||
{
|
||||
detached: true,
|
||||
stdio: ["pipe", "ignore", "ignore"],
|
||||
},
|
||||
);
|
||||
subprocess.unref();
|
||||
if (subprocess.pid) {
|
||||
core.saveState("proxy-process-pid", `${subprocess.pid}`);
|
||||
let subprocess: ChildProcess | undefined = undefined;
|
||||
let tries = 5;
|
||||
let subprocessError: Error | undefined = undefined;
|
||||
while (tries-- > 0 && !subprocess && !subprocessError) {
|
||||
subprocess = spawn(
|
||||
binPath,
|
||||
["-addr", `${host}:${port}`, "-config", "-", "-logfile", logFilePath],
|
||||
{
|
||||
detached: true,
|
||||
stdio: ["pipe", "ignore", "ignore"],
|
||||
},
|
||||
);
|
||||
subprocess.unref();
|
||||
if (subprocess.pid) {
|
||||
core.saveState("proxy-process-pid", `${subprocess.pid}`);
|
||||
}
|
||||
subprocess.on("error", (error) => {
|
||||
subprocessError = error;
|
||||
});
|
||||
subprocess.on("exit", (code) => {
|
||||
if (code !== 0) {
|
||||
// If the proxy failed to start, try a different port from the ephemeral range [49152, 65535]
|
||||
port = Math.floor(Math.random() * (65535 - 49152) + 49152);
|
||||
subprocess = undefined;
|
||||
}
|
||||
subprocess.on("error", (error) => {
|
||||
subprocessError = error;
|
||||
});
|
||||
subprocess.on("exit", (code) => {
|
||||
if (code !== 0) {
|
||||
// If the proxy failed to start, try a different port from the ephemeral range [49152, 65535]
|
||||
port = Math.floor(Math.random() * (65535 - 49152) + 49152);
|
||||
subprocess = undefined;
|
||||
}
|
||||
});
|
||||
subprocess.stdin?.write(JSON.stringify(config));
|
||||
subprocess.stdin?.end();
|
||||
// Wait a little to allow the proxy to start
|
||||
await util.delay(1000);
|
||||
}
|
||||
if (subprocessError) {
|
||||
// eslint-disable-next-line @typescript-eslint/only-throw-error
|
||||
throw subprocessError;
|
||||
}
|
||||
logger.info(`Proxy started on ${host}:${port}`);
|
||||
core.setOutput("proxy_host", host);
|
||||
core.setOutput("proxy_port", port.toString());
|
||||
core.setOutput("proxy_ca_certificate", config.ca.cert);
|
||||
|
||||
const registry_urls = config.all_credentials
|
||||
.filter((credential) => credential.url !== undefined)
|
||||
.map((credential) => ({
|
||||
type: credential.type,
|
||||
url: credential.url,
|
||||
}));
|
||||
core.setOutput("proxy_urls", JSON.stringify(registry_urls));
|
||||
} catch (error) {
|
||||
core.setFailed(`start-proxy action failed: ${util.getErrorMessage(error)}`);
|
||||
});
|
||||
subprocess.stdin?.write(JSON.stringify(config));
|
||||
subprocess.stdin?.end();
|
||||
// Wait a little to allow the proxy to start
|
||||
await util.delay(1000);
|
||||
}
|
||||
if (subprocessError) {
|
||||
// eslint-disable-next-line @typescript-eslint/only-throw-error
|
||||
throw subprocessError;
|
||||
}
|
||||
logger.info(`Proxy started on ${host}:${port}`);
|
||||
core.setOutput("proxy_host", host);
|
||||
core.setOutput("proxy_port", port.toString());
|
||||
core.setOutput("proxy_ca_certificate", config.ca.cert);
|
||||
|
||||
const registry_urls = config.all_credentials
|
||||
.filter((credential) => credential.url !== undefined)
|
||||
.map((credential) => ({
|
||||
type: credential.type,
|
||||
url: credential.url,
|
||||
}));
|
||||
core.setOutput("proxy_urls", JSON.stringify(registry_urls));
|
||||
}
|
||||
|
||||
async function getProxyBinaryPath(logger: Logger): Promise<string> {
|
||||
|
||||
@@ -286,6 +286,7 @@ const testCreateInitWithConfigStatusReport = test.macro({
|
||||
undefined,
|
||||
1024,
|
||||
undefined,
|
||||
undefined,
|
||||
);
|
||||
|
||||
if (t.truthy(initWithConfigStatusReport)) {
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
} from "./actions-util";
|
||||
import { getAnalysisKey, getApiClient } from "./api-client";
|
||||
import { parseRegistriesWithoutCredentials, type Config } from "./config-utils";
|
||||
import { DependencyCacheRestoreStatusReport } from "./dependency-caching";
|
||||
import { DocUrl } from "./doc-url";
|
||||
import { EnvVar } from "./environment";
|
||||
import { getRef } from "./git-utils";
|
||||
@@ -35,11 +36,12 @@ import {
|
||||
} from "./util";
|
||||
|
||||
export enum ActionName {
|
||||
Autobuild = "autobuild",
|
||||
Analyze = "finish",
|
||||
Autobuild = "autobuild",
|
||||
Init = "init",
|
||||
InitPost = "init-post",
|
||||
ResolveEnvironment = "resolve-environment",
|
||||
StartProxy = "start-proxy",
|
||||
UploadSarif = "upload-sarif",
|
||||
}
|
||||
|
||||
@@ -497,6 +499,8 @@ export interface InitWithConfigStatusReport extends InitStatusReport {
|
||||
overlay_base_database_download_size_bytes?: number;
|
||||
/** Time taken to download the overlay-base database, in milliseconds. */
|
||||
overlay_base_database_download_duration_ms?: number;
|
||||
/** Stringified JSON object representing information about the results of restoring dependency caches. */
|
||||
dependency_caching_restore_results?: DependencyCacheRestoreStatusReport;
|
||||
/** Stringified JSON array of registry configuration objects, from the 'registries' config field
|
||||
or workflow input. **/
|
||||
registries: string;
|
||||
@@ -522,6 +526,7 @@ export async function createInitWithConfigStatusReport(
|
||||
configFile: string | undefined,
|
||||
totalCacheSize: number,
|
||||
overlayBaseDatabaseStats: OverlayBaseDatabaseDownloadStats | undefined,
|
||||
dependencyCachingResults: DependencyCacheRestoreStatusReport | undefined,
|
||||
): Promise<InitWithConfigStatusReport> {
|
||||
const languages = config.languages.join(",");
|
||||
const paths = (config.originalUserInput.paths || []).join(",");
|
||||
@@ -570,6 +575,7 @@ export async function createInitWithConfigStatusReport(
|
||||
overlayBaseDatabaseStats?.databaseSizeBytes,
|
||||
overlay_base_database_download_duration_ms:
|
||||
overlayBaseDatabaseStats?.databaseDownloadDurationMs,
|
||||
dependency_caching_restore_results: dependencyCachingResults,
|
||||
query_filters: JSON.stringify(
|
||||
config.originalUserInput["query-filters"] ?? [],
|
||||
),
|
||||
|
||||
+81
-11
@@ -3,7 +3,7 @@ import * as path from "path";
|
||||
|
||||
import test from "ava";
|
||||
|
||||
import { CodeQuality, CodeScanning } from "./analyses";
|
||||
import { AnalysisKind, CodeQuality, CodeScanning } from "./analyses";
|
||||
import { getRunnerLogger, Logger } from "./logging";
|
||||
import { setupTests } from "./testing-utils";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
@@ -127,27 +127,97 @@ test("finding SARIF files", async (t) => {
|
||||
fs.writeFileSync(path.join(tmpDir, "a.quality.sarif"), "");
|
||||
fs.writeFileSync(path.join(tmpDir, "dir1", "b.quality.sarif"), "");
|
||||
|
||||
const expectedSarifFiles = [
|
||||
path.join(tmpDir, "a.sarif"),
|
||||
path.join(tmpDir, "b.sarif"),
|
||||
path.join(tmpDir, "dir1", "d.sarif"),
|
||||
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
|
||||
];
|
||||
const sarifFiles = uploadLib.findSarifFilesInDir(
|
||||
tmpDir,
|
||||
CodeScanning.sarifPredicate,
|
||||
);
|
||||
|
||||
t.deepEqual(sarifFiles, [
|
||||
path.join(tmpDir, "a.sarif"),
|
||||
path.join(tmpDir, "b.sarif"),
|
||||
path.join(tmpDir, "dir1", "d.sarif"),
|
||||
path.join(tmpDir, "dir1", "dir2", "e.sarif"),
|
||||
]);
|
||||
t.deepEqual(sarifFiles, expectedSarifFiles);
|
||||
|
||||
const expectedQualitySarifFiles = [
|
||||
path.join(tmpDir, "a.quality.sarif"),
|
||||
path.join(tmpDir, "dir1", "b.quality.sarif"),
|
||||
];
|
||||
const qualitySarifFiles = uploadLib.findSarifFilesInDir(
|
||||
tmpDir,
|
||||
CodeQuality.sarifPredicate,
|
||||
);
|
||||
|
||||
t.deepEqual(qualitySarifFiles, [
|
||||
path.join(tmpDir, "a.quality.sarif"),
|
||||
path.join(tmpDir, "dir1", "b.quality.sarif"),
|
||||
]);
|
||||
t.deepEqual(qualitySarifFiles, expectedQualitySarifFiles);
|
||||
|
||||
const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
|
||||
getRunnerLogger(true),
|
||||
tmpDir,
|
||||
);
|
||||
|
||||
t.not(groupedSarifFiles, undefined);
|
||||
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
|
||||
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
|
||||
t.deepEqual(
|
||||
groupedSarifFiles[AnalysisKind.CodeScanning],
|
||||
expectedSarifFiles,
|
||||
);
|
||||
t.deepEqual(
|
||||
groupedSarifFiles[AnalysisKind.CodeQuality],
|
||||
expectedQualitySarifFiles,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test("getGroupedSarifFilePaths - Code Quality file", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
const sarifPath = path.join(tmpDir, "a.quality.sarif");
|
||||
fs.writeFileSync(sarifPath, "");
|
||||
|
||||
const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
|
||||
getRunnerLogger(true),
|
||||
sarifPath,
|
||||
);
|
||||
|
||||
t.not(groupedSarifFiles, undefined);
|
||||
t.is(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
|
||||
t.not(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
|
||||
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeQuality], [sarifPath]);
|
||||
});
|
||||
});
|
||||
|
||||
test("getGroupedSarifFilePaths - Code Scanning file", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
const sarifPath = path.join(tmpDir, "a.sarif");
|
||||
fs.writeFileSync(sarifPath, "");
|
||||
|
||||
const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
|
||||
getRunnerLogger(true),
|
||||
sarifPath,
|
||||
);
|
||||
|
||||
t.not(groupedSarifFiles, undefined);
|
||||
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
|
||||
t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
|
||||
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
|
||||
});
|
||||
});
|
||||
|
||||
test("getGroupedSarifFilePaths - Other file", async (t) => {
|
||||
await withTmpDir(async (tmpDir) => {
|
||||
const sarifPath = path.join(tmpDir, "a.json");
|
||||
fs.writeFileSync(sarifPath, "");
|
||||
|
||||
const groupedSarifFiles = await uploadLib.getGroupedSarifFilePaths(
|
||||
getRunnerLogger(true),
|
||||
sarifPath,
|
||||
);
|
||||
|
||||
t.not(groupedSarifFiles, undefined);
|
||||
t.not(groupedSarifFiles[AnalysisKind.CodeScanning], undefined);
|
||||
t.is(groupedSarifFiles[AnalysisKind.CodeQuality], undefined);
|
||||
t.deepEqual(groupedSarifFiles[AnalysisKind.CodeScanning], [sarifPath]);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -459,6 +459,79 @@ export function getSarifFilePaths(
|
||||
return sarifFiles;
|
||||
}
|
||||
|
||||
type GroupedSarifFiles = Partial<Record<analyses.AnalysisKind, string[]>>;
|
||||
|
||||
/**
|
||||
* Finds SARIF files in `sarifPath`, and groups them by analysis kind, following `SarifScanOrder`.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @param sarifPath The path of a file or directory to recursively scan for SARIF files.
|
||||
* @returns The `.sarif` files found in `sarifPath`, grouped by analysis kind.
|
||||
*/
|
||||
export async function getGroupedSarifFilePaths(
|
||||
logger: Logger,
|
||||
sarifPath: string,
|
||||
): Promise<GroupedSarifFiles> {
|
||||
const stats = fs.statSync(sarifPath, { throwIfNoEntry: false });
|
||||
|
||||
if (stats === undefined) {
|
||||
// This is always a configuration error, even for first-party runs.
|
||||
throw new ConfigurationError(`Path does not exist: ${sarifPath}`);
|
||||
}
|
||||
|
||||
const results: GroupedSarifFiles = {};
|
||||
|
||||
if (stats.isDirectory()) {
|
||||
let unassignedSarifFiles = findSarifFilesInDir(
|
||||
sarifPath,
|
||||
(name) => path.extname(name) === ".sarif",
|
||||
);
|
||||
logger.debug(
|
||||
`Found the following .sarif files in ${sarifPath}: ${unassignedSarifFiles.join(", ")}`,
|
||||
);
|
||||
|
||||
for (const analysisConfig of analyses.SarifScanOrder) {
|
||||
const filesForCurrentAnalysis = unassignedSarifFiles.filter(
|
||||
analysisConfig.sarifPredicate,
|
||||
);
|
||||
if (filesForCurrentAnalysis.length > 0) {
|
||||
logger.debug(
|
||||
`The following SARIF files are for ${analysisConfig.name}: ${filesForCurrentAnalysis.join(", ")}`,
|
||||
);
|
||||
// Looping through the array a second time is not efficient, but more readable.
|
||||
// Change this to one loop for both calls to `filter` if this becomes a bottleneck.
|
||||
unassignedSarifFiles = unassignedSarifFiles.filter(
|
||||
(name) => !analysisConfig.sarifPredicate(name),
|
||||
);
|
||||
results[analysisConfig.kind] = filesForCurrentAnalysis;
|
||||
} else {
|
||||
logger.debug(`Found no SARIF files for ${analysisConfig.name}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (unassignedSarifFiles.length !== 0) {
|
||||
logger.warning(
|
||||
`Found files in ${sarifPath} which do not belong to any analysis: ${unassignedSarifFiles.join(", ")}`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
for (const analysisConfig of analyses.SarifScanOrder) {
|
||||
if (
|
||||
analysisConfig.kind === analyses.AnalysisKind.CodeScanning ||
|
||||
analysisConfig.sarifPredicate(sarifPath)
|
||||
) {
|
||||
logger.debug(
|
||||
`Using '${sarifPath}' as a SARIF file for ${analysisConfig.name}.`,
|
||||
);
|
||||
results[analysisConfig.kind] = [sarifPath];
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
// Counts the number of results in the given SARIF file
|
||||
function countResultsInSarif(sarif: string): number {
|
||||
let numResults = 0;
|
||||
@@ -655,6 +728,7 @@ export async function uploadSpecifiedFiles(
|
||||
const gitHubVersion = await getGitHubVersion();
|
||||
|
||||
let sarif: SarifFile;
|
||||
category = uploadTarget.fixCategory(logger, category);
|
||||
|
||||
if (sarifPaths.length > 1) {
|
||||
// Validate that the files we were asked to upload are all valid SARIF files
|
||||
|
||||
+17
-91
@@ -1,5 +1,3 @@
|
||||
import * as fs from "fs";
|
||||
|
||||
import * as core from "@actions/core";
|
||||
|
||||
import * as actionsUtil from "./actions-util";
|
||||
@@ -18,6 +16,7 @@ import {
|
||||
isThirdPartyAnalysis,
|
||||
} from "./status-report";
|
||||
import * as upload_lib from "./upload-lib";
|
||||
import { uploadSarif } from "./upload-sarif";
|
||||
import {
|
||||
ConfigurationError,
|
||||
checkActionVersion,
|
||||
@@ -32,60 +31,6 @@ interface UploadSarifStatusReport
|
||||
extends StatusReportBase,
|
||||
upload_lib.UploadStatusReport {}
|
||||
|
||||
/**
|
||||
* Searches for SARIF files for the given `analysis` in the given `sarifPath`.
|
||||
* If any are found, then they are uploaded to the appropriate endpoint for the given `analysis`.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @param features Information about FFs.
|
||||
* @param sarifPath The path to a SARIF file or directory containing SARIF files.
|
||||
* @param pathStats Information about `sarifPath`.
|
||||
* @param checkoutPath The checkout path.
|
||||
* @param analysis The configuration of the analysis we should upload SARIF files for.
|
||||
* @param category The SARIF category to use for the upload.
|
||||
* @returns The result of uploading the SARIF file(s) or `undefined` if there are none.
|
||||
*/
|
||||
async function findAndUpload(
|
||||
logger: Logger,
|
||||
features: Features,
|
||||
sarifPath: string,
|
||||
pathStats: fs.Stats,
|
||||
checkoutPath: string,
|
||||
analysis: analyses.AnalysisConfig,
|
||||
category?: string,
|
||||
): Promise<upload_lib.UploadResult | undefined> {
|
||||
let sarifFiles: string[] | undefined;
|
||||
|
||||
if (pathStats.isDirectory()) {
|
||||
sarifFiles = upload_lib.findSarifFilesInDir(
|
||||
sarifPath,
|
||||
analysis.sarifPredicate,
|
||||
);
|
||||
} else if (
|
||||
pathStats.isFile() &&
|
||||
(analysis.sarifPredicate(sarifPath) ||
|
||||
(analysis.kind === analyses.AnalysisKind.CodeScanning &&
|
||||
!analyses.CodeQuality.sarifPredicate(sarifPath)))
|
||||
) {
|
||||
sarifFiles = [sarifPath];
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (sarifFiles.length !== 0) {
|
||||
return await upload_lib.uploadSpecifiedFiles(
|
||||
sarifFiles,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analysis,
|
||||
);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function sendSuccessStatusReport(
|
||||
startedAt: Date,
|
||||
uploadStats: upload_lib.UploadStatusReport,
|
||||
@@ -144,56 +89,37 @@ async function run() {
|
||||
const sarifPath = actionsUtil.getRequiredInput("sarif_file");
|
||||
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
|
||||
const category = actionsUtil.getOptionalInput("category");
|
||||
const pathStats = fs.lstatSync(sarifPath, { throwIfNoEntry: false });
|
||||
|
||||
if (pathStats === undefined) {
|
||||
throw new ConfigurationError(`Path does not exist: ${sarifPath}.`);
|
||||
}
|
||||
|
||||
const sarifIds: Array<{ analysis: string; id: string }> = [];
|
||||
const uploadResult = await findAndUpload(
|
||||
const uploadResults = await uploadSarif(
|
||||
logger,
|
||||
features,
|
||||
sarifPath,
|
||||
pathStats,
|
||||
checkoutPath,
|
||||
analyses.CodeScanning,
|
||||
sarifPath,
|
||||
category,
|
||||
);
|
||||
if (uploadResult !== undefined) {
|
||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||
sarifIds.push({
|
||||
analysis: analyses.AnalysisKind.CodeScanning,
|
||||
id: uploadResult.sarifID,
|
||||
});
|
||||
|
||||
// Fail if we didn't upload anything.
|
||||
if (Object.keys(uploadResults).length === 0) {
|
||||
throw new ConfigurationError(
|
||||
`No SARIF files found to upload in "${sarifPath}".`,
|
||||
);
|
||||
}
|
||||
|
||||
// If there are `.quality.sarif` files in `sarifPath`, then upload those to the code quality service.
|
||||
const qualityUploadResult = await findAndUpload(
|
||||
logger,
|
||||
features,
|
||||
sarifPath,
|
||||
pathStats,
|
||||
checkoutPath,
|
||||
analyses.CodeQuality,
|
||||
actionsUtil.fixCodeQualityCategory(logger, category),
|
||||
);
|
||||
if (qualityUploadResult !== undefined) {
|
||||
sarifIds.push({
|
||||
analysis: analyses.AnalysisKind.CodeQuality,
|
||||
id: qualityUploadResult.sarifID,
|
||||
});
|
||||
const codeScanningResult =
|
||||
uploadResults[analyses.AnalysisKind.CodeScanning];
|
||||
if (codeScanningResult !== undefined) {
|
||||
core.setOutput("sarif-id", codeScanningResult.sarifID);
|
||||
}
|
||||
core.setOutput("sarif-ids", JSON.stringify(sarifIds));
|
||||
core.setOutput("sarif-ids", JSON.stringify(uploadResults));
|
||||
|
||||
// We don't upload results in test mode, so don't wait for processing
|
||||
if (isInTestMode()) {
|
||||
core.debug("In test mode. Waiting for processing is disabled.");
|
||||
} else if (actionsUtil.getRequiredInput("wait-for-processing") === "true") {
|
||||
if (uploadResult !== undefined) {
|
||||
if (codeScanningResult !== undefined) {
|
||||
await upload_lib.waitForProcessing(
|
||||
getRepositoryNwo(),
|
||||
uploadResult.sarifID,
|
||||
codeScanningResult.sarifID,
|
||||
logger,
|
||||
);
|
||||
}
|
||||
@@ -202,7 +128,7 @@ async function run() {
|
||||
}
|
||||
await sendSuccessStatusReport(
|
||||
startedAt,
|
||||
uploadResult?.statusReport || {},
|
||||
codeScanningResult?.statusReport || {},
|
||||
logger,
|
||||
);
|
||||
} catch (unwrappedError) {
|
||||
|
||||
@@ -0,0 +1,185 @@
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import test, { ExecutionContext } from "ava";
|
||||
import * as sinon from "sinon";
|
||||
|
||||
import { AnalysisKind, getAnalysisConfig } from "./analyses";
|
||||
import { getRunnerLogger } from "./logging";
|
||||
import { createFeatures, setupTests } from "./testing-utils";
|
||||
import { UploadResult } from "./upload-lib";
|
||||
import * as uploadLib from "./upload-lib";
|
||||
import { uploadSarif } from "./upload-sarif";
|
||||
import * as util from "./util";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
interface UploadSarifExpectedResult {
|
||||
uploadResult?: UploadResult;
|
||||
expectedFiles?: string[];
|
||||
}
|
||||
|
||||
/**
 * Test macro for `uploadSarif`: creates the given SARIF files in a temp
 * directory, stubs `uploadSpecifiedFiles` to resolve with the expected
 * results, runs `uploadSarif` over the chosen path, and then checks — for
 * every analysis kind — that the upload either happened with exactly the
 * expected files or did not happen at all.
 */
const uploadSarifMacro = test.macro({
  exec: async (
    t: ExecutionContext<unknown>,
    // Names of the SARIF files to create in the temp directory.
    sarifFiles: string[],
    // Maps the temp directory to the path handed to `uploadSarif`;
    // defaults to the temp directory itself (directory-scan case).
    sarifPath: (tempDir: string) => string = (tempDir) => tempDir,
    // Expected upload outcome per analysis kind.
    expectedResult: Partial<Record<AnalysisKind, UploadSarifExpectedResult>>,
  ) => {
    await util.withTmpDir(async (tempDir) => {
      const logger = getRunnerLogger(true);
      const testPath = sarifPath(tempDir);
      const features = createFeatures([]);

      // Helper to turn a bare filename into its path inside the temp dir.
      const toFullPath = (filename: string) => path.join(tempDir, filename);

      const uploadSpecifiedFiles = sinon.stub(
        uploadLib,
        "uploadSpecifiedFiles",
      );

      // Wire the stub so that each analysis kind's call (distinguished by its
      // analysis config argument) resolves with that kind's expected result,
      // or `undefined` when none is expected.
      for (const analysisKind of Object.values(AnalysisKind)) {
        uploadSpecifiedFiles
          .withArgs(
            sinon.match.any,
            sinon.match.any,
            sinon.match.any,
            features,
            logger,
            getAnalysisConfig(analysisKind),
          )
          .resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
      }

      // Create (empty) SARIF files on disk so path discovery can find them.
      const fullSarifPaths = sarifFiles.map(toFullPath);
      for (const sarifFile of fullSarifPaths) {
        fs.writeFileSync(sarifFile, "");
      }

      const actual = await uploadSarif(logger, features, "", testPath);

      for (const analysisKind of Object.values(AnalysisKind)) {
        const analysisKindResult = expectedResult[analysisKind];
        if (analysisKindResult) {
          // We are expecting a result for this analysis kind, check that we have it.
          t.deepEqual(actual[analysisKind], analysisKindResult.uploadResult);
          // Additionally, check that the mocked `uploadSpecifiedFiles` was called with only the file paths
          // that we expected it to be called with.
          t.assert(
            uploadSpecifiedFiles.calledWith(
              analysisKindResult.expectedFiles?.map(toFullPath) ??
                fullSarifPaths,
              sinon.match.any,
              sinon.match.any,
              features,
              logger,
              getAnalysisConfig(analysisKind),
            ),
          );
        } else {
          // Otherwise, we are not expecting a result for this analysis kind. However, note that `undefined`
          // is also returned by our mocked `uploadSpecifiedFiles` when there is no expected result for this
          // analysis kind.
          t.is(actual[analysisKind], undefined);
          // Therefore, we also check that the mocked `uploadSpecifiedFiles` was not called for this analysis kind.
          t.assert(
            !uploadSpecifiedFiles.calledWith(
              sinon.match.any,
              sinon.match.any,
              sinon.match.any,
              features,
              logger,
              getAnalysisConfig(analysisKind),
            ),
            `uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`,
          );
        }
      }
    });
  },
  title: (providedTitle = "") => `uploadSarif - ${providedTitle}`,
});
|
||||
|
||||
// A single `.sarif` file given as a direct file path is uploaded to Code Scanning.
test(
  "SARIF file",
  uploadSarifMacro,
  ["test.sarif"],
  (tempDir) => path.join(tempDir, "test.sarif"),
  {
    "code-scanning": {
      uploadResult: {
        statusReport: {},
        sarifID: "code-scanning-sarif",
      },
    },
  },
);

// A `.json` file given as a direct file path is also uploaded to Code Scanning.
test(
  "JSON file",
  uploadSarifMacro,
  ["test.json"],
  (tempDir) => path.join(tempDir, "test.json"),
  {
    "code-scanning": {
      uploadResult: {
        statusReport: {},
        sarifID: "code-scanning-sarif",
      },
    },
  },
);

// When scanning a directory containing both a `.json` and a `.sarif` file,
// only the `.sarif` file is expected in the Code Scanning upload.
test(
  "Code Scanning files",
  uploadSarifMacro,
  ["test.json", "test.sarif"],
  undefined,
  {
    "code-scanning": {
      uploadResult: {
        statusReport: {},
        sarifID: "code-scanning-sarif",
      },
      expectedFiles: ["test.sarif"],
    },
  },
);

// A `.quality.sarif` file is uploaded to Code Quality rather than Code Scanning.
test(
  "Code Quality file",
  uploadSarifMacro,
  ["test.quality.sarif"],
  (tempDir) => path.join(tempDir, "test.quality.sarif"),
  {
    "code-quality": {
      uploadResult: {
        statusReport: {},
        sarifID: "code-quality-sarif",
      },
    },
  },
);

// A directory with both kinds of files results in one upload per service,
// each with only the files belonging to that analysis kind.
test(
  "Mixed files",
  uploadSarifMacro,
  ["test.sarif", "test.quality.sarif"],
  undefined,
  {
    "code-scanning": {
      uploadResult: {
        statusReport: {},
        sarifID: "code-scanning-sarif",
      },
      expectedFiles: ["test.sarif"],
    },
    "code-quality": {
      uploadResult: {
        statusReport: {},
        sarifID: "code-quality-sarif",
      },
      expectedFiles: ["test.quality.sarif"],
    },
  },
);
|
||||
@@ -0,0 +1,51 @@
|
||||
import * as analyses from "./analyses";
|
||||
import { FeatureEnablement } from "./feature-flags";
|
||||
import { Logger } from "./logging";
|
||||
import * as upload_lib from "./upload-lib";
|
||||
import { unsafeEntriesInvariant } from "./util";
|
||||
|
||||
// Maps analysis kinds to their upload results (the original comment said
// "SARIF IDs", but the values are full `UploadResult` objects).
export type UploadSarifResults = Partial<
  Record<analyses.AnalysisKind, upload_lib.UploadResult>
>;
|
||||
|
||||
/**
|
||||
* Finds SARIF files in `sarifPath` and uploads them to the appropriate services.
|
||||
*
|
||||
* @param logger The logger to use.
|
||||
* @param features Information about enabled features.
|
||||
* @param checkoutPath The path where the repository was checked out at.
|
||||
* @param sarifPath The path to the file or directory to upload.
|
||||
* @param category The analysis category.
|
||||
*
|
||||
* @returns A partial mapping from analysis kinds to the upload results.
|
||||
*/
|
||||
export async function uploadSarif(
|
||||
logger: Logger,
|
||||
features: FeatureEnablement,
|
||||
checkoutPath: string,
|
||||
sarifPath: string,
|
||||
category?: string,
|
||||
): Promise<UploadSarifResults> {
|
||||
const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
|
||||
logger,
|
||||
sarifPath,
|
||||
);
|
||||
|
||||
const uploadResults: UploadSarifResults = {};
|
||||
for (const [analysisKind, sarifFiles] of unsafeEntriesInvariant(
|
||||
sarifGroups,
|
||||
)) {
|
||||
const analysisConfig = analyses.getAnalysisConfig(analysisKind);
|
||||
uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles(
|
||||
sarifFiles,
|
||||
checkoutPath,
|
||||
category,
|
||||
features,
|
||||
logger,
|
||||
analysisConfig,
|
||||
);
|
||||
}
|
||||
|
||||
return uploadResults;
|
||||
}
|
||||
+24
@@ -1287,3 +1287,27 @@ export async function asyncSome<T>(
|
||||
export function isDefined<T>(value: T | null | undefined): value is T {
|
||||
return value !== undefined && value !== null;
|
||||
}
|
||||
|
||||
/** Like `Object.keys`, but typed so that the elements of the resulting array have the
|
||||
* same type as the keys of the input object. Note that this may not be sound if the input
|
||||
* object has been cast to `T` from a subtype of `T` and contains additional keys that
|
||||
* are not represented by `keyof T`.
|
||||
*/
|
||||
export function unsafeKeysInvariant<T extends Record<string, any>>(
|
||||
object: T,
|
||||
): Array<keyof T> {
|
||||
return Object.keys(object) as Array<keyof T>;
|
||||
}
|
||||
|
||||
/** Like `Object.entries`, but typed so that the key elements of the result have the
|
||||
* same type as the keys of the input object. Note that this may not be sound if the input
|
||||
* object has been cast to `T` from a subtype of `T` and contains additional keys that
|
||||
* are not represented by `keyof T`.
|
||||
*/
|
||||
export function unsafeEntriesInvariant<T extends Record<string, any>>(
|
||||
object: T,
|
||||
): Array<[keyof T, Exclude<T[keyof T], undefined>]> {
|
||||
return Object.entries(object).filter(
|
||||
([_, val]) => val !== undefined,
|
||||
) as Array<[keyof T, Exclude<T[keyof T], undefined>]>;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user