Merge pull request #3235 from github/update-v4.31.0-1d36546c1

Merge main into releases/v4
This commit is contained in:
Michael B. Gale
2025-10-24 18:08:08 +01:00
committed by GitHub
66 changed files with 16040 additions and 4521 deletions
+55
View File
@@ -0,0 +1,55 @@
# Configuration for the sizeup-action PR size labeler.
# NOTE(review): indentation reconstructed from the flattened diff rendering —
# nesting follows the sizeup-action config schema; confirm against the schema docs.
labeling:
  # Apply a size-category label (see `categories` below) to each pull request.
  applyCategoryLabels: true
  # Labels are named "<prefix><category label name>", e.g. "size/XS".
  categoryLabelPrefix: "size/"
commenting:
  # Do not post a PR comment when the score threshold is exceeded;
  # labeling alone is the feedback mechanism here.
  addCommentWhenScoreThresholdHasBeenExceeded: false
sizeup:
  # Score buckets, smallest first. `lte` is presumably the inclusive upper
  # bound for the category's score; the last category has no bound and
  # acts as the catch-all — TODO confirm against sizeup-core docs.
  categories:
    - name: extra small
      lte: 25
      label:
        name: XS
        description: Should be very easy to review
        color: 3cbf00
    - name: small
      lte: 100
      label:
        name: S
        description: Should be easy to review
        color: 5d9801
    - name: medium
      lte: 250
      label:
        name: M
        description: Should be of average difficulty to review
        color: 7f7203
    - name: large
      lte: 500
      label:
        name: L
        description: May be hard to review
        color: a14c05
    - name: extra large
      lte: 1000
      label:
        name: XL
        description: May be very hard to review
        color: c32607
    - name: extra extra large
      label:
        name: XXL
        description: May be extremely hard to review
        color: e50009
  # Files excluded from the size score: generated workflow checks,
  # the compiled `lib/` output, and the lockfile.
  ignoredFilePatterns:
    - ".github/workflows/__*"
    - "lib/**/*"
    - "package-lock.json"
  # Changes matching these patterns count as test code.
  testFilePatterns:
    - "**/*.test.ts"
  scoring:
    # This formula and the aliases below it are written in prefix notation.
    # For an explanation of how this works, please see:
    # https://github.com/lerebear/sizeup-core/blob/main/README.md#prefix-notation
    # i.e. (additions + deletions) - comments - whitespace
    formula: "- - + additions deletions comments whitespace"
+1 -1
View File
@@ -49,7 +49,7 @@ jobs:
- name: Check out repository
uses: actions/checkout@v5
- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: npm
@@ -73,7 +73,7 @@ jobs:
- name: Check out repository
uses: actions/checkout@v5
- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: npm
+1 -1
View File
@@ -63,7 +63,7 @@ jobs:
- name: Check out repository
uses: actions/checkout@v5
- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: npm
+1 -1
View File
@@ -63,7 +63,7 @@ jobs:
- name: Check out repository
uses: actions/checkout@v5
- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: npm
+1 -1
View File
@@ -63,7 +63,7 @@ jobs:
- name: Check out repository
uses: actions/checkout@v5
- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 20.x
cache: npm
+9
View File
@@ -80,6 +80,7 @@ jobs:
with:
output: ${{ runner.temp }}/results
upload-database: false
post-processed-sarif-path: ${{ runner.temp }}/post-processed
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/upload-artifact@v4
@@ -96,6 +97,14 @@ jobs:
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: ${{ runner.temp }}/results/javascript.quality.sarif
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v4
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
path: ${{ runner.temp }}/post-processed
retention-days: 7
if-no-files-found: error
- name: Check quality query does not appear in security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
@@ -56,7 +56,7 @@ jobs:
uses: actions/checkout@v5
- name: Set up Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 24
cache: 'npm'
+26
View File
@@ -0,0 +1,26 @@
# Workflow: label each pull request with a size category using
# lerebear/sizeup-action, driven by the config in .github/sizeup.yml.
# NOTE(review): indentation reconstructed from the flattened diff rendering.
name: Label PR with size
on:
  pull_request:
    # Re-run on any event that can change the PR's diff or review state,
    # so the size label stays current.
    types:
      - opened
      - synchronize
      - reopened
      - edited
      - ready_for_review
# Least-privilege token: read repo contents, write PR labels.
permissions:
  contents: read
  pull-requests: write
jobs:
  sizeup:
    name: Label PR with size
    runs-on: ubuntu-latest
    steps:
      - name: Run sizeup
        # Pinned to a full commit SHA (release 0.8.12) rather than a tag,
        # so the action cannot change underneath us.
        uses: lerebear/sizeup-action@b7beb3dd273e36039e16e48e7bc690c189e61951 # 0.8.12
        with:
          token: "${{ secrets.GITHUB_TOKEN }}"
          configuration-file-path: ".github/sizeup.yml"
+1 -1
View File
@@ -47,7 +47,7 @@ jobs:
- uses: actions/checkout@v5
with:
fetch-depth: 0 # ensure we have all tags and can push commits
- uses: actions/setup-node@v5
- uses: actions/setup-node@v6
- name: Update git config
run: |
+1 -1
View File
@@ -35,7 +35,7 @@ jobs:
- uses: actions/checkout@v5
- name: Set up Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
+1 -1
View File
@@ -32,7 +32,7 @@ jobs:
uses: actions/checkout@v5
- name: Install Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 24
cache: npm
+1 -1
View File
@@ -41,7 +41,7 @@ jobs:
git config --global user.name "github-actions[bot]"
- name: Set up Node.js
uses: actions/setup-node@v5
uses: actions/setup-node@v6
with:
node-version: 24
cache: 'npm'
+5
View File
@@ -2,6 +2,11 @@
See the [releases page](https://github.com/github/codeql-action/releases) for the relevant changes to the CodeQL CLI and language packs.
## 4.31.0 - 24 Oct 2025
- Bump minimum CodeQL bundle version to 2.17.6. [#3223](https://github.com/github/codeql-action/pull/3223)
- When SARIF files are uploaded by the `analyze` or `upload-sarif` actions, the CodeQL Action automatically performs post-processing steps to prepare the data for the upload. Previously, these post-processing steps were only performed before an upload took place. We are now changing this so that the post-processing steps will always be performed, even when the SARIF files are not uploaded. This does not change anything for the `upload-sarif` action. For `analyze`, this may affect Advanced Setup for CodeQL users who specify a value other than `always` for the `upload` input. [#3222](https://github.com/github/codeql-action/pull/3222)
## 4.30.9 - 17 Oct 2025
- Update default CodeQL bundle version to 2.23.3. [#3205](https://github.com/github/codeql-action/pull/3205)
+7 -1
View File
@@ -6,7 +6,7 @@ inputs:
description: The name of the check run to add text to.
required: false
output:
description: The path of the directory in which to save the SARIF results
description: The path of the directory in which to save the SARIF results from the CodeQL CLI.
required: false
default: "../results"
upload:
@@ -70,6 +70,12 @@ inputs:
description: Whether to upload the resulting CodeQL database
required: false
default: "true"
post-processed-sarif-path:
description: >-
Before uploading the SARIF files produced by the CodeQL CLI, the CodeQL Action may perform some post-processing
on them. Ordinarily, these post-processed SARIF files are not saved to disk. However, if a path is provided as an
argument for this input, they are written to the specified directory.
required: false
wait-for-processing:
description: If true, the Action will wait for the uploaded SARIF to be processed before completing.
required: true
+1
View File
@@ -131,6 +131,7 @@ export default [
"no-sequences": "error",
"no-shadow": "off",
"@typescript-eslint/no-shadow": "error",
"@typescript-eslint/prefer-optional-chain": "error",
"one-var": ["error", "never"],
},
},
+1334 -29
View File
File diff suppressed because it is too large Load Diff
+1634 -1483
View File
File diff suppressed because it is too large Load Diff
+1363 -39
View File
File diff suppressed because it is too large Load Diff
+1461 -1479
View File
File diff suppressed because it is too large Load Diff
+1632 -119
View File
File diff suppressed because it is too large Load Diff
+1359 -36
View File
File diff suppressed because it is too large Load Diff
+1402 -95
View File
File diff suppressed because it is too large Load Diff
+1334 -23
View File
File diff suppressed because it is too large Load Diff
+1355 -29
View File
File diff suppressed because it is too large Load Diff
+182 -165
View File
@@ -20885,19 +20885,19 @@ var require_validator = __commonJS({
var SchemaError = helpers.SchemaError;
var SchemaContext = helpers.SchemaContext;
var anonymousBase = "/";
var Validator2 = function Validator3() {
this.customFormats = Object.create(Validator3.prototype.customFormats);
var Validator3 = function Validator4() {
this.customFormats = Object.create(Validator4.prototype.customFormats);
this.schemas = {};
this.unresolvedRefs = [];
this.types = Object.create(types);
this.attributes = Object.create(attribute.validators);
};
Validator2.prototype.customFormats = {};
Validator2.prototype.schemas = null;
Validator2.prototype.types = null;
Validator2.prototype.attributes = null;
Validator2.prototype.unresolvedRefs = null;
Validator2.prototype.addSchema = function addSchema(schema2, base) {
Validator3.prototype.customFormats = {};
Validator3.prototype.schemas = null;
Validator3.prototype.types = null;
Validator3.prototype.attributes = null;
Validator3.prototype.unresolvedRefs = null;
Validator3.prototype.addSchema = function addSchema(schema2, base) {
var self2 = this;
if (!schema2) {
return null;
@@ -20915,25 +20915,25 @@ var require_validator = __commonJS({
});
return this.schemas[ourUri];
};
Validator2.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
Validator3.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
if (!Array.isArray(schemas)) return;
for (var i = 0; i < schemas.length; i++) {
this.addSubSchema(baseuri, schemas[i]);
}
};
Validator2.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
Validator3.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
if (!schemas || typeof schemas != "object") return;
for (var p in schemas) {
this.addSubSchema(baseuri, schemas[p]);
}
};
Validator2.prototype.setSchemas = function setSchemas(schemas) {
Validator3.prototype.setSchemas = function setSchemas(schemas) {
this.schemas = schemas;
};
Validator2.prototype.getSchema = function getSchema(urn) {
Validator3.prototype.getSchema = function getSchema(urn) {
return this.schemas[urn];
};
Validator2.prototype.validate = function validate(instance, schema2, options, ctx) {
Validator3.prototype.validate = function validate(instance, schema2, options, ctx) {
if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) {
throw new SchemaError("Expected `schema` to be an object or boolean");
}
@@ -20971,7 +20971,7 @@ var require_validator = __commonJS({
if (typeof ref == "string") return ref;
return false;
}
Validator2.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
Validator3.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
var result = new ValidatorResult(instance, schema2, options, ctx);
if (typeof schema2 === "boolean") {
if (schema2 === true) {
@@ -21021,17 +21021,17 @@ var require_validator = __commonJS({
}
return result;
};
Validator2.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
Validator3.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx));
};
Validator2.prototype.superResolve = function superResolve(schema2, ctx) {
Validator3.prototype.superResolve = function superResolve(schema2, ctx) {
var ref = shouldResolve(schema2);
if (ref) {
return this.resolve(schema2, ref, ctx).subschema;
}
return schema2;
};
Validator2.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
switchSchema = ctx.resolve(switchSchema);
if (ctx.schemas[switchSchema]) {
return { subschema: ctx.schemas[switchSchema], switchSchema };
@@ -21048,7 +21048,7 @@ var require_validator = __commonJS({
}
return { subschema, switchSchema };
};
Validator2.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
Validator3.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
if (type2 === void 0) {
return;
} else if (type2 === null) {
@@ -21063,7 +21063,7 @@ var require_validator = __commonJS({
}
return true;
};
var types = Validator2.prototype.types = {};
var types = Validator3.prototype.types = {};
types.string = function testString(instance) {
return typeof instance == "string";
};
@@ -21091,7 +21091,7 @@ var require_validator = __commonJS({
types.object = function testObject(instance) {
return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date);
};
module2.exports = Validator2;
module2.exports = Validator3;
}
});
@@ -21099,7 +21099,7 @@ var require_validator = __commonJS({
var require_lib2 = __commonJS({
"node_modules/jsonschema/lib/index.js"(exports2, module2) {
"use strict";
var Validator2 = module2.exports.Validator = require_validator();
var Validator3 = module2.exports.Validator = require_validator();
module2.exports.ValidatorResult = require_helpers().ValidatorResult;
module2.exports.ValidatorResultError = require_helpers().ValidatorResultError;
module2.exports.ValidationError = require_helpers().ValidationError;
@@ -21107,7 +21107,7 @@ var require_lib2 = __commonJS({
module2.exports.SchemaScanResult = require_scan().SchemaScanResult;
module2.exports.scan = require_scan().scan;
module2.exports.validate = function(instance, schema2, options) {
var v = new Validator2();
var v = new Validator3();
return v.validate(instance, schema2, options);
};
}
@@ -21899,14 +21899,14 @@ var require_dist_node4 = __commonJS({
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
var dist_src_exports = {};
__export2(dist_src_exports, {
RequestError: () => RequestError2
RequestError: () => RequestError
});
module2.exports = __toCommonJS2(dist_src_exports);
var import_deprecation = require_dist_node3();
var import_once = __toESM2(require_once());
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
var RequestError2 = class extends Error {
var RequestError = class extends Error {
constructor(message, statusCode, options) {
super(message);
if (Error.captureStackTrace) {
@@ -21998,7 +21998,7 @@ var require_dist_node5 = __commonJS({
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
var import_request_error2 = require_dist_node4();
var import_request_error = require_dist_node4();
function getBufferResponse(response) {
return response.arrayBuffer();
}
@@ -22050,7 +22050,7 @@ var require_dist_node5 = __commonJS({
if (status < 400) {
return;
}
throw new import_request_error2.RequestError(response.statusText, status, {
throw new import_request_error.RequestError(response.statusText, status, {
response: {
url: url2,
status,
@@ -22061,7 +22061,7 @@ var require_dist_node5 = __commonJS({
});
}
if (status === 304) {
throw new import_request_error2.RequestError("Not modified", status, {
throw new import_request_error.RequestError("Not modified", status, {
response: {
url: url2,
status,
@@ -22073,7 +22073,7 @@ var require_dist_node5 = __commonJS({
}
if (status >= 400) {
const data = await getResponseData(response);
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
response: {
url: url2,
status,
@@ -22093,7 +22093,7 @@ var require_dist_node5 = __commonJS({
data
};
}).catch((error2) => {
if (error2 instanceof import_request_error2.RequestError)
if (error2 instanceof import_request_error.RequestError)
throw error2;
else if (error2.name === "AbortError")
throw error2;
@@ -22105,7 +22105,7 @@ var require_dist_node5 = __commonJS({
message = error2.cause;
}
}
throw new import_request_error2.RequestError(message, 500, {
throw new import_request_error.RequestError(message, 500, {
request: requestOptions
});
});
@@ -22547,14 +22547,14 @@ var require_dist_node7 = __commonJS({
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
var dist_src_exports = {};
__export2(dist_src_exports, {
RequestError: () => RequestError2
RequestError: () => RequestError
});
module2.exports = __toCommonJS2(dist_src_exports);
var import_deprecation = require_dist_node3();
var import_once = __toESM2(require_once());
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
var RequestError2 = class extends Error {
var RequestError = class extends Error {
constructor(message, statusCode, options) {
super(message);
if (Error.captureStackTrace) {
@@ -22646,7 +22646,7 @@ var require_dist_node8 = __commonJS({
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
var import_request_error2 = require_dist_node7();
var import_request_error = require_dist_node7();
function getBufferResponse(response) {
return response.arrayBuffer();
}
@@ -22698,7 +22698,7 @@ var require_dist_node8 = __commonJS({
if (status < 400) {
return;
}
throw new import_request_error2.RequestError(response.statusText, status, {
throw new import_request_error.RequestError(response.statusText, status, {
response: {
url: url2,
status,
@@ -22709,7 +22709,7 @@ var require_dist_node8 = __commonJS({
});
}
if (status === 304) {
throw new import_request_error2.RequestError("Not modified", status, {
throw new import_request_error.RequestError("Not modified", status, {
response: {
url: url2,
status,
@@ -22721,7 +22721,7 @@ var require_dist_node8 = __commonJS({
}
if (status >= 400) {
const data = await getResponseData(response);
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
response: {
url: url2,
status,
@@ -22741,7 +22741,7 @@ var require_dist_node8 = __commonJS({
data
};
}).catch((error2) => {
if (error2 instanceof import_request_error2.RequestError)
if (error2 instanceof import_request_error.RequestError)
throw error2;
else if (error2.name === "AbortError")
throw error2;
@@ -22753,7 +22753,7 @@ var require_dist_node8 = __commonJS({
message = error2.cause;
}
}
throw new import_request_error2.RequestError(message, 500, {
throw new import_request_error.RequestError(message, 500, {
request: requestOptions
});
});
@@ -33606,7 +33606,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.30.9",
version: "4.31.0",
private: true,
description: "CodeQL action",
scripts: {
@@ -33654,7 +33654,7 @@ var require_package = __commonJS({
jsonschema: "1.4.1",
long: "^5.3.2",
"node-forge": "^1.3.1",
octokit: "^5.0.3",
octokit: "^5.0.4",
semver: "^7.7.3",
uuid: "^13.0.0"
},
@@ -33662,7 +33662,7 @@ var require_package = __commonJS({
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.4.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.37.0",
"@eslint/js": "^9.38.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
@@ -33673,10 +33673,10 @@ var require_package = __commonJS({
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.46.0",
"@typescript-eslint/eslint-plugin": "^8.46.1",
"@typescript-eslint/parser": "^8.41.0",
ava: "^6.4.1",
esbuild: "^0.25.10",
esbuild: "^0.25.11",
eslint: "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -35065,14 +35065,14 @@ var require_dist_node14 = __commonJS({
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
var dist_src_exports = {};
__export2(dist_src_exports, {
RequestError: () => RequestError2
RequestError: () => RequestError
});
module2.exports = __toCommonJS2(dist_src_exports);
var import_deprecation = require_dist_node3();
var import_once = __toESM2(require_once());
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
var RequestError2 = class extends Error {
var RequestError = class extends Error {
constructor(message, statusCode, options) {
super(message);
if (Error.captureStackTrace) {
@@ -35174,7 +35174,7 @@ var require_dist_node15 = __commonJS({
throw error2;
}
var import_light = __toESM2(require_light());
var import_request_error2 = require_dist_node14();
var import_request_error = require_dist_node14();
async function wrapRequest(state, octokit, request, options) {
const limiter = new import_light.default();
limiter.on("failed", function(error2, info4) {
@@ -35195,7 +35195,7 @@ var require_dist_node15 = __commonJS({
if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
response.data.errors[0].message
)) {
const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
request: options,
response
});
@@ -80921,14 +80921,14 @@ var require_tool_cache = __commonJS({
var assert_1 = require("assert");
var exec_1 = require_exec();
var retry_helper_1 = require_retry_helper();
var HTTPError = class extends Error {
var HTTPError2 = class extends Error {
constructor(httpStatusCode) {
super(`Unexpected HTTP response: ${httpStatusCode}`);
this.httpStatusCode = httpStatusCode;
Object.setPrototypeOf(this, new.target.prototype);
}
};
exports2.HTTPError = HTTPError;
exports2.HTTPError = HTTPError2;
var IS_WINDOWS = process.platform === "win32";
var IS_MAC = process.platform === "darwin";
var userAgent = "actions/tool-cache";
@@ -80945,7 +80945,7 @@ var require_tool_cache = __commonJS({
return yield retryHelper.execute(() => __awaiter4(this, void 0, void 0, function* () {
return yield downloadToolAttempt(url2, dest || "", auth, headers);
}), (err) => {
if (err instanceof HTTPError && err.httpStatusCode) {
if (err instanceof HTTPError2 && err.httpStatusCode) {
if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) {
return false;
}
@@ -80972,7 +80972,7 @@ var require_tool_cache = __commonJS({
}
const response = yield http.get(url2, headers);
if (response.message.statusCode !== 200) {
const err = new HTTPError(response.message.statusCode);
const err = new HTTPError2(response.message.statusCode);
core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
}
@@ -84847,6 +84847,7 @@ __export(upload_lib_exports, {
getGroupedSarifFilePaths: () => getGroupedSarifFilePaths,
getSarifFilePaths: () => getSarifFilePaths,
populateRunAutomationDetails: () => populateRunAutomationDetails,
postProcessSarifFiles: () => postProcessSarifFiles,
readSarifFile: () => readSarifFile,
shouldConsiderConfigurationError: () => shouldConsiderConfigurationError,
shouldConsiderInvalidRequest: () => shouldConsiderInvalidRequest,
@@ -84854,10 +84855,11 @@ __export(upload_lib_exports, {
throwIfCombineSarifFilesDisabled: () => throwIfCombineSarifFilesDisabled,
uploadFiles: () => uploadFiles,
uploadPayload: () => uploadPayload,
uploadSpecifiedFiles: () => uploadSpecifiedFiles,
uploadPostProcessedFiles: () => uploadPostProcessedFiles,
validateSarifFileSchema: () => validateSarifFileSchema,
validateUniqueCategory: () => validateUniqueCategory,
waitForProcessing: () => waitForProcessing
waitForProcessing: () => waitForProcessing,
writePostProcessedFiles: () => writePostProcessedFiles
});
module.exports = __toCommonJS(upload_lib_exports);
var fs13 = __toESM(require("fs"));
@@ -84865,7 +84867,7 @@ var path14 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
var core11 = __toESM(require_core());
var jsonschema = __toESM(require_lib2());
var jsonschema2 = __toESM(require_lib2());
// src/actions-util.ts
var fs4 = __toESM(require("fs"));
@@ -88328,13 +88330,35 @@ function getRequiredEnvParam(paramName) {
}
return value;
}
function getOptionalEnvVar(paramName) {
const value = process.env[paramName];
if (value?.trim().length === 0) {
return void 0;
}
return value;
}
var HTTPError = class extends Error {
constructor(message, status) {
super(message);
this.status = status;
}
};
var ConfigurationError = class extends Error {
constructor(message) {
super(message);
}
};
function isHTTPError(arg) {
return arg?.status !== void 0 && Number.isInteger(arg.status);
function asHTTPError(arg) {
if (typeof arg !== "object" || arg === null || typeof arg.message !== "string") {
return void 0;
}
if (Number.isInteger(arg.status)) {
return new HTTPError(arg.message, arg.status);
}
if (Number.isInteger(arg.httpStatusCode)) {
return new HTTPError(arg.message, arg.httpStatusCode);
}
return void 0;
}
var cachedCodeQlVersion = void 0;
function cacheCodeQlVersion(version) {
@@ -88747,14 +88771,24 @@ function computeAutomationID(analysis_key, environment) {
return automationID;
}
function wrapApiConfigurationError(e) {
if (isHTTPError(e)) {
if (e.message.includes("API rate limit exceeded for installation") || e.message.includes("commit not found") || e.message.includes("Resource not accessible by integration") || /ref .* not found in this repository/.test(e.message)) {
return new ConfigurationError(e.message);
} else if (e.message.includes("Bad credentials") || e.message.includes("Not Found")) {
const httpError = asHTTPError(e);
if (httpError !== void 0) {
if ([
/API rate limit exceeded/,
/commit not found/,
/Resource not accessible by integration/,
/ref .* not found in this repository/
].some((pattern) => pattern.test(httpError.message))) {
return new ConfigurationError(httpError.message);
}
if (httpError.message.includes("Bad credentials") || httpError.message.includes("Not Found")) {
return new ConfigurationError(
"Please check that your token is valid and has the required permissions: contents: read, security-events: write"
);
}
if (httpError.status === 429) {
return new ConfigurationError("API rate limit exceeded");
}
}
return e;
}
@@ -88765,45 +88799,6 @@ var path12 = __toESM(require("path"));
var core10 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
// node_modules/@octokit/request-error/dist-src/index.js
var RequestError = class extends Error {
name;
/**
* http status code
*/
status;
/**
* Request options that lead to the error.
*/
request;
/**
* Response object if a response was received
*/
response;
constructor(message, statusCode, options) {
super(message);
this.name = "HttpError";
this.status = Number.parseInt(statusCode);
if (Number.isNaN(this.status)) {
this.status = 0;
}
if ("response" in options) {
this.response = options.response;
}
const requestCopy = Object.assign({}, options.request);
if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, {
authorization: options.request.headers.authorization.replace(
/(?<! ) .*$/,
" [REDACTED]"
)
});
}
requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
this.request = requestCopy;
}
};
// src/cli-errors.ts
var SUPPORTED_PLATFORMS = [
["linux", "x64"],
@@ -88971,6 +88966,9 @@ var cliErrorsConfig = {
cliErrorMessageCandidates: [
new RegExp(
"Query pack .* cannot be found\\. Check the spelling of the pack\\."
),
new RegExp(
"is not a .ql file, .qls file, a directory, or a query pack specification."
)
]
},
@@ -89049,6 +89047,7 @@ var path9 = __toESM(require("path"));
var core6 = __toESM(require_core());
// src/config/db-config.ts
var jsonschema = __toESM(require_lib2());
var semver2 = __toESM(require_semver2());
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
@@ -89286,7 +89285,7 @@ function formatDuration(durationMs) {
// src/overlay-database-utils.ts
var CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15e3;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
@@ -89359,6 +89358,11 @@ var featureConfig = {
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: void 0
},
["analyze_use_new_upload" /* AnalyzeUseNewUpload */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
minimumVersion: void 0
},
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -89530,6 +89534,11 @@ var featureConfig = {
defaultValue: false,
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0"
},
["validate_db_config" /* ValidateDbConfig */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
minimumVersion: void 0
}
};
@@ -89708,13 +89717,13 @@ async function getTarVersion() {
}
if (stdout.includes("GNU tar")) {
const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
return { type: "gnu", version: match[1] };
} else if (stdout.includes("bsdtar")) {
const match = stdout.match(/bsdtar ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
return { type: "bsd", version: match[1] };
@@ -90094,7 +90103,7 @@ function tryGetTagNameFromUrl(url2, logger) {
return void 0;
}
const match = matches[matches.length - 1];
if (match === null || match.length !== 2) {
if (match?.length !== 2) {
logger.debug(
`Could not determine tag name for URL ${url2}. Matched ${JSON.stringify(
match
@@ -90554,7 +90563,7 @@ async function shouldEnableIndirectTracing(codeql, config) {
// src/codeql.ts
var cachedCodeQL = void 0;
var CODEQL_MINIMUM_VERSION = "2.16.6";
var CODEQL_MINIMUM_VERSION = "2.17.6";
var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
@@ -90598,9 +90607,9 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
toolsVersion,
zstdAvailability
};
} catch (e) {
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
} catch (rawError) {
const e = wrapApiConfigurationError(rawError);
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
throw new ErrorClass(
`Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `
@@ -90889,12 +90898,6 @@ ${output}`
} else {
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (!isSupportedToolsFeature(
await this.getVersion(),
"analysisSummaryV2Default" /* AnalysisSummaryV2IsDefault */
)) {
codeqlArgs.push("--new-analysis-summary");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {
codeqlArgs.push(...querySuitePaths);
@@ -92444,24 +92447,6 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
);
codeQL = initCodeQLResult.codeql;
}
if (!await codeQL.supportsFeature(
"sarifMergeRunsFromEqualCategory" /* SarifMergeRunsFromEqualCategory */
)) {
await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
logger.warning(
"The CodeQL CLI does not support merging SARIF files. Merging files in the action."
);
if (await shouldShowCombineSarifFilesDeprecationWarning(
sarifObjects,
gitHubVersion
)) {
logger.warning(
`Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`
);
core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
}
return combineSarifFiles(sarifFiles, logger);
}
const baseTempDir = path14.resolve(tempDir, "combined-sarif");
fs13.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs13.mkdtempSync(path14.resolve(baseTempDir, "output-"));
@@ -92520,16 +92505,17 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
logger.info("Successfully uploaded results");
return response.data.id;
} catch (e) {
if (isHTTPError(e)) {
switch (e.status) {
const httpError = asHTTPError(e);
if (httpError !== void 0) {
switch (httpError.status) {
case 403:
core11.warning(e.message || GENERIC_403_MSG);
core11.warning(httpError.message || GENERIC_403_MSG);
break;
case 404:
core11.warning(e.message || GENERIC_404_MSG);
core11.warning(httpError.message || GENERIC_404_MSG);
break;
default:
core11.warning(e.message);
core11.warning(httpError.message);
break;
}
}
@@ -92651,7 +92637,7 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) {
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarif, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -92711,26 +92697,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
}
return payloadObj;
}
async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget
);
}
async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
category = uploadTarget.fixCategory(logger, category);
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
@@ -92758,28 +92729,72 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
analysisKey,
environment
);
return { sarif, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
if (outputPath !== void 0) {
dumpSarifFile(
JSON.stringify(postProcessingResults.sarif),
outputPath,
logger,
uploadTarget
);
} else {
logger.debug(`Not writing post-processed SARIF files.`);
}
}
async function uploadFiles(inputSarifPath, checkoutPath, category, features, logger, uploadTarget) {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget
);
}
async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
const processingResults = await postProcessSarifFiles(
logger,
features,
checkoutPath,
sarifPaths,
category,
uploadTarget
);
return uploadPostProcessedFiles(
logger,
checkoutPath,
uploadTarget,
processingResults
);
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
if (dumpDir) {
dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
}
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
const payload = buildPayload(
await getCommitOid(checkoutPath),
await getRef(),
analysisKey,
postProcessingResults.analysisKey,
getRequiredEnvParam("GITHUB_WORKFLOW"),
zippedSarif,
getWorkflowRunID(),
getWorkflowRunAttempt(),
checkoutURI,
environment,
postProcessingResults.environment,
toolNames,
await determineBaseBranchHeadCommitOid()
);
@@ -92810,14 +92825,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
fs13.mkdirSync(outputDir, { recursive: true });
} else if (!fs13.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}`
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
}
const outputFile = path14.resolve(
outputDir,
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Dumping processed SARIF file to ${outputFile}`);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs13.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
@@ -92979,6 +92994,7 @@ function filterAlertsByDiffRange(logger, sarif) {
getGroupedSarifFilePaths,
getSarifFilePaths,
populateRunAutomationDetails,
postProcessSarifFiles,
readSarifFile,
shouldConsiderConfigurationError,
shouldConsiderInvalidRequest,
@@ -92986,10 +93002,11 @@ function filterAlertsByDiffRange(logger, sarif) {
throwIfCombineSarifFilesDisabled,
uploadFiles,
uploadPayload,
uploadSpecifiedFiles,
uploadPostProcessedFiles,
validateSarifFileSchema,
validateUniqueCategory,
waitForProcessing
waitForProcessing,
writePostProcessedFiles
});
/*! Bundled license information:
+1317 -6
View File
File diff suppressed because it is too large Load Diff
+181 -165
View File
@@ -20602,14 +20602,14 @@ var require_dist_node4 = __commonJS({
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
var dist_src_exports = {};
__export2(dist_src_exports, {
RequestError: () => RequestError2
RequestError: () => RequestError
});
module2.exports = __toCommonJS2(dist_src_exports);
var import_deprecation = require_dist_node3();
var import_once = __toESM2(require_once());
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
var RequestError2 = class extends Error {
var RequestError = class extends Error {
constructor(message, statusCode, options) {
super(message);
if (Error.captureStackTrace) {
@@ -20701,7 +20701,7 @@ var require_dist_node5 = __commonJS({
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
var import_request_error2 = require_dist_node4();
var import_request_error = require_dist_node4();
function getBufferResponse(response) {
return response.arrayBuffer();
}
@@ -20753,7 +20753,7 @@ var require_dist_node5 = __commonJS({
if (status < 400) {
return;
}
throw new import_request_error2.RequestError(response.statusText, status, {
throw new import_request_error.RequestError(response.statusText, status, {
response: {
url: url2,
status,
@@ -20764,7 +20764,7 @@ var require_dist_node5 = __commonJS({
});
}
if (status === 304) {
throw new import_request_error2.RequestError("Not modified", status, {
throw new import_request_error.RequestError("Not modified", status, {
response: {
url: url2,
status,
@@ -20776,7 +20776,7 @@ var require_dist_node5 = __commonJS({
}
if (status >= 400) {
const data = await getResponseData(response);
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
response: {
url: url2,
status,
@@ -20796,7 +20796,7 @@ var require_dist_node5 = __commonJS({
data
};
}).catch((error2) => {
if (error2 instanceof import_request_error2.RequestError)
if (error2 instanceof import_request_error.RequestError)
throw error2;
else if (error2.name === "AbortError")
throw error2;
@@ -20808,7 +20808,7 @@ var require_dist_node5 = __commonJS({
message = error2.cause;
}
}
throw new import_request_error2.RequestError(message, 500, {
throw new import_request_error.RequestError(message, 500, {
request: requestOptions
});
});
@@ -21250,14 +21250,14 @@ var require_dist_node7 = __commonJS({
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
var dist_src_exports = {};
__export2(dist_src_exports, {
RequestError: () => RequestError2
RequestError: () => RequestError
});
module2.exports = __toCommonJS2(dist_src_exports);
var import_deprecation = require_dist_node3();
var import_once = __toESM2(require_once());
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
var RequestError2 = class extends Error {
var RequestError = class extends Error {
constructor(message, statusCode, options) {
super(message);
if (Error.captureStackTrace) {
@@ -21349,7 +21349,7 @@ var require_dist_node8 = __commonJS({
const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
}
var import_request_error2 = require_dist_node7();
var import_request_error = require_dist_node7();
function getBufferResponse(response) {
return response.arrayBuffer();
}
@@ -21401,7 +21401,7 @@ var require_dist_node8 = __commonJS({
if (status < 400) {
return;
}
throw new import_request_error2.RequestError(response.statusText, status, {
throw new import_request_error.RequestError(response.statusText, status, {
response: {
url: url2,
status,
@@ -21412,7 +21412,7 @@ var require_dist_node8 = __commonJS({
});
}
if (status === 304) {
throw new import_request_error2.RequestError("Not modified", status, {
throw new import_request_error.RequestError("Not modified", status, {
response: {
url: url2,
status,
@@ -21424,7 +21424,7 @@ var require_dist_node8 = __commonJS({
}
if (status >= 400) {
const data = await getResponseData(response);
const error2 = new import_request_error2.RequestError(toErrorMessage(data), status, {
const error2 = new import_request_error.RequestError(toErrorMessage(data), status, {
response: {
url: url2,
status,
@@ -21444,7 +21444,7 @@ var require_dist_node8 = __commonJS({
data
};
}).catch((error2) => {
if (error2 instanceof import_request_error2.RequestError)
if (error2 instanceof import_request_error.RequestError)
throw error2;
else if (error2.name === "AbortError")
throw error2;
@@ -21456,7 +21456,7 @@ var require_dist_node8 = __commonJS({
message = error2.cause;
}
}
throw new import_request_error2.RequestError(message, 500, {
throw new import_request_error.RequestError(message, 500, {
request: requestOptions
});
});
@@ -32309,7 +32309,7 @@ var require_package = __commonJS({
"package.json"(exports2, module2) {
module2.exports = {
name: "codeql",
version: "4.30.9",
version: "4.31.0",
private: true,
description: "CodeQL action",
scripts: {
@@ -32357,7 +32357,7 @@ var require_package = __commonJS({
jsonschema: "1.4.1",
long: "^5.3.2",
"node-forge": "^1.3.1",
octokit: "^5.0.3",
octokit: "^5.0.4",
semver: "^7.7.3",
uuid: "^13.0.0"
},
@@ -32365,7 +32365,7 @@ var require_package = __commonJS({
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.4.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.37.0",
"@eslint/js": "^9.38.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
@@ -32376,10 +32376,10 @@ var require_package = __commonJS({
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.46.0",
"@typescript-eslint/eslint-plugin": "^8.46.1",
"@typescript-eslint/parser": "^8.41.0",
ava: "^6.4.1",
esbuild: "^0.25.10",
esbuild: "^0.25.11",
eslint: "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
@@ -33768,14 +33768,14 @@ var require_dist_node14 = __commonJS({
var __toCommonJS2 = (mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod);
var dist_src_exports = {};
__export2(dist_src_exports, {
RequestError: () => RequestError2
RequestError: () => RequestError
});
module2.exports = __toCommonJS2(dist_src_exports);
var import_deprecation = require_dist_node3();
var import_once = __toESM2(require_once());
var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation));
var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation));
var RequestError2 = class extends Error {
var RequestError = class extends Error {
constructor(message, statusCode, options) {
super(message);
if (Error.captureStackTrace) {
@@ -33877,7 +33877,7 @@ var require_dist_node15 = __commonJS({
throw error2;
}
var import_light = __toESM2(require_light());
var import_request_error2 = require_dist_node14();
var import_request_error = require_dist_node14();
async function wrapRequest(state, octokit, request, options) {
const limiter = new import_light.default();
limiter.on("failed", function(error2, info4) {
@@ -33898,7 +33898,7 @@ var require_dist_node15 = __commonJS({
if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test(
response.data.errors[0].message
)) {
const error2 = new import_request_error2.RequestError(response.data.errors[0].message, 500, {
const error2 = new import_request_error.RequestError(response.data.errors[0].message, 500, {
request: options,
response
});
@@ -80374,19 +80374,19 @@ var require_validator2 = __commonJS({
var SchemaError = helpers.SchemaError;
var SchemaContext = helpers.SchemaContext;
var anonymousBase = "/";
var Validator2 = function Validator3() {
this.customFormats = Object.create(Validator3.prototype.customFormats);
var Validator3 = function Validator4() {
this.customFormats = Object.create(Validator4.prototype.customFormats);
this.schemas = {};
this.unresolvedRefs = [];
this.types = Object.create(types);
this.attributes = Object.create(attribute.validators);
};
Validator2.prototype.customFormats = {};
Validator2.prototype.schemas = null;
Validator2.prototype.types = null;
Validator2.prototype.attributes = null;
Validator2.prototype.unresolvedRefs = null;
Validator2.prototype.addSchema = function addSchema(schema2, base) {
Validator3.prototype.customFormats = {};
Validator3.prototype.schemas = null;
Validator3.prototype.types = null;
Validator3.prototype.attributes = null;
Validator3.prototype.unresolvedRefs = null;
Validator3.prototype.addSchema = function addSchema(schema2, base) {
var self2 = this;
if (!schema2) {
return null;
@@ -80404,25 +80404,25 @@ var require_validator2 = __commonJS({
});
return this.schemas[ourUri];
};
Validator2.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
Validator3.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) {
if (!Array.isArray(schemas)) return;
for (var i = 0; i < schemas.length; i++) {
this.addSubSchema(baseuri, schemas[i]);
}
};
Validator2.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
Validator3.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) {
if (!schemas || typeof schemas != "object") return;
for (var p in schemas) {
this.addSubSchema(baseuri, schemas[p]);
}
};
Validator2.prototype.setSchemas = function setSchemas(schemas) {
Validator3.prototype.setSchemas = function setSchemas(schemas) {
this.schemas = schemas;
};
Validator2.prototype.getSchema = function getSchema(urn) {
Validator3.prototype.getSchema = function getSchema(urn) {
return this.schemas[urn];
};
Validator2.prototype.validate = function validate(instance, schema2, options, ctx) {
Validator3.prototype.validate = function validate(instance, schema2, options, ctx) {
if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) {
throw new SchemaError("Expected `schema` to be an object or boolean");
}
@@ -80460,7 +80460,7 @@ var require_validator2 = __commonJS({
if (typeof ref == "string") return ref;
return false;
}
Validator2.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
Validator3.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) {
var result = new ValidatorResult(instance, schema2, options, ctx);
if (typeof schema2 === "boolean") {
if (schema2 === true) {
@@ -80510,17 +80510,17 @@ var require_validator2 = __commonJS({
}
return result;
};
Validator2.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
Validator3.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) {
schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx));
};
Validator2.prototype.superResolve = function superResolve(schema2, ctx) {
Validator3.prototype.superResolve = function superResolve(schema2, ctx) {
var ref = shouldResolve(schema2);
if (ref) {
return this.resolve(schema2, ref, ctx).subschema;
}
return schema2;
};
Validator2.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
Validator3.prototype.resolve = function resolve6(schema2, switchSchema, ctx) {
switchSchema = ctx.resolve(switchSchema);
if (ctx.schemas[switchSchema]) {
return { subschema: ctx.schemas[switchSchema], switchSchema };
@@ -80537,7 +80537,7 @@ var require_validator2 = __commonJS({
}
return { subschema, switchSchema };
};
Validator2.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
Validator3.prototype.testType = function validateType(instance, schema2, options, ctx, type2) {
if (type2 === void 0) {
return;
} else if (type2 === null) {
@@ -80552,7 +80552,7 @@ var require_validator2 = __commonJS({
}
return true;
};
var types = Validator2.prototype.types = {};
var types = Validator3.prototype.types = {};
types.string = function testString(instance) {
return typeof instance == "string";
};
@@ -80580,7 +80580,7 @@ var require_validator2 = __commonJS({
types.object = function testObject(instance) {
return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date);
};
module2.exports = Validator2;
module2.exports = Validator3;
}
});
@@ -80588,7 +80588,7 @@ var require_validator2 = __commonJS({
var require_lib2 = __commonJS({
"node_modules/jsonschema/lib/index.js"(exports2, module2) {
"use strict";
var Validator2 = module2.exports.Validator = require_validator2();
var Validator3 = module2.exports.Validator = require_validator2();
module2.exports.ValidatorResult = require_helpers3().ValidatorResult;
module2.exports.ValidatorResultError = require_helpers3().ValidatorResultError;
module2.exports.ValidationError = require_helpers3().ValidationError;
@@ -80596,7 +80596,7 @@ var require_lib2 = __commonJS({
module2.exports.SchemaScanResult = require_scan2().SchemaScanResult;
module2.exports.scan = require_scan2().scan;
module2.exports.validate = function(instance, schema2, options) {
var v = new Validator2();
var v = new Validator3();
return v.validate(instance, schema2, options);
};
}
@@ -80921,14 +80921,14 @@ var require_tool_cache = __commonJS({
var assert_1 = require("assert");
var exec_1 = require_exec();
var retry_helper_1 = require_retry_helper();
var HTTPError = class extends Error {
var HTTPError2 = class extends Error {
constructor(httpStatusCode) {
super(`Unexpected HTTP response: ${httpStatusCode}`);
this.httpStatusCode = httpStatusCode;
Object.setPrototypeOf(this, new.target.prototype);
}
};
exports2.HTTPError = HTTPError;
exports2.HTTPError = HTTPError2;
var IS_WINDOWS = process.platform === "win32";
var IS_MAC = process.platform === "darwin";
var userAgent = "actions/tool-cache";
@@ -80945,7 +80945,7 @@ var require_tool_cache = __commonJS({
return yield retryHelper.execute(() => __awaiter4(this, void 0, void 0, function* () {
return yield downloadToolAttempt(url2, dest || "", auth, headers);
}), (err) => {
if (err instanceof HTTPError && err.httpStatusCode) {
if (err instanceof HTTPError2 && err.httpStatusCode) {
if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) {
return false;
}
@@ -80972,7 +80972,7 @@ var require_tool_cache = __commonJS({
}
const response = yield http.get(url2, headers);
if (response.message.statusCode !== 200) {
const err = new HTTPError(response.message.statusCode);
const err = new HTTPError2(response.message.statusCode);
core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`);
throw err;
}
@@ -88444,13 +88444,35 @@ function getRequiredEnvParam(paramName) {
}
return value;
}
function getOptionalEnvVar(paramName) {
const value = process.env[paramName];
if (value?.trim().length === 0) {
return void 0;
}
return value;
}
var HTTPError = class extends Error {
constructor(message, status) {
super(message);
this.status = status;
}
};
var ConfigurationError = class extends Error {
constructor(message) {
super(message);
}
};
function isHTTPError(arg) {
return arg?.status !== void 0 && Number.isInteger(arg.status);
function asHTTPError(arg) {
if (typeof arg !== "object" || arg === null || typeof arg.message !== "string") {
return void 0;
}
if (Number.isInteger(arg.status)) {
return new HTTPError(arg.message, arg.status);
}
if (Number.isInteger(arg.httpStatusCode)) {
return new HTTPError(arg.message, arg.httpStatusCode);
}
return void 0;
}
var cachedCodeQlVersion = void 0;
function cacheCodeQlVersion(version) {
@@ -88958,14 +88980,24 @@ function computeAutomationID(analysis_key, environment) {
return automationID;
}
function wrapApiConfigurationError(e) {
if (isHTTPError(e)) {
if (e.message.includes("API rate limit exceeded for installation") || e.message.includes("commit not found") || e.message.includes("Resource not accessible by integration") || /ref .* not found in this repository/.test(e.message)) {
return new ConfigurationError(e.message);
} else if (e.message.includes("Bad credentials") || e.message.includes("Not Found")) {
const httpError = asHTTPError(e);
if (httpError !== void 0) {
if ([
/API rate limit exceeded/,
/commit not found/,
/Resource not accessible by integration/,
/ref .* not found in this repository/
].some((pattern) => pattern.test(httpError.message))) {
return new ConfigurationError(httpError.message);
}
if (httpError.message.includes("Bad credentials") || httpError.message.includes("Not Found")) {
return new ConfigurationError(
"Please check that your token is valid and has the required permissions: contents: read, security-events: write"
);
}
if (httpError.status === 429) {
return new ConfigurationError("API rate limit exceeded");
}
}
return e;
}
@@ -89200,7 +89232,7 @@ function formatDuration(durationMs) {
// src/overlay-database-utils.ts
var CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15e3;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6;
async function writeBaseDatabaseOidsFile(config, sourceRoot) {
const gitFileOids = await getFileOidsUnderPath(sourceRoot);
@@ -89275,6 +89307,11 @@ var featureConfig = {
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: void 0
},
["analyze_use_new_upload" /* AnalyzeUseNewUpload */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
minimumVersion: void 0
},
["cleanup_trap_caches" /* CleanupTrapCaches */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -89446,6 +89483,11 @@ var featureConfig = {
defaultValue: false,
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0"
},
["validate_db_config" /* ValidateDbConfig */]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
minimumVersion: void 0
}
};
var FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
@@ -89688,7 +89730,7 @@ var GitHubFeatureFlags = class {
remoteFlags = { ...remoteFlags, ...chunkFlags };
}
this.logger.debug(
"Loaded the following default values for the feature flags from the Code Scanning API:"
"Loaded the following default values for the feature flags from the CodeQL Action API:"
);
for (const [feature, value] of Object.entries(remoteFlags).sort(
([nameA], [nameB]) => nameA.localeCompare(nameB)
@@ -89698,9 +89740,10 @@ var GitHubFeatureFlags = class {
this.hasAccessedRemoteFeatureFlags = true;
return remoteFlags;
} catch (e) {
if (isHTTPError(e) && e.status === 403) {
const httpError = asHTTPError(e);
if (httpError?.status === 403) {
this.logger.warning(
`This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. As a result, it will not be opted into any experimental features. This could be because the Action is running on a pull request from a fork. If not, please ensure the Action has the 'security-events: write' permission. Details: ${e.message}`
`This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. As a result, it will not be opted into any experimental features. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`
);
this.hasAccessedRemoteFeatureFlags = false;
return {};
@@ -89725,6 +89768,7 @@ var path10 = __toESM(require("path"));
var core8 = __toESM(require_core());
// src/config/db-config.ts
var jsonschema = __toESM(require_lib2());
var semver4 = __toESM(require_semver2());
var PACK_IDENTIFIER_PATTERN = (function() {
const alphaNumeric = "[a-z0-9]";
@@ -89950,8 +89994,8 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi
return void 0;
}
}
var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
var OUT_OF_DATE_MSG = "CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`.";
var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`.";
async function sendStatusReport(statusReport) {
setJobStatusIfUnsuccessful(statusReport.status);
const statusReportJSON = JSON.stringify(statusReport);
@@ -89972,19 +90016,22 @@ async function sendStatusReport(statusReport) {
}
);
} catch (e) {
if (isHTTPError(e)) {
switch (e.status) {
const httpError = asHTTPError(e);
if (httpError !== void 0) {
switch (httpError.status) {
case 403:
if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") {
core9.warning(
`Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading Code Scanning results requires write access. To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.`
`Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.`
);
} else {
core9.warning(e.message);
core9.warning(
`This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`
);
}
return;
case 404:
core9.warning(e.message);
core9.warning(httpError.message);
return;
case 422:
if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) {
@@ -89996,7 +90043,7 @@ async function sendStatusReport(statusReport) {
}
}
core9.warning(
`An unexpected error occurred when sending code scanning status report: ${getErrorMessage(
`An unexpected error occurred when sending a status report: ${getErrorMessage(
e
)}`
);
@@ -90009,7 +90056,7 @@ var path15 = __toESM(require("path"));
var url = __toESM(require("url"));
var import_zlib = __toESM(require("zlib"));
var core12 = __toESM(require_core());
var jsonschema = __toESM(require_lib2());
var jsonschema2 = __toESM(require_lib2());
// src/codeql.ts
var fs12 = __toESM(require("fs"));
@@ -90017,45 +90064,6 @@ var path13 = __toESM(require("path"));
var core11 = __toESM(require_core());
var toolrunner3 = __toESM(require_toolrunner());
// node_modules/@octokit/request-error/dist-src/index.js
var RequestError = class extends Error {
name;
/**
* http status code
*/
status;
/**
* Request options that lead to the error.
*/
request;
/**
* Response object if a response was received
*/
response;
constructor(message, statusCode, options) {
super(message);
this.name = "HttpError";
this.status = Number.parseInt(statusCode);
if (Number.isNaN(this.status)) {
this.status = 0;
}
if ("response" in options) {
this.response = options.response;
}
const requestCopy = Object.assign({}, options.request);
if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, {
authorization: options.request.headers.authorization.replace(
/(?<! ) .*$/,
" [REDACTED]"
)
});
}
requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
this.request = requestCopy;
}
};
// src/cli-errors.ts
var SUPPORTED_PLATFORMS = [
["linux", "x64"],
@@ -90223,6 +90231,9 @@ var cliErrorsConfig = {
cliErrorMessageCandidates: [
new RegExp(
"Query pack .* cannot be found\\. Check the spelling of the pack\\."
),
new RegExp(
"is not a .ql file, .qls file, a directory, or a query pack specification."
)
]
},
@@ -90379,13 +90390,13 @@ async function getTarVersion() {
}
if (stdout.includes("GNU tar")) {
const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
return { type: "gnu", version: match[1] };
} else if (stdout.includes("bsdtar")) {
const match = stdout.match(/bsdtar ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
return { type: "bsd", version: match[1] };
@@ -90765,7 +90776,7 @@ function tryGetTagNameFromUrl(url2, logger) {
return void 0;
}
const match = matches[matches.length - 1];
if (match === null || match.length !== 2) {
if (match?.length !== 2) {
logger.debug(
`Could not determine tag name for URL ${url2}. Matched ${JSON.stringify(
match
@@ -91225,7 +91236,7 @@ async function shouldEnableIndirectTracing(codeql, config) {
// src/codeql.ts
var cachedCodeQL = void 0;
var CODEQL_MINIMUM_VERSION = "2.16.6";
var CODEQL_MINIMUM_VERSION = "2.17.6";
var CODEQL_NEXT_MINIMUM_VERSION = "2.17.6";
var GHES_VERSION_MOST_RECENTLY_DEPRECATED = "3.13";
var GHES_MOST_RECENT_DEPRECATION_DATE = "2025-06-19";
@@ -91269,9 +91280,9 @@ async function setupCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliV
toolsVersion,
zstdAvailability
};
} catch (e) {
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") || // out of disk space
e instanceof RequestError && e.status === 429 ? ConfigurationError : Error;
} catch (rawError) {
const e = wrapApiConfigurationError(rawError);
const ErrorClass = e instanceof ConfigurationError || e instanceof Error && e.message.includes("ENOSPC") ? ConfigurationError : Error;
throw new ErrorClass(
`Unable to download and extract CodeQL CLI: ${getErrorMessage(e)}${e instanceof Error && e.stack ? `
@@ -91560,12 +91571,6 @@ ${output}`
} else {
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (!isSupportedToolsFeature(
await this.getVersion(),
"analysisSummaryV2Default" /* AnalysisSummaryV2IsDefault */
)) {
codeqlArgs.push("--new-analysis-summary");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {
codeqlArgs.push(...querySuitePaths);
@@ -93115,24 +93120,6 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo
);
codeQL = initCodeQLResult.codeql;
}
if (!await codeQL.supportsFeature(
"sarifMergeRunsFromEqualCategory" /* SarifMergeRunsFromEqualCategory */
)) {
await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
logger.warning(
"The CodeQL CLI does not support merging SARIF files. Merging files in the action."
);
if (await shouldShowCombineSarifFilesDeprecationWarning(
sarifObjects,
gitHubVersion
)) {
logger.warning(
`Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`
);
core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
}
return combineSarifFiles(sarifFiles, logger);
}
const baseTempDir = path15.resolve(tempDir, "combined-sarif");
fs14.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs14.mkdtempSync(path15.resolve(baseTempDir, "output-"));
@@ -93191,16 +93178,17 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) {
logger.info("Successfully uploaded results");
return response.data.id;
} catch (e) {
if (isHTTPError(e)) {
switch (e.status) {
const httpError = asHTTPError(e);
if (httpError !== void 0) {
switch (httpError.status) {
case 403:
core12.warning(e.message || GENERIC_403_MSG);
core12.warning(httpError.message || GENERIC_403_MSG);
break;
case 404:
core12.warning(e.message || GENERIC_404_MSG);
core12.warning(httpError.message || GENERIC_404_MSG);
break;
default:
core12.warning(e.message);
core12.warning(httpError.message);
break;
}
}
@@ -93305,7 +93293,7 @@ function validateSarifFileSchema(sarif, sarifFilePath, logger) {
}
logger.info(`Validating ${sarifFilePath}`);
const schema2 = require_sarif_schema_2_1_0();
const result = new jsonschema.Validator().validate(sarif, schema2);
const result = new jsonschema2.Validator().validate(sarif, schema2);
const warningAttributes = ["uri-reference", "uri"];
const errors = (result.errors ?? []).filter(
(err) => !(err.name === "format" && typeof err.argument === "string" && warningAttributes.includes(err.argument))
@@ -93365,12 +93353,11 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
}
return payloadObj;
}
async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features, logger, uploadTarget) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
async function postProcessSarifFiles(logger, features, checkoutPath, sarifPaths, category, analysis) {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif;
category = uploadTarget.fixCategory(logger, category);
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
for (const sarifPath of sarifPaths) {
const parsedSarif = readSarifFile(sarifPath);
@@ -93398,28 +93385,42 @@ async function uploadSpecifiedFiles(sarifPaths, checkoutPath, category, features
analysisKey,
environment
);
return { sarif, analysisKey, environment };
}
async function writePostProcessedFiles(logger, pathInput, uploadTarget, postProcessingResults) {
const outputPath = pathInput || getOptionalEnvVar("CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */);
if (outputPath !== void 0) {
dumpSarifFile(
JSON.stringify(postProcessingResults.sarif),
outputPath,
logger,
uploadTarget
);
} else {
logger.debug(`Not writing post-processed SARIF files.`);
}
}
async function uploadPostProcessedFiles(logger, checkoutPath, uploadTarget, postProcessingResults) {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = getToolNames(sarif);
logger.debug(`Validating that each SARIF run has a unique category`);
validateUniqueCategory(sarif, uploadTarget.sentinelPrefix);
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const dumpDir = process.env["CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */];
if (dumpDir) {
dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
}
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = import_zlib.default.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
const payload = buildPayload(
await getCommitOid(checkoutPath),
await getRef(),
analysisKey,
postProcessingResults.analysisKey,
getRequiredEnvParam("GITHUB_WORKFLOW"),
zippedSarif,
getWorkflowRunID(),
getWorkflowRunAttempt(),
checkoutURI,
environment,
postProcessingResults.environment,
toolNames,
await determineBaseBranchHeadCommitOid()
);
@@ -93450,14 +93451,14 @@ function dumpSarifFile(sarifPayload, outputDir, logger, uploadTarget) {
fs14.mkdirSync(outputDir, { recursive: true });
} else if (!fs14.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path specified by the ${"CODEQL_ACTION_SARIF_DUMP_DIR" /* SARIF_DUMP_DIR */} environment variable exists and is not a directory: ${outputDir}`
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`
);
}
const outputFile = path15.resolve(
outputDir,
`upload${uploadTarget.sarifExtension}`
);
logger.info(`Dumping processed SARIF file to ${outputFile}`);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs14.writeFileSync(outputFile, sarifPayload);
}
var STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1e3;
@@ -93613,7 +93614,7 @@ function filterAlertsByDiffRange(logger, sarif) {
}
// src/upload-sarif.ts
async function uploadSarif(logger, features, checkoutPath, sarifPath, category) {
async function postProcessAndUploadSarif(logger, features, uploadKind, checkoutPath, sarifPath, category, postProcessedOutputPath) {
const sarifGroups = await getGroupedSarifFilePaths(
logger,
sarifPath
@@ -93623,14 +93624,28 @@ async function uploadSarif(logger, features, checkoutPath, sarifPath, category)
sarifGroups
)) {
const analysisConfig = getAnalysisConfig(analysisKind);
uploadResults[analysisKind] = await uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
const postProcessingResults = await postProcessSarifFiles(
logger,
features,
checkoutPath,
sarifFiles,
category,
analysisConfig
);
await writePostProcessedFiles(
logger,
postProcessedOutputPath,
analysisConfig,
postProcessingResults
);
if (uploadKind === "always") {
uploadResults[analysisKind] = await uploadPostProcessedFiles(
logger,
checkoutPath,
analysisConfig,
postProcessingResults
);
}
}
return uploadResults;
}
@@ -93682,9 +93697,10 @@ async function run() {
const sarifPath = getRequiredInput("sarif_file");
const checkoutPath = getRequiredInput("checkout_path");
const category = getOptionalInput("category");
const uploadResults = await uploadSarif(
const uploadResults = await postProcessAndUploadSarif(
logger,
features,
"always",
checkoutPath,
sarifPath,
category
+418 -540
View File
File diff suppressed because it is too large Load Diff
+5 -5
View File
@@ -1,6 +1,6 @@
{
"name": "codeql",
"version": "4.30.9",
"version": "4.31.0",
"private": true,
"description": "CodeQL action",
"scripts": {
@@ -48,7 +48,7 @@
"jsonschema": "1.4.1",
"long": "^5.3.2",
"node-forge": "^1.3.1",
"octokit": "^5.0.3",
"octokit": "^5.0.4",
"semver": "^7.7.3",
"uuid": "^13.0.0"
},
@@ -56,7 +56,7 @@
"@ava/typescript": "6.0.0",
"@eslint/compat": "^1.4.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.37.0",
"@eslint/js": "^9.38.0",
"@microsoft/eslint-formatter-sarif": "^3.1.0",
"@octokit/types": "^15.0.0",
"@types/archiver": "^6.0.3",
@@ -67,10 +67,10 @@
"@types/node-forge": "^1.3.14",
"@types/semver": "^7.7.1",
"@types/sinon": "^17.0.4",
"@typescript-eslint/eslint-plugin": "^8.46.0",
"@typescript-eslint/eslint-plugin": "^8.46.1",
"@typescript-eslint/parser": "^8.41.0",
"ava": "^6.4.1",
"esbuild": "^0.25.10",
"esbuild": "^0.25.11",
"eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.8.7",
"eslint-plugin-filenames": "^1.3.2",
+9
View File
@@ -36,6 +36,7 @@ steps:
with:
output: "${{ runner.temp }}/results"
upload-database: false
post-processed-sarif-path: "${{ runner.temp }}/post-processed"
- name: Upload security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/upload-artifact@v4
@@ -52,6 +53,14 @@ steps:
quality-queries-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.quality.sarif.json
path: "${{ runner.temp }}/results/javascript.quality.sarif"
retention-days: 7
- name: Upload post-processed SARIF
uses: actions/upload-artifact@v4
with:
name: |
post-processed-${{ matrix.os }}-${{ matrix.version }}-${{ matrix.analysis-kinds }}.sarif.json
path: "${{ runner.temp }}/post-processed"
retention-days: 7
if-no-files-found: error
- name: Check quality query does not appear in security SARIF
if: contains(matrix.analysis-kinds, 'code-scanning')
uses: actions/github-script@v8
+1 -1
View File
@@ -117,7 +117,7 @@ for file in sorted((this_dir / 'checks').glob('*.yml')):
steps.extend([
{
'name': 'Install Node.js',
'uses': 'actions/setup-node@v5',
'uses': 'actions/setup-node@v6',
'with': {
'node-version': '20.x',
'cache': 'npm',
+3
View File
@@ -24,6 +24,9 @@ setupTests(test);
// but the first test would fail.
test("analyze action with RAM & threads from environment variables", async (t) => {
// This test frequently times out on Windows with the default timeout, so we bump
// it a bit to 20s.
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
+1
View File
@@ -24,6 +24,7 @@ setupTests(test);
// but the first test would fail.
test("analyze action with RAM & threads from action inputs", async (t) => {
t.timeout(1000 * 20);
await util.withTmpDir(async (tmpDir) => {
process.env["GITHUB_SERVER_URL"] = util.GITHUB_DOTCOM_URL;
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
+68 -26
View File
@@ -52,6 +52,7 @@ import {
} from "./trap-caching";
import * as uploadLib from "./upload-lib";
import { UploadResult } from "./upload-lib";
import { postProcessAndUploadSarif } from "./upload-sarif";
import * as util from "./util";
interface AnalysisStatusReport
@@ -211,7 +212,9 @@ async function runAutobuildIfLegacyGoWorkflow(config: Config, logger: Logger) {
async function run() {
const startedAt = new Date();
let uploadResult: UploadResult | undefined = undefined;
let uploadResults:
| Partial<Record<analyses.AnalysisKind, UploadResult>>
| undefined = undefined;
let runStats: QueriesStatusReport | undefined = undefined;
let config: Config | undefined = undefined;
let trapCacheCleanupTelemetry: TrapCacheCleanupStatusReport | undefined =
@@ -341,31 +344,67 @@ async function run() {
}
core.setOutput("db-locations", dbLocations);
core.setOutput("sarif-output", path.resolve(outputDir));
const uploadInput = actionsUtil.getOptionalInput("upload");
if (runStats && actionsUtil.getUploadValue(uploadInput) === "always") {
if (isCodeScanningEnabled(config)) {
uploadResult = await uploadLib.uploadFiles(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getOptionalInput("category"),
features,
const uploadKind = actionsUtil.getUploadValue(
actionsUtil.getOptionalInput("upload"),
);
if (runStats) {
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
const category = actionsUtil.getOptionalInput("category");
if (await features.getValue(Feature.AnalyzeUseNewUpload)) {
uploadResults = await postProcessAndUploadSarif(
logger,
analyses.CodeScanning,
features,
uploadKind,
checkoutPath,
outputDir,
category,
actionsUtil.getOptionalInput("post-processed-sarif-path"),
);
core.setOutput("sarif-id", uploadResult.sarifID);
} else if (uploadKind === "always") {
uploadResults = {};
if (isCodeScanningEnabled(config)) {
uploadResults[analyses.AnalysisKind.CodeScanning] =
await uploadLib.uploadFiles(
outputDir,
checkoutPath,
category,
features,
logger,
analyses.CodeScanning,
);
}
if (isCodeQualityEnabled(config)) {
uploadResults[analyses.AnalysisKind.CodeQuality] =
await uploadLib.uploadFiles(
outputDir,
checkoutPath,
category,
features,
logger,
analyses.CodeQuality,
);
}
} else {
uploadResults = {};
logger.info("Not uploading results");
}
if (isCodeQualityEnabled(config)) {
const analysis = analyses.CodeQuality;
const qualityUploadResult = await uploadLib.uploadFiles(
outputDir,
actionsUtil.getRequiredInput("checkout_path"),
actionsUtil.getOptionalInput("category"),
features,
logger,
analysis,
// Set the SARIF id outputs only if we have results for them, to avoid
// having keys with empty values in the action output.
if (uploadResults[analyses.AnalysisKind.CodeScanning] !== undefined) {
core.setOutput(
"sarif-id",
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
);
}
if (uploadResults[analyses.AnalysisKind.CodeQuality] !== undefined) {
core.setOutput(
"quality-sarif-id",
uploadResults[analyses.AnalysisKind.CodeQuality].sarifID,
);
core.setOutput("quality-sarif-id", qualityUploadResult.sarifID);
}
} else {
logger.info("Not uploading results");
@@ -408,12 +447,12 @@ async function run() {
if (util.isInTestMode()) {
logger.debug("In test mode. Waiting for processing is disabled.");
} else if (
uploadResult !== undefined &&
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined &&
actionsUtil.getRequiredInput("wait-for-processing") === "true"
) {
await uploadLib.waitForProcessing(
getRepositoryNwo(),
uploadResult.sarifID,
uploadResults[analyses.AnalysisKind.CodeScanning].sarifID,
getActionsLogger(),
);
}
@@ -450,13 +489,16 @@ async function run() {
return;
}
if (runStats && uploadResult) {
if (
runStats !== undefined &&
uploadResults?.[analyses.AnalysisKind.CodeScanning] !== undefined
) {
await sendStatusReport(
startedAt,
config,
{
...runStats,
...uploadResult.statusReport,
...uploadResults[analyses.AnalysisKind.CodeScanning].statusReport,
},
undefined,
trapCacheUploadTime,
@@ -466,7 +508,7 @@ async function run() {
dependencyCacheResults,
logger,
);
} else if (runStats) {
} else if (runStats !== undefined) {
await sendStatusReport(
startedAt,
config,
+17 -10
View File
@@ -7,12 +7,12 @@ import { getActionVersion, getRequiredInput } from "./actions-util";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import {
asHTTPError,
ConfigurationError,
getRequiredEnvParam,
GITHUB_DOTCOM_URL,
GitHubVariant,
GitHubVersion,
isHTTPError,
parseGitHubUrl,
parseMatrixInput,
} from "./util";
@@ -280,22 +280,29 @@ export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
}
export function wrapApiConfigurationError(e: unknown) {
if (isHTTPError(e)) {
const httpError = asHTTPError(e);
if (httpError !== undefined) {
if (
e.message.includes("API rate limit exceeded for installation") ||
e.message.includes("commit not found") ||
e.message.includes("Resource not accessible by integration") ||
/ref .* not found in this repository/.test(e.message)
[
/API rate limit exceeded/,
/commit not found/,
/Resource not accessible by integration/,
/ref .* not found in this repository/,
].some((pattern) => pattern.test(httpError.message))
) {
return new ConfigurationError(e.message);
} else if (
e.message.includes("Bad credentials") ||
e.message.includes("Not Found")
return new ConfigurationError(httpError.message);
}
if (
httpError.message.includes("Bad credentials") ||
httpError.message.includes("Not Found")
) {
return new ConfigurationError(
"Please check that your token is valid and has the required permissions: contents: read, security-events: write",
);
}
if (httpError.status === 429) {
return new ConfigurationError("API rate limit exceeded");
}
}
return e;
}
+14
View File
@@ -310,6 +310,20 @@ test("wrapCliConfigurationError - pack cannot be found", (t) => {
t.true(wrappedError instanceof ConfigurationError);
});
test("wrapCliConfigurationError - unknown query file", (t) => {
const commandError = new CommandInvocationError(
"codeql",
["database", "init"],
2,
"my-query-file is not a .ql file, .qls file, a directory, or a query pack specification. See the logs for more details.",
);
const cliError = new CliError(commandError);
const wrappedError = wrapCliConfigurationError(cliError);
t.true(wrappedError instanceof ConfigurationError);
});
test("wrapCliConfigurationError - pack missing auth", (t) => {
const commandError = new CommandInvocationError(
"codeql",
+3
View File
@@ -264,6 +264,9 @@ export const cliErrorsConfig: Record<
new RegExp(
"Query pack .* cannot be found\\. Check the spelling of the pack\\.",
),
new RegExp(
"is not a .ql file, .qls file, a directory, or a query pack specification.",
),
],
},
[CliConfigErrorCategory.PackMissingAuth]: {
-79
View File
@@ -36,7 +36,6 @@ import {
createTestConfig,
} from "./testing-utils";
import { ToolsDownloadStatusReport } from "./tools-download";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import { initializeEnvironment } from "./util";
@@ -870,84 +869,6 @@ test("does not pass a qlconfig to the CLI when it is undefined", async (t: Execu
});
});
const NEW_ANALYSIS_SUMMARY_TEST_CASES = [
{
codeqlVersion: makeVersionInfo("2.15.0", {
[ToolsFeature.AnalysisSummaryV2IsDefault]: true,
}),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},
flagPassed: false,
negativeFlagPassed: false,
},
{
codeqlVersion: makeVersionInfo("2.15.0"),
githubVersion: {
type: util.GitHubVariant.DOTCOM,
},
flagPassed: true,
negativeFlagPassed: false,
},
{
codeqlVersion: makeVersionInfo("2.15.0"),
githubVersion: {
type: util.GitHubVariant.GHES,
version: "3.10.0",
},
flagPassed: true,
negativeFlagPassed: false,
},
];
for (const {
codeqlVersion,
flagPassed,
githubVersion,
negativeFlagPassed,
} of NEW_ANALYSIS_SUMMARY_TEST_CASES) {
test(`database interpret-results passes ${
flagPassed
? "--new-analysis-summary"
: negativeFlagPassed
? "--no-new-analysis-summary"
: "nothing"
} for CodeQL version ${JSON.stringify(codeqlVersion)} and ${
util.GitHubVariant[githubVersion.type]
} ${githubVersion.version ? ` ${githubVersion.version}` : ""}`, async (t) => {
const runnerConstructorStub = stubToolRunnerConstructor();
const codeqlObject = await codeql.getCodeQLForTesting();
sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
// io throws because of the test CodeQL object.
sinon.stub(io, "which").resolves("");
await codeqlObject.databaseInterpretResults(
"",
[],
"",
"",
"",
"-v",
undefined,
"",
Object.assign({}, stubConfig, { gitHubVersion: githubVersion }),
createFeatures([]),
);
const actualArgs = runnerConstructorStub.firstCall.args[1] as string[];
t.is(
actualArgs.includes("--new-analysis-summary"),
flagPassed,
`--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`,
);
t.is(
actualArgs.includes("--no-new-analysis-summary"),
negativeFlagPassed,
`--no-new-analysis-summary should${
negativeFlagPassed ? "" : "n't"
} be passed`,
);
});
}
test("runTool summarizes several fatal errors", async (t) => {
const heapError =
"A fatal error occurred: Evaluator heap must be at least 384.00 MiB";
+4 -13
View File
@@ -3,7 +3,6 @@ import * as path from "path";
import * as core from "@actions/core";
import * as toolrunner from "@actions/exec/lib/toolrunner";
import { RequestError } from "@octokit/request-error";
import * as yaml from "js-yaml";
import {
@@ -268,7 +267,7 @@ let cachedCodeQL: CodeQL | undefined = undefined;
* The version flags below can be used to conditionally enable certain features
* on versions newer than this.
*/
const CODEQL_MINIMUM_VERSION = "2.16.6";
const CODEQL_MINIMUM_VERSION = "2.17.6";
/**
* This version will shortly become the oldest version of CodeQL that the Action will run with.
@@ -371,11 +370,11 @@ export async function setupCodeQL(
toolsVersion,
zstdAvailability,
};
} catch (e) {
} catch (rawError) {
const e = api.wrapApiConfigurationError(rawError);
const ErrorClass =
e instanceof util.ConfigurationError ||
(e instanceof Error && e.message.includes("ENOSPC")) || // out of disk space
(e instanceof RequestError && e.status === 429) // rate limited
(e instanceof Error && e.message.includes("ENOSPC")) // out of disk space
? util.ConfigurationError
: Error;
@@ -861,14 +860,6 @@ export async function getCodeQLForCmd(
} else {
codeqlArgs.push("--no-sarif-include-diagnostics");
}
if (
!isSupportedToolsFeature(
await this.getVersion(),
ToolsFeature.AnalysisSummaryV2IsDefault,
)
) {
codeqlArgs.push("--new-analysis-summary");
}
codeqlArgs.push(databasePath);
if (querySuitePaths) {
codeqlArgs.push(...querySuitePaths);
+15
View File
@@ -148,6 +148,7 @@ test("load empty config", async (t) => {
});
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput: languages,
repository: { owner: "github", repo: "example" },
@@ -187,6 +188,7 @@ test("load code quality config", async (t) => {
});
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
analysisKinds: [AnalysisKind.CodeQuality],
languagesInput: languages,
@@ -271,6 +273,7 @@ test("initActionState doesn't throw if there are queries configured in the repos
await t.notThrowsAsync(async () => {
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
analysisKinds: [AnalysisKind.CodeQuality],
languagesInput: languages,
@@ -309,6 +312,7 @@ test("loading a saved config produces the same config", async (t) => {
t.deepEqual(await configUtils.getConfig(tempDir, logger), undefined);
const config1 = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput: "javascript,python",
tempDir,
@@ -360,6 +364,7 @@ test("loading config with version mismatch throws", async (t) => {
.returns("does-not-exist");
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput: "javascript,python",
tempDir,
@@ -388,6 +393,7 @@ test("load input outside of workspace", async (t) => {
return await withTmpDir(async (tempDir) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: "../input",
tempDir,
@@ -415,6 +421,7 @@ test("load non-local input with invalid repo syntax", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile,
tempDir,
@@ -443,6 +450,7 @@ test("load non-existent input", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile,
@@ -526,6 +534,7 @@ test("load non-empty input", async (t) => {
const configFilePath = createConfigFile(inputFileContents, tempDir);
const actualConfig = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
buildModeInput: "none",
@@ -582,6 +591,7 @@ test("Using config input and file together, config input should be used.", async
const languagesInput = "javascript";
const config = await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile: configFilePath,
@@ -632,6 +642,7 @@ test("API client used when reading remote config", async (t) => {
const languagesInput = "javascript";
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
configFile,
@@ -652,6 +663,7 @@ test("Remote config handles the case where a directory is provided", async (t) =
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: repoReference,
tempDir,
@@ -680,6 +692,7 @@ test("Invalid format of remote config handled correctly", async (t) => {
const repoReference = "octo-org/codeql-config/config.yaml@main";
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
configFile: repoReference,
tempDir,
@@ -709,6 +722,7 @@ test("No detected languages", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
tempDir,
codeql,
@@ -731,6 +745,7 @@ test("Unknown languages", async (t) => {
try {
await configUtils.initConfig(
createFeatures([]),
createTestInitConfigInputs({
languagesInput,
tempDir,
+36 -8
View File
@@ -19,6 +19,7 @@ import {
calculateAugmentation,
ExcludeQueryFilter,
generateCodeScanningConfig,
parseUserConfig,
UserConfig,
} from "./config/db-config";
import { shouldPerformDiffInformedAnalysis } from "./diff-informed-analysis-utils";
@@ -525,10 +526,12 @@ async function downloadCacheWithTime(
}
async function loadUserConfig(
logger: Logger,
configFile: string,
workspacePath: string,
apiDetails: api.GitHubApiCombinedDetails,
tempDir: string,
validateConfig: boolean,
): Promise<UserConfig> {
if (isLocal(configFile)) {
if (configFile !== userConfigFromActionPath(tempDir)) {
@@ -541,9 +544,14 @@ async function loadUserConfig(
);
}
}
return getLocalConfig(configFile);
return getLocalConfig(logger, configFile, validateConfig);
} else {
return await getRemoteConfig(configFile, apiDetails);
return await getRemoteConfig(
logger,
configFile,
apiDetails,
validateConfig,
);
}
}
@@ -779,7 +787,10 @@ function hasQueryCustomisation(userConfig: UserConfig): boolean {
* This will parse the config from the user input if present, or generate
* a default config. The parsed config is then stored to a known location.
*/
export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
export async function initConfig(
features: FeatureEnablement,
inputs: InitConfigInputs,
): Promise<Config> {
const { logger, tempDir } = inputs;
// if configInput is set, it takes precedence over configFile
@@ -799,11 +810,14 @@ export async function initConfig(inputs: InitConfigInputs): Promise<Config> {
logger.debug("No configuration file was provided");
} else {
logger.debug(`Using configuration file: ${inputs.configFile}`);
const validateConfig = await features.getValue(Feature.ValidateDbConfig);
userConfig = await loadUserConfig(
logger,
inputs.configFile,
inputs.workspacePath,
inputs.apiDetails,
tempDir,
validateConfig,
);
}
@@ -897,7 +911,11 @@ function isLocal(configPath: string): boolean {
return configPath.indexOf("@") === -1;
}
function getLocalConfig(configFile: string): UserConfig {
function getLocalConfig(
logger: Logger,
configFile: string,
validateConfig: boolean,
): UserConfig {
// Error if the file does not exist
if (!fs.existsSync(configFile)) {
throw new ConfigurationError(
@@ -905,12 +923,19 @@ function getLocalConfig(configFile: string): UserConfig {
);
}
return yaml.load(fs.readFileSync(configFile, "utf8")) as UserConfig;
return parseUserConfig(
logger,
configFile,
fs.readFileSync(configFile, "utf-8"),
validateConfig,
);
}
async function getRemoteConfig(
logger: Logger,
configFile: string,
apiDetails: api.GitHubApiCombinedDetails,
validateConfig: boolean,
): Promise<UserConfig> {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp(
@@ -918,7 +943,7 @@ async function getRemoteConfig(
);
const pieces = format.exec(configFile);
// 5 = 4 groups + the whole expression
if (pieces === null || pieces.groups === undefined || pieces.length < 5) {
if (pieces?.groups === undefined || pieces.length < 5) {
throw new ConfigurationError(
errorMessages.getConfigFileRepoFormatInvalidMessage(configFile),
);
@@ -946,9 +971,12 @@ async function getRemoteConfig(
);
}
return yaml.load(
return parseUserConfig(
logger,
configFile,
Buffer.from(fileContents, "base64").toString("binary"),
) as UserConfig;
validateConfig,
);
}
/**
+115 -1
View File
@@ -2,7 +2,13 @@ import test, { ExecutionContext } from "ava";
import { RepositoryProperties } from "../feature-flags/properties";
import { KnownLanguage, Language } from "../languages";
import { prettyPrintPack } from "../util";
import { getRunnerLogger } from "../logging";
import {
checkExpectedLogMessages,
getRecordingLogger,
LoggedMessage,
} from "../testing-utils";
import { ConfigurationError, prettyPrintPack } from "../util";
import * as dbConfig from "./db-config";
@@ -391,3 +397,111 @@ test(
{},
/"a-pack-without-a-scope" is not a valid pack/,
);
test("parseUserConfig - successfully parses valid YAML", (t) => {
const result = dbConfig.parseUserConfig(
getRunnerLogger(true),
"test",
`
paths-ignore:
- "some/path"
queries:
- uses: foo
some-unknown-option: true
`,
true,
);
t.truthy(result);
if (t.truthy(result["paths-ignore"])) {
t.is(result["paths-ignore"].length, 1);
t.is(result["paths-ignore"][0], "some/path");
}
if (t.truthy(result["queries"])) {
t.is(result["queries"].length, 1);
t.deepEqual(result["queries"][0], { uses: "foo" });
}
});
test("parseUserConfig - throws a ConfigurationError if the file is not valid YAML", (t) => {
t.throws(
() =>
dbConfig.parseUserConfig(
getRunnerLogger(true),
"test",
`
paths-ignore:
- "some/path"
queries:
- foo
`,
true,
),
{
instanceOf: ConfigurationError,
},
);
});
test("parseUserConfig - validation isn't picky about `query-filters`", (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
t.notThrows(() =>
dbConfig.parseUserConfig(
logger,
"test",
`
query-filters:
- something
- include: foo
- exclude: bar
`,
true,
),
);
});
test("parseUserConfig - throws a ConfigurationError if validation fails", (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
t.throws(
() =>
dbConfig.parseUserConfig(
logger,
"test",
`
paths-ignore:
- "some/path"
queries: true
`,
true,
),
{
instanceOf: ConfigurationError,
message:
'The configuration file "test" is invalid: instance.queries is not of a type(s) array.',
},
);
const expectedMessages = ["instance.queries is not of a type(s) array"];
checkExpectedLogMessages(t, loggedMessages, expectedMessages);
});
test("parseUserConfig - throws no ConfigurationError if validation should fail, but feature is disabled", (t) => {
const loggedMessages: LoggedMessage[] = [];
const logger = getRecordingLogger(loggedMessages);
t.notThrows(() =>
dbConfig.parseUserConfig(
logger,
"test",
`
paths-ignore:
- "some/path"
queries: true
`,
false,
),
);
});
+53 -4
View File
@@ -1,5 +1,7 @@
import * as path from "path";
import * as yaml from "js-yaml";
import * as jsonschema from "jsonschema";
import * as semver from "semver";
import * as errorMessages from "../error-messages";
@@ -378,10 +380,7 @@ function combineQueries(
const result: QuerySpec[] = [];
// Query settings obtained from the repository properties have the highest precedence.
if (
augmentationProperties.repoPropertyQueries &&
augmentationProperties.repoPropertyQueries.input
) {
if (augmentationProperties.repoPropertyQueries?.input) {
logger.info(
`Found query configuration in the repository properties (${RepositoryPropertyName.EXTRA_QUERIES}): ` +
`${augmentationProperties.repoPropertyQueries.input.map((q) => q.uses).join(", ")}`,
@@ -474,3 +473,53 @@ export function generateCodeScanningConfig(
return augmentedConfig;
}
/**
* Attempts to parse `contents` into a `UserConfig` value.
*
* @param logger The logger to use.
* @param pathInput The path to the file where `contents` was obtained from, for use in error messages.
* @param contents The string contents of a YAML file to try and parse as a `UserConfig`.
* @param validateConfig Whether to validate the configuration file against the schema.
* @returns The `UserConfig` corresponding to `contents`, if parsing was successful.
* @throws A `ConfigurationError` if parsing failed.
*/
export function parseUserConfig(
logger: Logger,
pathInput: string,
contents: string,
validateConfig: boolean,
): UserConfig {
try {
const schema =
// eslint-disable-next-line @typescript-eslint/no-require-imports
require("../../src/db-config-schema.json") as jsonschema.Schema;
const doc = yaml.load(contents);
if (validateConfig) {
const result = new jsonschema.Validator().validate(doc, schema);
if (result.errors.length > 0) {
for (const error of result.errors) {
logger.error(error.stack);
}
throw new ConfigurationError(
errorMessages.getInvalidConfigFileMessage(
pathInput,
result.errors.map((e) => e.stack),
),
);
}
}
return doc as UserConfig;
} catch (error) {
if (error instanceof yaml.YAMLException) {
throw new ConfigurationError(
errorMessages.getConfigFileParseErrorMessage(pathInput, error.message),
);
}
throw error;
}
}
+34
View File
@@ -5,6 +5,7 @@ import test from "ava";
import * as sinon from "sinon";
import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import { GitHubApiDetails } from "./api-client";
import * as apiClient from "./api-client";
import { createStubCodeQL } from "./codeql";
@@ -108,6 +109,39 @@ test("Abort database upload if 'upload-database' input set to false", async (t)
});
});
test("Abort database upload if 'analysis-kinds: code-scanning' is not enabled", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
sinon
.stub(actionsUtil, "getRequiredInput")
.withArgs("upload-database")
.returns("true");
sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true);
await mockHttpRequests(201);
const loggedMessages = [];
await uploadDatabases(
testRepoName,
getCodeQL(),
{
...getTestConfig(tmpDir),
analysisKinds: [AnalysisKind.CodeQuality],
},
testApiDetails,
getRecordingLogger(loggedMessages),
);
t.assert(
loggedMessages.find(
(v: LoggedMessage) =>
v.type === "debug" &&
v.message ===
"Not uploading database because 'analysis-kinds: code-scanning' is not enabled.",
) !== undefined,
);
});
});
test("Abort database upload if running against GHES", async (t) => {
await withTmpDir(async (tmpDir) => {
setupActionsVars(tmpDir, tmpDir);
+8
View File
@@ -1,6 +1,7 @@
import * as fs from "fs";
import * as actionsUtil from "./actions-util";
import { AnalysisKind } from "./analyses";
import { getApiClient, GitHubApiDetails } from "./api-client";
import { type CodeQL } from "./codeql";
import { Config } from "./config-utils";
@@ -22,6 +23,13 @@ export async function uploadDatabases(
return;
}
if (!config.analysisKinds.includes(AnalysisKind.CodeScanning)) {
logger.debug(
`Not uploading database because 'analysis-kinds: ${AnalysisKind.CodeScanning}' is not enabled.`,
);
return;
}
if (util.isInTestMode()) {
logger.debug("In test mode. Skipping database upload.");
return;
+145
View File
@@ -0,0 +1,145 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "CodeQL Database Configuration",
"description": "Format of the config file supplied by the user for CodeQL analysis",
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Name of the configuration"
},
"disable-default-queries": {
"type": "boolean",
"description": "Whether to disable default queries"
},
"queries": {
"type": "array",
"description": "List of additional queries to run",
"items": {
"$ref": "#/definitions/QuerySpec"
}
},
"paths-ignore": {
"type": "array",
"description": "Paths to ignore during analysis",
"items": {
"type": "string"
}
},
"paths": {
"type": "array",
"description": "Paths to include in analysis",
"items": {
"type": "string"
}
},
"packs": {
"description": "Query packs to include. Can be a simple array for single-language analysis or an object with language-specific arrays for multi-language analysis",
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
}
},
{
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "string"
}
}
}
]
},
"query-filters": {
"type": "array",
"description": "Set of query filters to include and exclude extra queries based on CodeQL query suite include and exclude properties",
"items": {
"$ref": "#/definitions/QueryFilter"
}
}
},
"additionalProperties": true,
"definitions": {
"QuerySpec": {
"type": "object",
"description": "Detailed query specification object",
"properties": {
"name": {
"type": "string",
"description": "Optional name for the query"
},
"uses": {
"type": "string",
"description": "The query or query suite to use"
}
},
"required": ["uses"],
"additionalProperties": false
},
"QueryFilter": {
"description": "Query filter that can either include or exclude queries",
"oneOf": [
{
"$ref": "#/definitions/ExcludeQueryFilter"
},
{
"$ref": "#/definitions/IncludeQueryFilter"
},
{}
]
},
"ExcludeQueryFilter": {
"type": "object",
"description": "Filter to exclude queries",
"properties": {
"exclude": {
"type": "object",
"description": "Queries to exclude",
"additionalProperties": {
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
}
},
{
"type": "string"
}
]
}
}
},
"required": ["exclude"],
"additionalProperties": false
},
"IncludeQueryFilter": {
"type": "object",
"description": "Filter to include queries",
"properties": {
"include": {
"type": "object",
"description": "Queries to include",
"additionalProperties": {
"oneOf": [
{
"type": "array",
"items": {
"type": "string"
}
},
{
"type": "string"
}
]
}
}
},
"required": ["include"],
"additionalProperties": false
}
}
}
+16
View File
@@ -14,6 +14,22 @@ export function getConfigFileDoesNotExistErrorMessage(
return `The configuration file "${configFile}" does not exist`;
}
export function getConfigFileParseErrorMessage(
configFile: string,
message: string,
): string {
return `Cannot parse "${configFile}": ${message}`;
}
export function getInvalidConfigFileMessage(
configFile: string,
messages: string[],
): string {
const andMore =
messages.length > 10 ? `, and ${messages.length - 10} more.` : ".";
return `The configuration file "${configFile}" is invalid: ${messages.slice(0, 10).join(", ")}${andMore}`;
}
export function getConfigFileRepoFormatInvalidMessage(
configFile: string,
): string {
+17 -4
View File
@@ -44,6 +44,7 @@ export interface FeatureEnablement {
*/
export enum Feature {
AllowToolcacheInput = "allow_toolcache_input",
AnalyzeUseNewUpload = "analyze_use_new_upload",
CleanupTrapCaches = "cleanup_trap_caches",
CppDependencyInstallation = "cpp_dependency_installation_enabled",
DiffInformedQueries = "diff_informed_queries",
@@ -77,6 +78,7 @@ export enum Feature {
QaTelemetryEnabled = "qa_telemetry_enabled",
ResolveSupportedLanguagesUsingCli = "resolve_supported_languages_using_cli",
UseRepositoryProperties = "use_repository_properties",
ValidateDbConfig = "validate_db_config",
}
export const featureConfig: Record<
@@ -115,6 +117,11 @@ export const featureConfig: Record<
envVar: "CODEQL_ACTION_ALLOW_TOOLCACHE_INPUT",
minimumVersion: undefined,
},
[Feature.AnalyzeUseNewUpload]: {
defaultValue: false,
envVar: "CODEQL_ACTION_ANALYZE_USE_NEW_UPLOAD",
minimumVersion: undefined,
},
[Feature.CleanupTrapCaches]: {
defaultValue: false,
envVar: "CODEQL_ACTION_CLEANUP_TRAP_CACHES",
@@ -287,6 +294,11 @@ export const featureConfig: Record<
envVar: "CODEQL_ACTION_JAVA_MINIMIZE_DEPENDENCY_JARS",
minimumVersion: "2.23.0",
},
[Feature.ValidateDbConfig]: {
defaultValue: false,
envVar: "CODEQL_ACTION_VALIDATE_DB_CONFIG",
minimumVersion: undefined,
},
};
/**
@@ -641,7 +653,7 @@ class GitHubFeatureFlags {
}
this.logger.debug(
"Loaded the following default values for the feature flags from the Code Scanning API:",
"Loaded the following default values for the feature flags from the CodeQL Action API:",
);
for (const [feature, value] of Object.entries(remoteFlags).sort(
([nameA], [nameB]) => nameA.localeCompare(nameB),
@@ -651,12 +663,13 @@ class GitHubFeatureFlags {
this.hasAccessedRemoteFeatureFlags = true;
return remoteFlags;
} catch (e) {
if (util.isHTTPError(e) && e.status === 403) {
const httpError = util.asHTTPError(e);
if (httpError?.status === 403) {
this.logger.warning(
"This run of the CodeQL Action does not have permission to access Code Scanning API endpoints. " +
"This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
"As a result, it will not be opted into any experimental features. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the Action has the 'security-events: write' permission. Details: ${e.message}`,
`please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
);
this.hasAccessedRemoteFeatureFlags = false;
return {};
+1 -1
View File
@@ -325,7 +325,7 @@ async function run() {
}
analysisKinds = await getAnalysisKinds(logger);
config = await initConfig({
config = await initConfig(features, {
analysisKinds,
languagesInput: getOptionalInput("languages"),
queriesInput: getOptionalInput("queries"),
+2 -1
View File
@@ -61,10 +61,11 @@ export async function initCodeQL(
}
export async function initConfig(
features: FeatureEnablement,
inputs: configUtils.InitConfigInputs,
): Promise<configUtils.Config> {
return await withGroupAsync("Load language configuration", async () => {
return await configUtils.initConfig(inputs);
return await configUtils.initConfig(features, inputs);
});
}
+3 -8
View File
@@ -34,15 +34,10 @@ export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.22.4";
* Actions Cache client library. Instead we place a limit on the uncompressed
* size of the overlay-base database.
*
* Assuming 2.5:1 compression ratio, the 15 GB limit on uncompressed data would
* translate to a limit of around 6 GB after compression. This is a high limit
* compared to the default 10GB Actions Cache capacity, but enforcement of Actions
* Cache quotas is not immediate.
*
* TODO: revisit this limit before removing the restriction for overlay analysis
* to the `github` and `dsp-testing` orgs.
* Assuming 2.5:1 compression ratio, the 7.5 GB limit on uncompressed data would
* translate to a limit of around 3 GB after compression.
*/
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 15000;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500;
const OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES =
OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1_000_000;
+1 -1
View File
@@ -168,7 +168,7 @@ export function tryGetTagNameFromUrl(
// assumes less about the structure of the URL.
const match = matches[matches.length - 1];
if (match === null || match.length !== 2) {
if (match?.length !== 2) {
logger.debug(
`Could not determine tag name for URL ${url}. Matched ${JSON.stringify(
match,
+1 -1
View File
@@ -30,7 +30,7 @@ async function runWrapper() {
logger,
);
if ((config && config.debugMode) || core.isDebug()) {
if (config?.debugMode || core.isDebug()) {
const logFilePath = core.getState("proxy-log-file");
logger.info(
"Debug mode is on. Uploading proxy log as Actions debugging artifact...",
+15 -10
View File
@@ -23,7 +23,6 @@ import { getRepositoryNwo } from "./repository";
import { ToolsSource } from "./setup-codeql";
import {
ConfigurationError,
isHTTPError,
getRequiredEnvParam,
getCachedCodeQlVersion,
isInTestMode,
@@ -33,6 +32,7 @@ import {
BuildMode,
getErrorMessage,
getTestingEnvironment,
asHTTPError,
} from "./util";
export enum ActionName {
@@ -387,9 +387,9 @@ export async function createStatusReportBase(
}
const OUT_OF_DATE_MSG =
"CodeQL Action is out-of-date. Please upgrade to the latest version of codeql-action.";
"CodeQL Action is out-of-date. Please upgrade to the latest version of `codeql-action`.";
const INCOMPATIBLE_MSG =
"CodeQL Action version is incompatible with the code scanning endpoint. Please update to a compatible version of codeql-action.";
"CodeQL Action version is incompatible with the API endpoint. Please update to a compatible version of `codeql-action`.";
/**
* Send a status report to the code_scanning/analysis/status endpoint.
@@ -429,8 +429,9 @@ export async function sendStatusReport<S extends StatusReportBase>(
},
);
} catch (e) {
if (isHTTPError(e)) {
switch (e.status) {
const httpError = asHTTPError(e);
if (httpError !== undefined) {
switch (httpError.status) {
case 403:
if (
getWorkflowEventName() === "push" &&
@@ -438,16 +439,20 @@ export async function sendStatusReport<S extends StatusReportBase>(
) {
core.warning(
'Workflows triggered by Dependabot on the "push" event run with read-only access. ' +
"Uploading Code Scanning results requires write access. " +
'To use Code Scanning with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
"Uploading CodeQL results requires write access. " +
'To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. ' +
`See ${DocUrl.SCANNING_ON_PUSH} for more information on how to configure these events.`,
);
} else {
core.warning(e.message);
core.warning(
"This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. " +
"This could be because the Action is running on a pull request from a fork. If not, " +
`please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}`,
);
}
return;
case 404:
core.warning(e.message);
core.warning(httpError.message);
return;
case 422:
// schema incompatibility when reporting status
@@ -465,7 +470,7 @@ export async function sendStatusReport<S extends StatusReportBase>(
// something else has gone wrong and the request/response will be logged by octokit
// it's possible this is a transient error and we should continue scanning
core.warning(
`An unexpected error occurred when sending code scanning status report: ${getErrorMessage(
`An unexpected error occurred when sending a status report: ${getErrorMessage(
e,
)}`,
);
+2 -2
View File
@@ -35,14 +35,14 @@ async function getTarVersion(): Promise<TarVersion> {
// Return whether this is GNU tar or BSD tar, and the version number
if (stdout.includes("GNU tar")) {
const match = stdout.match(/tar \(GNU tar\) ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
return { type: "gnu", version: match[1] };
} else if (stdout.includes("bsdtar")) {
const match = stdout.match(/bsdtar ([0-9.]+)/);
if (!match || !match[1]) {
if (!match?.[1]) {
throw new Error("Failed to parse output of tar --version.");
}
+18 -1
View File
@@ -2,7 +2,7 @@ import { TextDecoder } from "node:util";
import path from "path";
import * as github from "@actions/github";
import { TestFn } from "ava";
import { ExecutionContext, TestFn } from "ava";
import nock from "nock";
import * as sinon from "sinon";
@@ -180,6 +180,23 @@ export function getRecordingLogger(messages: LoggedMessage[]): Logger {
};
}
export function checkExpectedLogMessages(
t: ExecutionContext<any>,
messages: LoggedMessage[],
expectedMessages: string[],
) {
for (const expectedMessage of expectedMessages) {
t.assert(
messages.some(
(msg) =>
typeof msg.message === "string" &&
msg.message.includes(expectedMessage),
),
`Expected '${expectedMessage}' in the logger output, but didn't find it in:\n ${messages.map((m) => ` - '${m.message}'`).join("\n")}`,
);
}
}
/** Mock the HTTP request to the feature flags enablement API endpoint. */
export function mockFeatureFlagApiEndpoint(
responseStatusCode: number,
-2
View File
@@ -3,13 +3,11 @@ import * as semver from "semver";
import type { VersionInfo } from "./codeql";
export enum ToolsFeature {
AnalysisSummaryV2IsDefault = "analysisSummaryV2Default",
BuiltinExtractorsSpecifyDefaultQueries = "builtinExtractorsSpecifyDefaultQueries",
DatabaseInterpretResultsSupportsSarifRunProperty = "databaseInterpretResultsSupportsSarifRunProperty",
ForceOverwrite = "forceOverwrite",
IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries",
PythonDefaultIsToNotExtractStdlib = "pythonDefaultIsToNotExtractStdlib",
SarifMergeRunsFromEqualCategory = "sarifMergeRunsFromEqualCategory",
}
/**
+2 -2
View File
@@ -13,8 +13,8 @@ import * as gitUtils from "./git-utils";
import { Language } from "./languages";
import { Logger } from "./logging";
import {
asHTTPError,
getErrorMessage,
isHTTPError,
tryGetFolderBytes,
waitForResultWithTimeLimit,
} from "./util";
@@ -236,7 +236,7 @@ export async function cleanupTrapCaches(
}
return { trap_cache_cleanup_size_bytes: totalBytesCleanedUp };
} catch (e) {
if (isHTTPError(e) && e.status === 403) {
if (asHTTPError(e)?.status === 403) {
logger.warning(
"Could not cleanup TRAP caches as the token did not have the required permissions. " +
'To clean up TRAP caches, ensure the token has the "actions:write" permission. ' +
+141 -77
View File
@@ -21,7 +21,6 @@ import * as gitUtils from "./git-utils";
import { initCodeQL } from "./init";
import { Logger } from "./logging";
import { getRepositoryNwo, RepositoryNwo } from "./repository";
import { ToolsFeature } from "./tools-features";
import * as util from "./util";
import {
ConfigurationError,
@@ -269,32 +268,6 @@ async function combineSarifFilesUsingCLI(
codeQL = initCodeQLResult.codeql;
}
if (
!(await codeQL.supportsFeature(
ToolsFeature.SarifMergeRunsFromEqualCategory,
))
) {
await throwIfCombineSarifFilesDisabled(sarifObjects, gitHubVersion);
logger.warning(
"The CodeQL CLI does not support merging SARIF files. Merging files in the action.",
);
if (
await shouldShowCombineSarifFilesDeprecationWarning(
sarifObjects,
gitHubVersion,
)
) {
logger.warning(
`Uploading multiple CodeQL runs with the same category is deprecated ${deprecationWarningMessage} for CodeQL CLI 2.16.6 and earlier. Please update your CodeQL CLI version or update your workflow to set a distinct category for each CodeQL run. ${deprecationMoreInformationMessage}`,
);
core.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true");
}
return combineSarifFiles(sarifFiles, logger);
}
const baseTempDir = path.resolve(tempDir, "combined-sarif");
fs.mkdirSync(baseTempDir, { recursive: true });
const outputDirectory = fs.mkdtempSync(path.resolve(baseTempDir, "output-"));
@@ -386,16 +359,17 @@ export async function uploadPayload(
logger.info("Successfully uploaded results");
return response.data.id as string;
} catch (e) {
if (util.isHTTPError(e)) {
switch (e.status) {
const httpError = util.asHTTPError(e);
if (httpError !== undefined) {
switch (httpError.status) {
case 403:
core.warning(e.message || GENERIC_403_MSG);
core.warning(httpError.message || GENERIC_403_MSG);
break;
case 404:
core.warning(e.message || GENERIC_404_MSG);
core.warning(httpError.message || GENERIC_404_MSG);
break;
default:
core.warning(e.message);
core.warning(httpError.message);
break;
}
}
@@ -687,51 +661,39 @@ export function buildPayload(
return payloadObj;
}
/**
* Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
* to.
*/
export async function uploadFiles(
inputSarifPath: string,
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate,
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget,
);
export interface PostProcessingResults {
sarif: util.SarifFile;
analysisKey: string;
environment: string;
}
/**
* Uploads the given array of SARIF files.
* Performs post-processing of the SARIF files given by `sarifPaths`.
*
* @param logger The logger to use.
* @param features Information about enabled features.
* @param checkoutPath The path where the repo was checked out at.
* @param sarifPaths The paths of the SARIF files to post-process.
* @param category The analysis category.
* @param analysis The analysis configuration.
*
* @returns Returns the results of post-processing the SARIF files,
* including the resulting SARIF file.
*/
export async function uploadSpecifiedFiles(
sarifPaths: string[],
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
export async function postProcessSarifFiles(
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
logger.info(`Processing sarif files: ${JSON.stringify(sarifPaths)}`);
features: FeatureEnablement,
checkoutPath: string,
sarifPaths: string[],
category: string | undefined,
analysis: analyses.AnalysisConfig,
): Promise<PostProcessingResults> {
logger.info(`Post-processing sarif files: ${JSON.stringify(sarifPaths)}`);
const gitHubVersion = await getGitHubVersion();
let sarif: SarifFile;
category = uploadTarget.fixCategory(logger, category);
category = analysis.fixCategory(logger, category);
if (sarifPaths.length > 1) {
// Validate that the files we were asked to upload are all valid SARIF files
@@ -767,6 +729,113 @@ export async function uploadSpecifiedFiles(
environment,
);
return { sarif, analysisKey, environment };
}
/**
* Writes the post-processed SARIF file to disk, if needed based on `pathInput` or the `SARIF_DUMP_DIR`.
*
* @param logger The logger to use.
* @param pathInput The input provided for `post-processed-sarif-path`.
* @param uploadTarget The upload target.
* @param processingResults The results of post-processing SARIF files.
*/
export async function writePostProcessedFiles(
logger: Logger,
pathInput: string | undefined,
uploadTarget: analyses.AnalysisConfig,
postProcessingResults: PostProcessingResults,
) {
// If there's an explicit input, use that. Otherwise, use the value from the environment variable.
const outputPath = pathInput || util.getOptionalEnvVar(EnvVar.SARIF_DUMP_DIR);
// If we have a non-empty output path, write the SARIF file to it.
if (outputPath !== undefined) {
dumpSarifFile(
JSON.stringify(postProcessingResults.sarif),
outputPath,
logger,
uploadTarget,
);
} else {
logger.debug(`Not writing post-processed SARIF files.`);
}
}
/**
* Uploads a single SARIF file or a directory of SARIF files depending on what `inputSarifPath` refers
* to.
*/
export async function uploadFiles(
inputSarifPath: string,
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
const sarifPaths = getSarifFilePaths(
inputSarifPath,
uploadTarget.sarifPredicate,
);
return uploadSpecifiedFiles(
sarifPaths,
checkoutPath,
category,
features,
logger,
uploadTarget,
);
}
/**
* Uploads the given array of SARIF files.
*/
async function uploadSpecifiedFiles(
sarifPaths: string[],
checkoutPath: string,
category: string | undefined,
features: FeatureEnablement,
logger: Logger,
uploadTarget: analyses.AnalysisConfig,
): Promise<UploadResult> {
const processingResults: PostProcessingResults = await postProcessSarifFiles(
logger,
features,
checkoutPath,
sarifPaths,
category,
uploadTarget,
);
return uploadPostProcessedFiles(
logger,
checkoutPath,
uploadTarget,
processingResults,
);
}
/**
* Uploads the results of post-processing SARIF files to the specified upload target.
*
* @param logger The logger to use.
* @param checkoutPath The path at which the repository was checked out.
* @param uploadTarget The analysis configuration.
* @param postProcessingResults The results of post-processing SARIF files.
*
* @returns The results of uploading the `postProcessingResults` to `uploadTarget`.
*/
export async function uploadPostProcessedFiles(
logger: Logger,
checkoutPath: string,
uploadTarget: analyses.AnalysisConfig,
postProcessingResults: PostProcessingResults,
): Promise<UploadResult> {
logger.startGroup(`Uploading ${uploadTarget.name} results`);
const sarif = postProcessingResults.sarif;
const toolNames = util.getToolNames(sarif);
logger.debug(`Validating that each SARIF run has a unique category`);
@@ -774,11 +843,6 @@ export async function uploadSpecifiedFiles(
logger.debug(`Serializing SARIF for upload`);
const sarifPayload = JSON.stringify(sarif);
const dumpDir = process.env[EnvVar.SARIF_DUMP_DIR];
if (dumpDir) {
dumpSarifFile(sarifPayload, dumpDir, logger, uploadTarget);
}
logger.debug(`Compressing serialized SARIF`);
const zippedSarif = zlib.gzipSync(sarifPayload).toString("base64");
const checkoutURI = url.pathToFileURL(checkoutPath).href;
@@ -786,13 +850,13 @@ export async function uploadSpecifiedFiles(
const payload = buildPayload(
await gitUtils.getCommitOid(checkoutPath),
await gitUtils.getRef(),
analysisKey,
postProcessingResults.analysisKey,
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
zippedSarif,
actionsUtil.getWorkflowRunID(),
actionsUtil.getWorkflowRunAttempt(),
checkoutURI,
environment,
postProcessingResults.environment,
toolNames,
await gitUtils.determineBaseBranchHeadCommitOid(),
);
@@ -838,14 +902,14 @@ function dumpSarifFile(
fs.mkdirSync(outputDir, { recursive: true });
} else if (!fs.lstatSync(outputDir).isDirectory()) {
throw new ConfigurationError(
`The path specified by the ${EnvVar.SARIF_DUMP_DIR} environment variable exists and is not a directory: ${outputDir}`,
`The path that processed SARIF files should be written to exists, but is not a directory: ${outputDir}`,
);
}
const outputFile = path.resolve(
outputDir,
`upload${uploadTarget.sarifExtension}`,
);
logger.info(`Dumping processed SARIF file to ${outputFile}`);
logger.info(`Writing processed SARIF file to ${outputFile}`);
fs.writeFileSync(outputFile, sarifPayload);
}
+3 -2
View File
@@ -16,7 +16,7 @@ import {
isThirdPartyAnalysis,
} from "./status-report";
import * as upload_lib from "./upload-lib";
import { uploadSarif } from "./upload-sarif";
import { postProcessAndUploadSarif } from "./upload-sarif";
import {
ConfigurationError,
checkActionVersion,
@@ -90,9 +90,10 @@ async function run() {
const checkoutPath = actionsUtil.getRequiredInput("checkout_path");
const category = actionsUtil.getOptionalInput("category");
const uploadResults = await uploadSarif(
const uploadResults = await postProcessAndUploadSarif(
logger,
features,
"always",
checkoutPath,
sarifPath,
category,
+114 -33
View File
@@ -9,7 +9,7 @@ import { getRunnerLogger } from "./logging";
import { createFeatures, setupTests } from "./testing-utils";
import { UploadResult } from "./upload-lib";
import * as uploadLib from "./upload-lib";
import { uploadSarif } from "./upload-sarif";
import { postProcessAndUploadSarif } from "./upload-sarif";
import * as util from "./util";
setupTests(test);
@@ -19,7 +19,27 @@ interface UploadSarifExpectedResult {
expectedFiles?: string[];
}
const uploadSarifMacro = test.macro({
function mockPostProcessSarifFiles() {
const postProcessSarifFiles = sinon.stub(uploadLib, "postProcessSarifFiles");
for (const analysisKind of Object.values(AnalysisKind)) {
const analysisConfig = getAnalysisConfig(analysisKind);
postProcessSarifFiles
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
sinon.match.any,
analysisConfig,
)
.resolves({ sarif: { runs: [] }, analysisKey: "", environment: "" });
}
return postProcessSarifFiles;
}
const postProcessAndUploadSarifMacro = test.macro({
exec: async (
t: ExecutionContext<unknown>,
sarifFiles: string[],
@@ -33,21 +53,16 @@ const uploadSarifMacro = test.macro({
const toFullPath = (filename: string) => path.join(tempDir, filename);
const uploadSpecifiedFiles = sinon.stub(
const postProcessSarifFiles = mockPostProcessSarifFiles();
const uploadPostProcessedFiles = sinon.stub(
uploadLib,
"uploadSpecifiedFiles",
"uploadPostProcessedFiles",
);
for (const analysisKind of Object.values(AnalysisKind)) {
uploadSpecifiedFiles
.withArgs(
sinon.match.any,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
)
const analysisConfig = getAnalysisConfig(analysisKind);
uploadPostProcessedFiles
.withArgs(logger, sinon.match.any, analysisConfig, sinon.match.any)
.resolves(expectedResult[analysisKind as AnalysisKind]?.uploadResult);
}
@@ -56,53 +71,57 @@ const uploadSarifMacro = test.macro({
fs.writeFileSync(sarifFile, "");
}
const actual = await uploadSarif(logger, features, "", testPath);
const actual = await postProcessAndUploadSarif(
logger,
features,
"always",
"",
testPath,
);
for (const analysisKind of Object.values(AnalysisKind)) {
const analysisKindResult = expectedResult[analysisKind];
if (analysisKindResult) {
// We are expecting a result for this analysis kind, check that we have it.
t.deepEqual(actual[analysisKind], analysisKindResult.uploadResult);
// Additionally, check that the mocked `uploadSpecifiedFiles` was called with only the file paths
// Additionally, check that the mocked `postProcessSarifFiles` was called with only the file paths
// that we expected it to be called with.
t.assert(
uploadSpecifiedFiles.calledWith(
postProcessSarifFiles.calledWith(
logger,
features,
sinon.match.any,
analysisKindResult.expectedFiles?.map(toFullPath) ??
fullSarifPaths,
sinon.match.any,
sinon.match.any,
features,
logger,
getAnalysisConfig(analysisKind),
),
);
} else {
// Otherwise, we are not expecting a result for this analysis kind. However, note that `undefined`
// is also returned by our mocked `uploadSpecifiedFiles` when there is no expected result for this
// is also returned by our mocked `uploadProcessedFiles` when there is no expected result for this
// analysis kind.
t.is(actual[analysisKind], undefined);
// Therefore, we also check that the mocked `uploadSpecifiedFiles` was not called for this analysis kind.
// Therefore, we also check that the mocked `uploadProcessedFiles` was not called for this analysis kind.
t.assert(
!uploadSpecifiedFiles.calledWith(
sinon.match.any,
sinon.match.any,
sinon.match.any,
features,
!uploadPostProcessedFiles.calledWith(
logger,
sinon.match.any,
getAnalysisConfig(analysisKind),
sinon.match.any,
),
`uploadSpecifiedFiles was called for ${analysisKind}, but should not have been.`,
`uploadProcessedFiles was called for ${analysisKind}, but should not have been.`,
);
}
}
});
},
title: (providedTitle = "") => `uploadSarif - ${providedTitle}`,
title: (providedTitle = "") => `processAndUploadSarif - ${providedTitle}`,
});
test(
"SARIF file",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.sarif"],
(tempDir) => path.join(tempDir, "test.sarif"),
{
@@ -117,7 +136,7 @@ test(
test(
"JSON file",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.json"],
(tempDir) => path.join(tempDir, "test.json"),
{
@@ -132,7 +151,7 @@ test(
test(
"Code Scanning files",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.json", "test.sarif"],
undefined,
{
@@ -148,7 +167,7 @@ test(
test(
"Code Quality file",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.quality.sarif"],
(tempDir) => path.join(tempDir, "test.quality.sarif"),
{
@@ -163,7 +182,7 @@ test(
test(
"Mixed files",
uploadSarifMacro,
postProcessAndUploadSarifMacro,
["test.sarif", "test.quality.sarif"],
undefined,
{
@@ -183,3 +202,65 @@ test(
},
},
);
test("postProcessAndUploadSarif doesn't upload if upload is disabled", async (t) => {
await util.withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const features = createFeatures([]);
const toFullPath = (filename: string) => path.join(tempDir, filename);
const postProcessSarifFiles = mockPostProcessSarifFiles();
const uploadPostProcessedFiles = sinon.stub(
uploadLib,
"uploadPostProcessedFiles",
);
fs.writeFileSync(toFullPath("test.sarif"), "");
fs.writeFileSync(toFullPath("test.quality.sarif"), "");
const actual = await postProcessAndUploadSarif(
logger,
features,
"never",
"",
tempDir,
);
t.truthy(actual);
t.assert(postProcessSarifFiles.calledTwice);
t.assert(uploadPostProcessedFiles.notCalled);
});
});
test("postProcessAndUploadSarif writes post-processed SARIF files if output directory is provided", async (t) => {
await util.withTmpDir(async (tempDir) => {
const logger = getRunnerLogger(true);
const features = createFeatures([]);
const toFullPath = (filename: string) => path.join(tempDir, filename);
const postProcessSarifFiles = mockPostProcessSarifFiles();
fs.writeFileSync(toFullPath("test.sarif"), "");
fs.writeFileSync(toFullPath("test.quality.sarif"), "");
const postProcessedOutPath = path.join(tempDir, "post-processed");
const actual = await postProcessAndUploadSarif(
logger,
features,
"never",
"",
tempDir,
"",
postProcessedOutPath,
);
t.truthy(actual);
t.assert(postProcessSarifFiles.calledTwice);
t.assert(fs.existsSync(path.join(postProcessedOutPath, "upload.sarif")));
t.assert(
fs.existsSync(path.join(postProcessedOutPath, "upload.quality.sarif")),
);
});
});
+31 -7
View File
@@ -1,3 +1,4 @@
import { UploadKind } from "./actions-util";
import * as analyses from "./analyses";
import { FeatureEnablement } from "./feature-flags";
import { Logger } from "./logging";
@@ -10,22 +11,26 @@ export type UploadSarifResults = Partial<
>;
/**
* Finds SARIF files in `sarifPath` and uploads them to the appropriate services.
* Finds SARIF files in `sarifPath`, post-processes them, and uploads them to the appropriate services.
*
* @param logger The logger to use.
* @param features Information about enabled features.
* @param uploadKind The kind of upload that is requested.
* @param checkoutPath The path where the repository was checked out at.
* @param sarifPath The path to the file or directory to upload.
* @param category The analysis category.
* @param postProcessedOutputPath The path to a directory to which the post-processed SARIF files should be written to.
*
* @returns A partial mapping from analysis kinds to the upload results.
*/
export async function uploadSarif(
export async function postProcessAndUploadSarif(
logger: Logger,
features: FeatureEnablement,
uploadKind: UploadKind,
checkoutPath: string,
sarifPath: string,
category?: string,
postProcessedOutputPath?: string,
): Promise<UploadSarifResults> {
const sarifGroups = await upload_lib.getGroupedSarifFilePaths(
logger,
@@ -37,14 +42,33 @@ export async function uploadSarif(
sarifGroups,
)) {
const analysisConfig = analyses.getAnalysisConfig(analysisKind);
uploadResults[analysisKind] = await upload_lib.uploadSpecifiedFiles(
sarifFiles,
checkoutPath,
category,
features,
const postProcessingResults = await upload_lib.postProcessSarifFiles(
logger,
features,
checkoutPath,
sarifFiles,
category,
analysisConfig,
);
// Write the post-processed SARIF files to disk. This will only write them if needed based on user inputs
// or environment variables.
await upload_lib.writePostProcessedFiles(
logger,
postProcessedOutputPath,
analysisConfig,
postProcessingResults,
);
// Only perform the actual upload of the post-processed files if `uploadKind` is `always`.
if (uploadKind === "always") {
uploadResults[analysisKind] = await upload_lib.uploadPostProcessedFiles(
logger,
checkoutPath,
analysisConfig,
postProcessingResults,
);
}
}
return uploadResults;
+29
View File
@@ -252,6 +252,35 @@ test("allowed API versions", async (t) => {
);
});
test("getRequiredEnvParam - gets environment variables", (t) => {
process.env.SOME_UNIT_TEST_VAR = "foo";
const result = util.getRequiredEnvParam("SOME_UNIT_TEST_VAR");
t.is(result, "foo");
});
test("getRequiredEnvParam - throws if an environment variable isn't set", (t) => {
t.throws(() => util.getRequiredEnvParam("SOME_UNIT_TEST_VAR"));
});
test("getOptionalEnvVar - gets environment variables", (t) => {
process.env.SOME_UNIT_TEST_VAR = "foo";
const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
t.is(result, "foo");
});
test("getOptionalEnvVar - gets undefined for empty environment variables", (t) => {
process.env.SOME_UNIT_TEST_VAR = "";
const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
t.is(result, undefined);
});
test("getOptionalEnvVar - doesn't throw for undefined environment variables", (t) => {
t.notThrows(() => {
const result = util.getOptionalEnvVar("SOME_UNIT_TEST_VAR");
t.is(result, undefined);
});
});
test("doesDirectoryExist", async (t) => {
// Returns false if no file/dir of this name exists
t.false(util.doesDirectoryExist("non-existent-file.txt"));
+27 -2
View File
@@ -673,6 +673,17 @@ export function getRequiredEnvParam(paramName: string): string {
return value;
}
/**
* Get an environment variable, but return `undefined` if it is not set or empty.
*/
export function getOptionalEnvVar(paramName: string): string | undefined {
const value = process.env[paramName];
if (value?.trim().length === 0) {
return undefined;
}
return value;
}
export class HTTPError extends Error {
public status: number;
@@ -692,8 +703,22 @@ export class ConfigurationError extends Error {
}
}
export function isHTTPError(arg: any): arg is HTTPError {
return arg?.status !== undefined && Number.isInteger(arg.status);
export function asHTTPError(arg: any): HTTPError | undefined {
if (
typeof arg !== "object" ||
arg === null ||
typeof arg.message !== "string"
) {
return undefined;
}
if (Number.isInteger(arg.status)) {
return new HTTPError(arg.message as string, arg.status as number);
}
// See https://github.com/actions/toolkit/blob/acb230b99a46ed33a3f04a758cd68b47b9a82908/packages/tool-cache/src/tool-cache.ts#L19
if (Number.isInteger(arg.httpStatusCode)) {
return new HTTPError(arg.message as string, arg.httpStatusCode as number);
}
return undefined;
}
let cachedCodeQlVersion: undefined | VersionInfo = undefined;
+1 -1
View File
@@ -371,7 +371,7 @@ function getInputOrThrow(
input = input.replace(`\${{matrix.${key}}}`, value);
}
}
if (input !== undefined && input.includes("${{")) {
if (input?.includes("${{")) {
throw new Error(
`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`,
);