From 0f3e6325802cb6fbe08b3ecb70454a8b213107d7 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 24 Feb 2026 16:31:39 +0000 Subject: [PATCH 01/50] Rename secondary `run` to `uploadFailureInfo` --- lib/init-action-post.js | 86 ++++++++++++++--------------- src/init-action-post-helper.test.ts | 12 ++-- src/init-action-post-helper.ts | 18 +++++- src/init-action-post.ts | 2 +- 4 files changed, 66 insertions(+), 52 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 6b62621a1..c60ad92dd 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -46427,7 +46427,7 @@ var require_light = __commonJS({ } return this.Events.trigger("scheduled", { args: this.args, options: this.options }); } - async doExecute(chained, clearGlobalState, run3, free) { + async doExecute(chained, clearGlobalState, run2, free) { var error3, eventInfo, passed; if (this.retryCount === 0) { this._assertStatus("RUNNING"); @@ -46447,10 +46447,10 @@ var require_light = __commonJS({ } } catch (error1) { error3 = error1; - return this._onFailure(error3, eventInfo, clearGlobalState, run3, free); + return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); } } - doExpire(clearGlobalState, run3, free) { + doExpire(clearGlobalState, run2, free) { var error3, eventInfo; if (this._states.jobStatus(this.options.id === "RUNNING")) { this._states.next(this.options.id); @@ -46458,9 +46458,9 @@ var require_light = __commonJS({ this._assertStatus("EXECUTING"); eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; error3 = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); - return this._onFailure(error3, eventInfo, clearGlobalState, run3, free); + return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); } - async _onFailure(error3, eventInfo, clearGlobalState, run3, free) { + async _onFailure(error3, eventInfo, clearGlobalState, run2, free) { var retry2, retryAfter; if 
(clearGlobalState()) { retry2 = await this.Events.trigger("failed", error3, eventInfo); @@ -46468,7 +46468,7 @@ var require_light = __commonJS({ retryAfter = ~~retry2; this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); this.retryCount++; - return run3(retryAfter); + return run2(retryAfter); } else { this.doDone(eventInfo); await free(this.options, eventInfo); @@ -47106,17 +47106,17 @@ var require_light = __commonJS({ } } _run(index, job, wait) { - var clearGlobalState, free, run3; + var clearGlobalState, free, run2; job.doRun(); clearGlobalState = this._clearGlobalState.bind(this, index); - run3 = this._run.bind(this, index, job); + run2 = this._run.bind(this, index, job); free = this._free.bind(this, index, job); return this._scheduled[index] = { timeout: setTimeout(() => { - return job.doExecute(this._limiter, clearGlobalState, run3, free); + return job.doExecute(this._limiter, clearGlobalState, run2, free); }, wait), expiration: job.options.expiration != null ? setTimeout(function() { - return job.doExpire(clearGlobalState, run3, free); + return job.doExpire(clearGlobalState, run2, free); }, wait + job.options.expiration) : void 0, job }; @@ -104471,8 +104471,8 @@ var require_async = __commonJS({ return callback(null, results); } while (readyTasks.length && runningTasks < concurrency) { - var run3 = readyTasks.shift(); - run3(); + var run2 = readyTasks.shift(); + run2(); } } function addListener(taskName, fn) { @@ -163563,8 +163563,8 @@ function getExtraOptionsEnvParam() { } function getToolNames(sarif) { const toolNames = {}; - for (const run3 of sarif.runs || []) { - const tool = run3.tool || {}; + for (const run2 of sarif.runs || []) { + const tool = run2.tool || {}; const driver = tool.driver || {}; if (typeof driver.name === "string" && driver.name.length > 0) { toolNames[driver.name] = true; @@ -168940,9 +168940,9 @@ async function addFingerprints(sarif, sourceRoot, logger) { `Adding fingerprints to SARIF file. 
See ${"https://docs.github.com/en/code-security/reference/code-scanning/sarif-support-for-code-scanning#data-for-preventing-duplicated-alerts" /* TRACK_CODE_SCANNING_ALERTS_ACROSS_RUNS */} for more information.` ); const callbacksByFile = {}; - for (const run3 of sarif.runs || []) { - const artifacts = run3.artifacts || []; - for (const result of run3.results || []) { + for (const run2 of sarif.runs || []) { + const artifacts = run2.artifacts || []; + for (const result of run2.results || []) { const primaryLocation = (result.locations || [])[0]; if (!primaryLocation?.physicalLocation?.artifactLocation) { logger.debug( @@ -169043,25 +169043,25 @@ function combineSarifFiles(sarifFiles, logger) { function areAllRunsProducedByCodeQL(sarifObjects) { return sarifObjects.every((sarifObject) => { return sarifObject.runs?.every( - (run3) => run3.tool?.driver?.name === "CodeQL" + (run2) => run2.tool?.driver?.name === "CodeQL" ); }); } -function createRunKey(run3) { +function createRunKey(run2) { return { - name: run3.tool?.driver?.name, - fullName: run3.tool?.driver?.fullName, - version: run3.tool?.driver?.version, - semanticVersion: run3.tool?.driver?.semanticVersion, - guid: run3.tool?.driver?.guid, - automationId: run3.automationDetails?.id + name: run2.tool?.driver?.name, + fullName: run2.tool?.driver?.fullName, + version: run2.tool?.driver?.version, + semanticVersion: run2.tool?.driver?.semanticVersion, + guid: run2.tool?.driver?.guid, + automationId: run2.automationDetails?.id }; } function areAllRunsUnique(sarifObjects) { const keys = /* @__PURE__ */ new Set(); for (const sarifObject of sarifObjects) { - for (const run3 of sarifObject.runs) { - const key = JSON.stringify(createRunKey(run3)); + for (const run2 of sarifObject.runs) { + const key = JSON.stringify(createRunKey(run2)); if (keys.has(key)) { return false; } @@ -169164,9 +169164,9 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo function populateRunAutomationDetails(sarif, 
category, analysis_key, environment) { const automationID = getAutomationID2(category, analysis_key, environment); if (automationID !== void 0) { - for (const run3 of sarif.runs || []) { - if (run3.automationDetails === void 0) { - run3.automationDetails = { + for (const run2 of sarif.runs || []) { + if (run2.automationDetails === void 0) { + run2.automationDetails = { id: automationID }; } @@ -169265,13 +169265,13 @@ function countResultsInSarif(sarif) { if (!Array.isArray(parsedSarif.runs)) { throw new InvalidSarifUploadError("Invalid SARIF. Missing 'runs' array."); } - for (const run3 of parsedSarif.runs) { - if (!Array.isArray(run3.results)) { + for (const run2 of parsedSarif.runs) { + if (!Array.isArray(run2.results)) { throw new InvalidSarifUploadError( "Invalid SARIF. Missing 'results' array in run." ); } - numResults += run3.results.length; + numResults += run2.results.length; } return numResults; } @@ -169564,9 +169564,9 @@ function handleProcessingResultForUnsuccessfulExecution(response, status, logger } function validateUniqueCategory(sarif, sentinelPrefix) { const categories = {}; - for (const run3 of sarif.runs) { - const id = run3?.automationDetails?.id; - const tool = run3.tool?.driver?.name; + for (const run2 of sarif.runs) { + const id = run2?.automationDetails?.id; + const tool = run2.tool?.driver?.name; const category = `${sanitize(id)}_${sanitize(tool)}`; categories[category] = { id, tool }; } @@ -169591,9 +169591,9 @@ function filterAlertsByDiffRange(logger, sarif) { return sarif; } const checkoutPath = getRequiredInput("checkout_path"); - for (const run3 of sarif.runs) { - if (run3.results) { - run3.results = run3.results.filter((result) => { + for (const run2 of sarif.runs) { + if (run2.results) { + run2.results = run2.results.filter((result) => { const locations = [ ...(result.locations || []).map((loc) => loc.physicalLocation), ...(result.relatedLocations || []).map((loc) => loc.physicalLocation) @@ -169820,7 +169820,7 @@ async function 
tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger return createFailedUploadFailedSarifResult(e); } } -async function run(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, config, repositoryNwo, features, logger) { +async function uploadFailureInfo(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, config, repositoryNwo, features, logger) { await recordOverlayStatus(codeql, config, features, logger); const uploadFailedSarifResult = await tryUploadSarifIfRunFailed( config, @@ -170205,7 +170205,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error } // src/init-action-post.ts -async function run2(startedAt) { +async function run(startedAt) { const logger = getActionsLogger(); let config; let uploadFailedSarifResult; @@ -170228,7 +170228,7 @@ async function run2(startedAt) { ); } else { const codeql = await getCodeQL(config.codeQLCmd); - uploadFailedSarifResult = await run( + uploadFailedSarifResult = await uploadFailureInfo( tryUploadAllAvailableDebugArtifacts, printDebugLogs, codeql, @@ -170312,7 +170312,7 @@ async function runWrapper() { const startedAt = /* @__PURE__ */ new Date(); const logger = getActionsLogger(); try { - await run2(startedAt); + await run(startedAt); } catch (error3) { core16.setFailed(`init post action failed: ${wrapError(error3).message}`); await sendUnhandledErrorStatusReport( diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index 1ddb70287..645ec87d6 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -45,7 +45,7 @@ test("init-post action with debug mode off", async (t) => { const uploadAllAvailableDebugArtifactsSpy = sinon.spy(); const printDebugLogsSpy = sinon.spy(); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( uploadAllAvailableDebugArtifactsSpy, printDebugLogsSpy, codeql.createStubCodeQL({}), @@ -68,7 +68,7 @@ test("init-post action with debug mode on", async 
(t) => { const uploadAllAvailableDebugArtifactsSpy = sinon.spy(); const printDebugLogsSpy = sinon.spy(); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( uploadAllAvailableDebugArtifactsSpy, printDebugLogsSpy, codeql.createStubCodeQL({}), @@ -332,7 +332,7 @@ test("saves overlay status when overlay-base analysis did not complete successfu const stubCodeQL = codeql.createStubCodeQL({}); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), stubCodeQL, @@ -392,7 +392,7 @@ test("does not save overlay status when OverlayAnalysisStatusSave feature flag i .stub(overlayStatus, "saveOverlayStatus") .resolves(true); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), codeql.createStubCodeQL({}), @@ -429,7 +429,7 @@ test("does not save overlay status when build successful", async (t) => { .stub(overlayStatus, "saveOverlayStatus") .resolves(true); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), codeql.createStubCodeQL({}), @@ -465,7 +465,7 @@ test("does not save overlay status when overlay not enabled", async (t) => { .stub(overlayStatus, "saveOverlayStatus") .resolves(true); - await initActionPostHelper.run( + await initActionPostHelper.uploadFailureInfo( sinon.spy(), sinon.spy(), codeql.createStubCodeQL({}), diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 7cbf20d5d..0b778c58d 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -158,7 +158,21 @@ export async function tryUploadSarifIfRunFailed( } } -export async function run( +/** + * Handles the majority of the `post-init` step logic which, depending on the configuration, + * mainly involves uploading a SARIF file with information about the failued run, debug + * artifacts, and performing clean-up operations. 
+ * + * @param uploadAllAvailableDebugArtifacts A function with which to upload debug artifacts. + * @param printDebugLogs A function with which to print debug logs. + * @param codeql The CodeQL CLI instance. + * @param config The CodeQL Action configuration. + * @param repositoryNwo The name and owner of the repository. + * @param features Information about enabled features. + * @param logger The logger to use. + * @returns The results of uploading the SARIF file for the failure. + */ +export async function uploadFailureInfo( uploadAllAvailableDebugArtifacts: ( codeql: CodeQL, config: Config, @@ -171,7 +185,7 @@ export async function run( repositoryNwo: RepositoryNwo, features: FeatureEnablement, logger: Logger, -) { +): Promise { await recordOverlayStatus(codeql, config, features, logger); const uploadFailedSarifResult = await tryUploadSarifIfRunFailed( diff --git a/src/init-action-post.ts b/src/init-action-post.ts index d5aab32f3..ed9970170 100644 --- a/src/init-action-post.ts +++ b/src/init-action-post.ts @@ -77,7 +77,7 @@ async function run(startedAt: Date) { } else { const codeql = await getCodeQL(config.codeQLCmd); - uploadFailedSarifResult = await initActionPostHelper.run( + uploadFailedSarifResult = await initActionPostHelper.uploadFailureInfo( debugArtifacts.tryUploadAllAvailableDebugArtifacts, printDebugLogs, codeql, From e9ce32d8078f95509668f449a64c013fd9a4b329 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Tue, 24 Feb 2026 16:39:28 +0000 Subject: [PATCH 02/50] Change order of checks in `tryUploadSarifIfRunFailed` --- lib/init-action-post.js | 23 +++++++++++----------- src/init-action-post-helper.ts | 36 ++++++++++++++++++++++------------ 2 files changed, 36 insertions(+), 23 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index c60ad92dd..a3c5f17b8 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -169796,23 +169796,24 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { return uploadResult ? { ...uploadResult.statusReport, sarifID: uploadResult.sarifID } : {}; } async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { - if (!isCodeScanningEnabled(config)) { - return { - upload_failed_run_skipped_because: "Code Scanning is not enabled." - }; - } if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { return { upload_failed_run_skipped_because: "Analyze Action completed successfully" }; } try { - return await maybeUploadFailedSarif( - config, - repositoryNwo, - features, - logger - ); + if (isCodeScanningEnabled(config)) { + return await maybeUploadFailedSarif( + config, + repositoryNwo, + features, + logger + ); + } else { + return { + upload_failed_run_skipped_because: "Code Scanning is not enabled." + }; + } } catch (e) { logger.debug( `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}` diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 0b778c58d..5c60961d9 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -125,31 +125,43 @@ async function maybeUploadFailedSarif( : {}; } +/** + * Tries to upload a SARIF file with information about the run, if it failed. + * + * @param config The CodeQL Action configuration. + * @param repositoryNwo The name and owner of the repository. 
+ * @param features Information about enabled features. + * @param logger The logger to use. + * @returns The results of uploading the SARIF file for the failure. + */ export async function tryUploadSarifIfRunFailed( config: Config, repositoryNwo: RepositoryNwo, features: FeatureEnablement, logger: Logger, ): Promise { - // Only upload the failed SARIF to Code scanning if Code scanning is enabled. - if (!isCodeScanningEnabled(config)) { - return { - upload_failed_run_skipped_because: "Code Scanning is not enabled.", - }; - } + // There's nothing to do here if the analysis succeeded. if (process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] === "true") { return { upload_failed_run_skipped_because: "Analyze Action completed successfully", }; } + try { - return await maybeUploadFailedSarif( - config, - repositoryNwo, - features, - logger, - ); + // Only upload the failed SARIF to Code scanning if Code scanning is enabled. + if (isCodeScanningEnabled(config)) { + return await maybeUploadFailedSarif( + config, + repositoryNwo, + features, + logger, + ); + } else { + return { + upload_failed_run_skipped_because: "Code Scanning is not enabled.", + }; + } } catch (e) { logger.debug( `Failed to upload a SARIF file for this failed CodeQL code scanning run. ${e}`, From 56d1ccc87a4a2034c5daeec44601f197fbaf4bda Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 24 Feb 2026 16:55:37 +0000 Subject: [PATCH 03/50] Change skipped reason message --- lib/init-action-post.js | 2 +- src/init-action-post-helper.test.ts | 2 +- src/init-action-post-helper.ts | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index a3c5f17b8..c7cb2af50 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -169811,7 +169811,7 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger ); } else { return { - upload_failed_run_skipped_because: "Code Scanning is not enabled." 
+ upload_failed_run_skipped_because: "No analysis kind that supports failed SARIF uploads is enabled." }; } } catch (e) { diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index 645ec87d6..fac31423c 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -309,7 +309,7 @@ test("not uploading failed SARIF when `code-scanning` is not an enabled analysis }); t.is( result.upload_failed_run_skipped_because, - "Code Scanning is not enabled.", + "No analysis kind that supports failed SARIF uploads is enabled.", ); }); diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 5c60961d9..76a14be11 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -159,7 +159,8 @@ export async function tryUploadSarifIfRunFailed( ); } else { return { - upload_failed_run_skipped_because: "Code Scanning is not enabled.", + upload_failed_run_skipped_because: + "No analysis kind that supports failed SARIF uploads is enabled.", }; } } catch (e) { From 60ca40ecd42b75d02a64ba42d42c26b623618b7e Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Thu, 26 Feb 2026 18:04:21 +0000 Subject: [PATCH 04/50] Refactor `prepareFailedSarif` out of `maybeUploadFailedSarif` --- lib/init-action-post.js | 54 ++++++++++++++++++++++++++---- src/init-action-post-helper.ts | 61 ++++++++++++++++++++++++++-------- 2 files changed, 95 insertions(+), 20 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index c7cb2af50..3874bd7e1 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -163855,6 +163855,34 @@ async function asyncSome(array, predicate) { const results = await Promise.all(array.map(predicate)); return results.some((result) => result); } +var Success = class { + constructor(value) { + this.value = value; + } + isSuccess() { + return true; + } + isFailure() { + return false; + } + orElse(_defaultValue) { + return this.value; + } +}; +var Failure = class { + constructor(value) { + this.value = value; + } + isSuccess() { + return false; + } + isFailure() { + return true; + } + orElse(defaultValue) { + return defaultValue; + } +}; // src/actions-util.ts var pkg = require_package(); @@ -169755,9 +169783,11 @@ function createFailedUploadFailedSarifResult(error3) { upload_failed_run_stack_trace: wrappedError.stack }; } -async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { +async function prepareFailedSarif(logger, features, config) { if (!config.codeQLCmd) { - return { upload_failed_run_skipped_because: "CodeQL command not found" }; + return new Failure({ + upload_failed_run_skipped_because: "CodeQL command not found" + }); } const workflow = await getWorkflow(logger); const jobName = getRequiredEnvParam("GITHUB_JOB"); @@ -169766,7 +169796,9 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { if (!["always", "failure-only"].includes( getUploadValue(shouldUpload) ) || shouldSkipSarifUpload()) { - return { upload_failed_run_skipped_because: "SARIF upload is disabled" }; + return new Failure({ + 
upload_failed_run_skipped_because: "SARIF upload is disabled" + }); } const category = getCategoryInputOrThrow(workflow, jobName, matrix); const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); @@ -169778,11 +169810,19 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { } else { await codeql.databaseExportDiagnostics(databasePath, sarifFile, category); } - logger.info(`Uploading failed SARIF file ${sarifFile}`); + return new Success({ sarifFile, category, checkoutPath }); +} +async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + const failedSarif = failedSarifResult.value; + logger.info(`Uploading failed SARIF file ${failedSarif.sarifFile}`); const uploadResult = await uploadFiles( - sarifFile, - checkoutPath, - category, + failedSarif.sarifFile, + failedSarif.checkoutPath, + failedSarif.category, features, logger, CodeScanning diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 76a14be11..c03a0c1f0 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -19,10 +19,13 @@ import * as uploadLib from "./upload-lib"; import { checkDiskUsage, delay, + Failure, getErrorMessage, getRequiredEnvParam, parseMatrixInput, + Result, shouldSkipSarifUpload, + Success, wrapError, } from "./util"; import { @@ -62,18 +65,27 @@ function createFailedUploadFailedSarifResult( }; } +/** Records details about a SARIF file that can contains information about a failed analysis. */ +interface FailedSarifInfo { + sarifFile: string; + category: string | undefined; + checkoutPath: string; +} + /** - * Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF - * category for the workflow. 
+ * Tries to prepare a SARIF file that can contains information about a failed analysis. + * + * @returns Either information about the SARIF file that was produced, or a reason why it couldn't be produced. */ -async function maybeUploadFailedSarif( - config: Config, - repositoryNwo: RepositoryNwo, - features: FeatureEnablement, +async function prepareFailedSarif( logger: Logger, -): Promise { + features: FeatureEnablement, + config: Config, +): Promise> { if (!config.codeQLCmd) { - return { upload_failed_run_skipped_because: "CodeQL command not found" }; + return new Failure({ + upload_failed_run_skipped_because: "CodeQL command not found", + }); } const workflow = await getWorkflow(logger); const jobName = getRequiredEnvParam("GITHUB_JOB"); @@ -85,7 +97,9 @@ async function maybeUploadFailedSarif( ) || shouldSkipSarifUpload() ) { - return { upload_failed_run_skipped_because: "SARIF upload is disabled" }; + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled", + }); } const category = getCategoryInputOrThrow(workflow, jobName, matrix); const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); @@ -105,11 +119,32 @@ async function maybeUploadFailedSarif( await codeql.databaseExportDiagnostics(databasePath, sarifFile, category); } - logger.info(`Uploading failed SARIF file ${sarifFile}`); + return new Success({ sarifFile, category, checkoutPath }); +} + +/** + * Upload a failed SARIF file if we can verify that SARIF upload is enabled and determine the SARIF + * category for the workflow. 
+ */ +async function maybeUploadFailedSarif( + config: Config, + repositoryNwo: RepositoryNwo, + features: FeatureEnablement, + logger: Logger, +): Promise { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + + const failedSarif = failedSarifResult.value; + + logger.info(`Uploading failed SARIF file ${failedSarif.sarifFile}`); const uploadResult = await uploadLib.uploadFiles( - sarifFile, - checkoutPath, - category, + failedSarif.sarifFile, + failedSarif.checkoutPath, + failedSarif.category, features, logger, CodeScanning, From 44b66a8064e35661c1ebd700dee861b9149bce02 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Thu, 26 Feb 2026 18:40:00 +0000 Subject: [PATCH 05/50] Upload failed SARIF as artifact for risk assessments --- lib/init-action-post.js | 970 +++++++++++++++++---------------- src/config-utils.ts | 7 + src/init-action-post-helper.ts | 49 +- 3 files changed, 551 insertions(+), 475 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 3874bd7e1..5cd5ca0c2 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -1337,14 +1337,14 @@ var require_util = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path18 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path19 = url2.path != null ? 
url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path18 && path18[0] !== "/") { - path18 = `/${path18}`; + if (path19 && path19[0] !== "/") { + path19 = `/${path19}`; } - return new URL(`${origin}${path18}`); + return new URL(`${origin}${path19}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -1795,39 +1795,39 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path18, + path19, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path18); + debuglog("trailers received from %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path18, + path19, error3.message ); }); @@ -1876,9 +1876,9 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: 
{ method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -1941,7 +1941,7 @@ var require_request = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path18, + path: path19, method, body, headers, @@ -1956,11 +1956,11 @@ var require_request = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path18 !== "string") { + if (typeof path19 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path18[0] !== "/" && !(path18.startsWith("http://") || path18.startsWith("https://")) && method !== "CONNECT") { + } else if (path19[0] !== "/" && !(path19.startsWith("http://") || path19.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path18)) { + } else if (invalidPathRegex.test(path19)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -2023,7 +2023,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path18, query) : path18; + this.path = query ? buildURL(path19, query) : path19; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? 
false : blocking; @@ -6536,7 +6536,7 @@ var require_client_h1 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path18, host, upgrade, blocking, reset } = request2; + const { method, path: path19, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -6602,7 +6602,7 @@ var require_client_h1 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path18} HTTP/1.1\r + let header = `${method} ${path19} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -7128,7 +7128,7 @@ var require_client_h2 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path18, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path19, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -7195,7 +7195,7 @@ var require_client_h2 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path18; + headers[HTTP2_HEADER_PATH] = path19; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -7548,9 +7548,9 @@ var require_redirect_handler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path18 = search ? 
`${pathname}${search}` : pathname; + const path19 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path18; + this.opts.path = path19; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -8784,10 +8784,10 @@ var require_proxy_agent = __commonJS({ }; const { origin, - path: path18 = "/", + path: path19 = "/", headers = {} } = opts; - opts.path = origin + path18; + opts.path = origin + path19; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -10708,20 +10708,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path18) { - if (typeof path18 !== "string") { - return path18; + function safeUrl(path19) { + if (typeof path19 !== "string") { + return path19; } - const pathSegments = path18.split("?"); + const pathSegments = path19.split("?"); if (pathSegments.length !== 2) { - return path18; + return path19; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path18, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path18); + function matchKey(mockDispatch2, { path: path19, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path19); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10743,7 +10743,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path18 }) => matchValue(safeUrl(path18), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path19 }) => matchValue(safeUrl(path19), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10781,9 +10781,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path18, method, body, headers, query } = opts; + const { path: path19, method, body, headers, query } = opts; return { - path: path18, + path: path19, method, body, headers, @@ -11246,10 +11246,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path18, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path19, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path18, + Path: path19, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -16130,9 +16130,9 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path18) { - for (let i = 0; i < path18.length; ++i) { - const code = path18.charCodeAt(i); + function validateCookiePath(path19) { + for (let i = 0; i < path19.length; ++i) { + const code = path19.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -18726,11 +18726,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path18 = opts.path; + let path19 = opts.path; if (!opts.path.startsWith("/")) { - path18 = `/${path18}`; + path19 = `/${path19}`; } - url2 = new URL(util.parseOrigin(url2).origin + path18); + url2 = new URL(util.parseOrigin(url2).origin + path19); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -20033,7 +20033,7 @@ var require_path_utils = __commonJS({ exports2.toPosixPath = toPosixPath; exports2.toWin32Path = toWin32Path; exports2.toPlatformPath = toPlatformPath; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -20041,7 +20041,7 @@ var require_path_utils = __commonJS({ return pth.replace(/[/]/g, "\\"); } function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path18.sep); + return pth.replace(/[/\\]/g, path19.sep); } } }); @@ -20124,7 +20124,7 @@ var require_io_util = __commonJS({ exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; var fs19 = __importStar2(require("fs")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, 
exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { @@ -20179,7 +20179,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path18.extname(filePath).toUpperCase(); + const upperExt = path19.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -20203,11 +20203,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path18.dirname(filePath); - const upperName = path18.basename(filePath).toUpperCase(); + const directory = path19.dirname(filePath); + const upperName = path19.basename(filePath).toUpperCase(); for (const actualName of yield (0, exports2.readdir)(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path18.join(directory, actualName); + filePath = path19.join(directory, actualName); break; } } @@ -20319,7 +20319,7 @@ var require_io = __commonJS({ exports2.which = which7; exports2.findInPath = findInPath; var assert_1 = require("assert"); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util()); function cp(source_1, dest_1) { return __awaiter2(this, arguments, void 0, function* (source, dest, options = {}) { @@ -20328,7 +20328,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path18.join(dest, path18.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? 
path19.join(dest, path19.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -20340,7 +20340,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path18.relative(source, newDest) === "") { + if (path19.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -20352,7 +20352,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path18.join(dest, path18.basename(source)); + dest = path19.join(dest, path19.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -20363,7 +20363,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path18.dirname(dest)); + yield mkdirP(path19.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -20422,7 +20422,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path18.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path19.delimiter)) { if (extension) { extensions.push(extension); } @@ -20435,12 +20435,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path18.sep)) { + if (tool.includes(path19.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path18.delimiter)) { + for (const p of process.env.PATH.split(path19.delimiter)) { if (p) { directories.push(p); } @@ -20448,7 +20448,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path18.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path19.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ 
-20578,7 +20578,7 @@ var require_toolrunner = __commonJS({ var os4 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var io7 = __importStar2(require_io()); var ioUtil = __importStar2(require_io_util()); var timers_1 = require("timers"); @@ -20793,7 +20793,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path18.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path19.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { @@ -21346,7 +21346,7 @@ var require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -21372,7 +21372,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path18.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path19.delimiter}${process.env["PATH"]}`; } function getInput2(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; @@ -21509,8 +21509,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path18 = 
process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path18} does not exist${os_1.EOL}`); + const path19 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path19} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -22335,14 +22335,14 @@ var require_util9 = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol || ""}//${url2.hostname || ""}:${port}`; - let path18 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path19 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path18 && path18[0] !== "/") { - path18 = `/${path18}`; + if (path19 && path19[0] !== "/") { + path19 = `/${path19}`; } - return new URL(`${origin}${path18}`); + return new URL(`${origin}${path19}`); } if (!isHttpOrHttpsPrefixed(url2.origin || url2.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -22793,39 +22793,39 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path18, + path19, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, 
path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path18); + debuglog("trailers received from %s %s/%s", method, origin, path19); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path18, origin }, + request: { method, path: path19, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path18, + path19, error3.message ); }); @@ -22874,9 +22874,9 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path18, origin } + request: { method, path: path19, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path18); + debuglog("sending request to %s %s/%s", method, origin, path19); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -22939,7 +22939,7 @@ var require_request3 = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path18, + path: path19, method, body, headers, @@ -22954,11 +22954,11 @@ var require_request3 = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path18 !== "string") { + if (typeof path19 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path18[0] !== "/" && !(path18.startsWith("http://") || path18.startsWith("https://")) && method !== "CONNECT") { + } else if (path19[0] !== "/" && !(path19.startsWith("http://") || path19.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path18)) { + } else if (invalidPathRegex.test(path19)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -23021,7 +23021,7 @@ var require_request3 = 
__commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path18, query) : path18; + this.path = query ? buildURL(path19, query) : path19; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -27534,7 +27534,7 @@ var require_client_h12 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path18, host, upgrade, blocking, reset } = request2; + const { method, path: path19, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -27600,7 +27600,7 @@ var require_client_h12 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path18} HTTP/1.1\r + let header = `${method} ${path19} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -28126,7 +28126,7 @@ var require_client_h22 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path18, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path19, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -28193,7 +28193,7 @@ var require_client_h22 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path18; + headers[HTTP2_HEADER_PATH] = path19; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if 
(body && typeof body.read === "function") { @@ -28546,9 +28546,9 @@ var require_redirect_handler2 = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path18 = search ? `${pathname}${search}` : pathname; + const path19 = search ? `${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path18; + this.opts.path = path19; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -29782,10 +29782,10 @@ var require_proxy_agent2 = __commonJS({ }; const { origin, - path: path18 = "/", + path: path19 = "/", headers = {} } = opts; - opts.path = origin + path18; + opts.path = origin + path19; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -31706,20 +31706,20 @@ var require_mock_utils2 = __commonJS({ } return true; } - function safeUrl(path18) { - if (typeof path18 !== "string") { - return path18; + function safeUrl(path19) { + if (typeof path19 !== "string") { + return path19; } - const pathSegments = path18.split("?"); + const pathSegments = path19.split("?"); if (pathSegments.length !== 2) { - return path18; + return path19; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path18, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path18); + function matchKey(mockDispatch2, { path: path19, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path19); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? 
matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -31741,7 +31741,7 @@ var require_mock_utils2 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path18 }) => matchValue(safeUrl(path18), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path19 }) => matchValue(safeUrl(path19), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -31779,9 +31779,9 @@ var require_mock_utils2 = __commonJS({ } } function buildKey(opts) { - const { path: path18, method, body, headers, query } = opts; + const { path: path19, method, body, headers, query } = opts; return { - path: path18, + path: path19, method, body, headers, @@ -32244,10 +32244,10 @@ var require_pending_interceptors_formatter2 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path18, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path19, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path18, + Path: path19, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -37128,9 +37128,9 @@ var require_util14 = __commonJS({ } } } - function validateCookiePath(path18) { - for (let i = 0; i < path18.length; ++i) { - const code = path18.charCodeAt(i); + function validateCookiePath(path19) { + for (let i = 0; i < path19.length; ++i) { + const code = path19.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -39724,11 +39724,11 @@ var require_undici2 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path18 = opts.path; + let path19 = opts.path; if (!opts.path.startsWith("/")) { - path18 = `/${path18}`; + path19 = `/${path19}`; } - url2 = new URL(util.parseOrigin(url2).origin + path18); + url2 = new URL(util.parseOrigin(url2).origin + path19); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -47411,14 +47411,14 @@ var require_helpers = __commonJS({ "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { "use strict"; var uri = require("url"); - var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path18, name, argument) { - if (Array.isArray(path18)) { - this.path = path18; - this.property = path18.reduce(function(sum, item) { + var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path19, name, argument) { + if (Array.isArray(path19)) { + this.path = path19; + this.property = path19.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); - } else if (path18 !== void 0) { - this.property = path18; + } else if (path19 !== void 0) { + this.property = path19; } if (message) { this.message = message; @@ -47509,16 +47509,16 @@ var require_helpers = __commonJS({ name: { value: "SchemaError", enumerable: false } } ); - var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path18, base, schemas) { + var 
SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path19, base, schemas) { this.schema = schema2; this.options = options; - if (Array.isArray(path18)) { - this.path = path18; - this.propertyPath = path18.reduce(function(sum, item) { + if (Array.isArray(path19)) { + this.path = path19; + this.propertyPath = path19.reduce(function(sum, item) { return sum + makeSuffix(item); }, "instance"); } else { - this.propertyPath = path18; + this.propertyPath = path19; } this.base = base; this.schemas = schemas; @@ -47527,10 +47527,10 @@ var require_helpers = __commonJS({ return uri.resolve(this.base, target); }; SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { - var path18 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); + var path19 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); var id = schema2.$id || schema2.id; var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path18, base, Object.create(this.schemas)); + var ctx = new SchemaContext(schema2, this.options, path19, base, Object.create(this.schemas)); if (id && !ctx.schemas[base]) { ctx.schemas[base] = schema2; } @@ -48833,7 +48833,7 @@ var require_internal_path_helper = __commonJS({ exports2.hasRoot = hasRoot; exports2.normalizeSeparators = normalizeSeparators; exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; function dirname5(p) { @@ -48841,7 +48841,7 @@ var require_internal_path_helper = __commonJS({ if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } - let result = path18.dirname(p); + let result = path19.dirname(p); if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } @@ -48878,7 +48878,7 @@ 
var require_internal_path_helper = __commonJS({ (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { } else { - root += path18.sep; + root += path19.sep; } return root + itemPath; } @@ -48912,10 +48912,10 @@ var require_internal_path_helper = __commonJS({ return ""; } p = normalizeSeparators(p); - if (!p.endsWith(path18.sep)) { + if (!p.endsWith(path19.sep)) { return p; } - if (p === path18.sep) { + if (p === path19.sep) { return p; } if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { @@ -49260,7 +49260,7 @@ var require_minimatch = __commonJS({ "node_modules/minimatch/minimatch.js"(exports2, module2) { module2.exports = minimatch; minimatch.Minimatch = Minimatch; - var path18 = (function() { + var path19 = (function() { try { return require("path"); } catch (e) { @@ -49268,7 +49268,7 @@ var require_minimatch = __commonJS({ })() || { sep: "/" }; - minimatch.sep = path18.sep; + minimatch.sep = path19.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand2 = require_brace_expansion(); var plTypes = { @@ -49357,8 +49357,8 @@ var require_minimatch = __commonJS({ assertValidPattern(pattern); if (!options) options = {}; pattern = pattern.trim(); - if (!options.allowWindowsEscape && path18.sep !== "/") { - pattern = pattern.split(path18.sep).join("/"); + if (!options.allowWindowsEscape && path19.sep !== "/") { + pattern = pattern.split(path19.sep).join("/"); } this.options = options; this.set = []; @@ -49728,8 +49728,8 @@ var require_minimatch = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; - if (path18.sep !== "/") { - f = f.split(path18.sep).join("/"); + if (path19.sep !== "/") { + f = f.split(path19.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -49875,7 +49875,7 @@ var require_internal_path = __commonJS({ }; 
Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Path = void 0; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var IS_WINDOWS = process.platform === "win32"; @@ -49890,12 +49890,12 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path18.sep); + this.segments = itemPath.split(path19.sep); } else { let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { - const basename2 = path18.basename(remaining); + const basename2 = path19.basename(remaining); this.segments.unshift(basename2); remaining = dir; dir = pathHelper.dirname(remaining); @@ -49913,7 +49913,7 @@ var require_internal_path = __commonJS({ (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { - (0, assert_1.default)(!segment.includes(path18.sep), `Parameter 'itemPath' contains unexpected path separators`); + (0, assert_1.default)(!segment.includes(path19.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } @@ -49924,12 +49924,12 @@ var require_internal_path = __commonJS({ */ toString() { let result = this.segments[0]; - let skipSlash = result.endsWith(path18.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + let skipSlash = result.endsWith(path19.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { - result += path18.sep; + result += path19.sep; } result += this.segments[i]; } @@ -49987,7 +49987,7 @@ var require_internal_pattern = 
__commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.Pattern = void 0; var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var pathHelper = __importStar2(require_internal_path_helper()); var assert_1 = __importDefault2(require("assert")); var minimatch_1 = require_minimatch(); @@ -50016,7 +50016,7 @@ var require_internal_pattern = __commonJS({ } pattern = _Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path18.sep); + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path19.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); @@ -50040,8 +50040,8 @@ var require_internal_pattern = __commonJS({ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path18.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path18.sep}`; + if (!itemPath.endsWith(path19.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path19.sep}`; } } else { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); @@ -50076,9 +50076,9 @@ var require_internal_pattern = __commonJS({ (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path18.sep}`)) { + if (pattern === "." 
|| pattern.startsWith(`.${path19.sep}`)) { pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path18.sep}`)) { + } else if (pattern === "~" || pattern.startsWith(`~${path19.sep}`)) { homedir = homedir || os4.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); @@ -50162,8 +50162,8 @@ var require_internal_search_state = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.SearchState = void 0; var SearchState = class { - constructor(path18, level) { - this.path = path18; + constructor(path19, level) { + this.path = path19; this.level = level; } }; @@ -50307,7 +50307,7 @@ var require_internal_globber = __commonJS({ var core17 = __importStar2(require_core()); var fs19 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var patternHelper = __importStar2(require_internal_pattern_helper()); var internal_match_kind_1 = require_internal_match_kind(); var internal_pattern_1 = require_internal_pattern(); @@ -50383,7 +50383,7 @@ var require_internal_globber = __commonJS({ if (!stats) { continue; } - if (options.excludeHiddenFiles && path18.basename(item.path).match(/^\./)) { + if (options.excludeHiddenFiles && path19.basename(item.path).match(/^\./)) { continue; } if (stats.isDirectory()) { @@ -50393,7 +50393,7 @@ var require_internal_globber = __commonJS({ continue; } const childLevel = item.level + 1; - const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path18.join(item.path, x), childLevel)); + const childItems = (yield __await2(fs19.promises.readdir(item.path))).map((x) => new 
internal_search_state_1.SearchState(path19.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await2(item.path); @@ -50555,7 +50555,7 @@ var require_internal_hash_files = __commonJS({ var fs19 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function hashFiles2(globber_1, currentWorkspace_1) { return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; @@ -50571,7 +50571,7 @@ var require_internal_hash_files = __commonJS({ _e = false; const file = _c; writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path18.sep}`)) { + if (!file.startsWith(`${githubWorkspace}${path19.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } @@ -51957,7 +51957,7 @@ var require_cacheUtils = __commonJS({ var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs19 = __importStar2(require("fs")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var semver9 = __importStar2(require_semver3()); var util = __importStar2(require("util")); var constants_1 = require_constants12(); @@ -51977,9 +51977,9 @@ var require_cacheUtils = __commonJS({ baseLocation = "/home"; } } - tempDirectory = path18.join(baseLocation, "actions", "temp"); + tempDirectory = path19.join(baseLocation, "actions", "temp"); } - const dest = path18.join(tempDirectory, crypto2.randomUUID()); + const dest = path19.join(tempDirectory, crypto2.randomUUID()); yield io7.mkdirP(dest); return dest; }); @@ -52001,7 +52001,7 @@ var require_cacheUtils = __commonJS({ _c = _g.value; _e = false; const file = _c; - const relativeFile = path18.relative(workspace, file).replace(new 
RegExp(`\\${path18.sep}`, "g"), "/"); + const relativeFile = path19.relative(workspace, file).replace(new RegExp(`\\${path19.sep}`, "g"), "/"); core17.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); @@ -52528,13 +52528,13 @@ function __disposeResources(env) { } return next(); } -function __rewriteRelativeImportExtension(path18, preserveJsx) { - if (typeof path18 === "string" && /^\.\.?\//.test(path18)) { - return path18.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { +function __rewriteRelativeImportExtension(path19, preserveJsx) { + if (typeof path19 === "string" && /^\.\.?\//.test(path19)) { + return path19.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." + cm.toLowerCase() + "js"; }); } - return path18; + return path19; } var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; var init_tslib_es6 = __esm({ @@ -56948,8 +56948,8 @@ var require_getClient = __commonJS({ } const { allowInsecureConnection, httpClient } = clientOptions; const endpointUrl = clientOptions.endpoint ?? 
endpoint2; - const client = (path18, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path18, args, { allowInsecureConnection, ...requestOptions }); + const client = (path19, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path19, args, { allowInsecureConnection, ...requestOptions }); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); @@ -60820,15 +60820,15 @@ var require_urlHelpers2 = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path18 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path18.startsWith("/")) { - path18 = path18.substring(1); + let path19 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path19.startsWith("/")) { + path19 = path19.substring(1); } - if (isAbsoluteUrl(path18)) { - requestUrl = path18; + if (isAbsoluteUrl(path19)) { + requestUrl = path19; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path18); + requestUrl = appendPath(requestUrl, path19); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -60874,9 +60874,9 @@ var require_urlHelpers2 = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path18 = pathToAppend.substring(0, searchStart); + const path19 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path18; + newPath = newPath + path19; if (search) { parsedUrl.search = parsedUrl.search ? 
`${parsedUrl.search}&${search}` : search; } @@ -63127,10 +63127,10 @@ var require_utils_common = __commonJS({ var constants_js_1 = require_constants15(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 || "/"; - path18 = escape(path18); - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 || "/"; + path19 = escape(path19); + urlParsed.pathname = path19; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -63215,9 +63215,9 @@ var require_utils_common = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 ? path18.endsWith("/") ? `${path18}${name}` : `${path18}/${name}` : name; - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 ? path19.endsWith("/") ? `${path19}${name}` : `${path19}/${name}` : name; + urlParsed.pathname = path19; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -64444,9 +64444,9 @@ var require_StorageSharedKeyCredentialPolicy = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path18}`; + canonicalizedResourceString += `/${this.factory.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -65185,10 +65185,10 @@ var require_utils_common2 = __commonJS({ var constants_js_1 = require_constants16(); function escapeURLPath(url2) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 || "/"; - path18 = escape(path18); - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + 
path19 = path19 || "/"; + path19 = escape(path19); + urlParsed.pathname = path19; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -65273,9 +65273,9 @@ var require_utils_common2 = __commonJS({ } function appendToURLPath(url2, name) { const urlParsed = new URL(url2); - let path18 = urlParsed.pathname; - path18 = path18 ? path18.endsWith("/") ? `${path18}${name}` : `${path18}/${name}` : name; - urlParsed.pathname = path18; + let path19 = urlParsed.pathname; + path19 = path19 ? path19.endsWith("/") ? `${path19}${name}` : `${path19}/${name}` : name; + urlParsed.pathname = path19; return urlParsed.toString(); } function setURLParameter(url2, name, value) { @@ -66196,9 +66196,9 @@ var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path18}`; + canonicalizedResourceString += `/${this.factory.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -66828,9 +66828,9 @@ var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path18}`; + canonicalizedResourceString += `/${options.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -67175,9 +67175,9 @@ var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ 
return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path18 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path19 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path18}`; + canonicalizedResourceString += `/${options.accountName}${path19}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -88832,8 +88832,8 @@ var require_BlobBatch = __commonJS({ if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); } - const path18 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path18 || path18 === "") { + const path19 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path19 || path19 === "") { throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } } @@ -88911,8 +88911,8 @@ var require_BlobBatchClient = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url2, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path18 = (0, utils_common_js_1.getURLPath)(url2); - if (path18 && path18 !== "/") { + const path19 = (0, utils_common_js_1.getURLPath)(url2); + if (path19 && path19 !== "/") { this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; @@ -98221,7 +98221,7 @@ var require_tar = __commonJS({ var exec_1 = require_exec(); var io7 = __importStar2(require_io()); var fs_1 = require("fs"); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var constants_1 = require_constants12(); var IS_WINDOWS = 
process.platform === "win32"; @@ -98267,13 +98267,13 @@ var require_tar = __commonJS({ const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (type2) { case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path18.sep}`, "g"), "/")); + args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path19.sep}`, "g"), "/")); break; case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), "-P"); + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1.ArchiveToolType.GNU) { @@ -98319,7 +98319,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/") ] : [ "--use-compress-program", IS_WINDOWS ? 
'"zstd -d --long=30"' : "unzstd --long=30" @@ -98328,7 +98328,7 @@ var require_tar = __commonJS({ return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path18.sep}`, "g"), "/") + archivePath.replace(new RegExp(`\\${path19.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; default: return ["-z"]; @@ -98343,7 +98343,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), constants_1.TarFilename ] : [ "--use-compress-program", @@ -98352,7 +98352,7 @@ var require_tar = __commonJS({ case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path18.sep}`, "g"), "/"), + cacheFileName.replace(new RegExp(`\\${path19.sep}`, "g"), "/"), constants_1.TarFilename ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; default: @@ -98390,7 +98390,7 @@ var require_tar = __commonJS({ } function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path18.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + (0, fs_1.writeFileSync)(path19.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); @@ -98472,7 +98472,7 @@ var require_cache5 = __commonJS({ exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; var core17 = __importStar2(require_core()); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); @@ -98567,7 +98567,7 @@ var require_cache5 = __commonJS({ core17.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } - archivePath = path18.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core17.isDebug()) { @@ -98636,7 +98636,7 @@ var require_cache5 = __commonJS({ core17.info("Lookup only - skipping download"); return response.matchedKey; } - archivePath = path18.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + archivePath = path19.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core17.debug(`Archive path: ${archivePath}`); core17.debug(`Starting download of archive to: ${archivePath}`); yield 
cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); @@ -98698,7 +98698,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path18.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -98762,7 +98762,7 @@ var require_cache5 = __commonJS({ throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path18.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + const archivePath = path19.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core17.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); @@ -99189,7 +99189,7 @@ var require_tool_cache = __commonJS({ var fs19 = __importStar2(require("fs")); var mm = __importStar2(require_manifest()); var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var httpm = __importStar2(require_lib()); var semver9 = __importStar2(require_semver2()); var stream2 = __importStar2(require("stream")); @@ -99210,8 +99210,8 @@ var require_tool_cache = __commonJS({ var userAgent2 = "actions/tool-cache"; function downloadTool2(url2, dest, auth2, headers) { return __awaiter2(this, void 0, void 0, function* () { - dest = dest || path18.join(_getTempDirectory(), crypto2.randomUUID()); - yield io7.mkdirP(path18.dirname(dest)); + dest = dest || 
path19.join(_getTempDirectory(), crypto2.randomUUID()); + yield io7.mkdirP(path19.dirname(dest)); core17.debug(`Downloading ${url2}`); core17.debug(`Destination ${dest}`); const maxAttempts = 3; @@ -99301,7 +99301,7 @@ var require_tool_cache = __commonJS({ process.chdir(originalCwd); } } else { - const escapedScript = path18.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedScript = path19.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; @@ -99473,7 +99473,7 @@ var require_tool_cache = __commonJS({ } const destPath = yield _createToolPath(tool, version, arch2); for (const itemName of fs19.readdirSync(sourceDir)) { - const s = path18.join(sourceDir, itemName); + const s = path19.join(sourceDir, itemName); yield io7.cp(s, destPath, { recursive: true }); } _completeToolPath(tool, version, arch2); @@ -99490,7 +99490,7 @@ var require_tool_cache = __commonJS({ throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); - const destPath = path18.join(destFolder, targetFile); + const destPath = path19.join(destFolder, targetFile); core17.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); @@ -99513,7 +99513,7 @@ var require_tool_cache = __commonJS({ let toolPath = ""; if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; - const cachePath = path18.join(_getCacheDirectory(), toolName, versionSpec, arch2); + const cachePath = path19.join(_getCacheDirectory(), toolName, versionSpec, arch2); core17.debug(`checking cache: ${cachePath}`); if (fs19.existsSync(cachePath) && fs19.existsSync(`${cachePath}.complete`)) 
{ core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); @@ -99527,12 +99527,12 @@ var require_tool_cache = __commonJS({ function findAllVersions2(toolName, arch2) { const versions = []; arch2 = arch2 || os4.arch(); - const toolPath = path18.join(_getCacheDirectory(), toolName); + const toolPath = path19.join(_getCacheDirectory(), toolName); if (fs19.existsSync(toolPath)) { const children = fs19.readdirSync(toolPath); for (const child of children) { if (isExplicitVersion(child)) { - const fullPath = path18.join(toolPath, child, arch2 || ""); + const fullPath = path19.join(toolPath, child, arch2 || ""); if (fs19.existsSync(fullPath) && fs19.existsSync(`${fullPath}.complete`)) { versions.push(child); } @@ -99584,7 +99584,7 @@ var require_tool_cache = __commonJS({ function _createExtractFolder(dest) { return __awaiter2(this, void 0, void 0, function* () { if (!dest) { - dest = path18.join(_getTempDirectory(), crypto2.randomUUID()); + dest = path19.join(_getTempDirectory(), crypto2.randomUUID()); } yield io7.mkdirP(dest); return dest; @@ -99592,7 +99592,7 @@ var require_tool_cache = __commonJS({ } function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { - const folderPath = path18.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); core17.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); @@ -99602,7 +99602,7 @@ var require_tool_cache = __commonJS({ }); } function _completeToolPath(tool, version, arch2) { - const folderPath = path18.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); + const folderPath = path19.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs19.writeFileSync(markerPath, ""); 
core17.debug("finished caching tool"); @@ -102384,13 +102384,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.validateArtifactName = validateArtifactName; - function validateFilePath(path18) { - if (!path18) { + function validateFilePath(path19) { + if (!path19) { throw new Error(`Provided file path input during validation is empty`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path18.includes(invalidCharacterKey)) { - throw new Error(`The path for one of the files in artifact is not valid: ${path18}. Contains the following character: ${errorMessageForCharacter} + if (path19.includes(invalidCharacterKey)) { + throw new Error(`The path for one of the files in artifact is not valid: ${path19}. Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -103291,8 +103291,8 @@ var require_minimatch2 = __commonJS({ return new Minimatch(pattern, options).match(p); }; module2.exports = minimatch; - var path18 = require_path(); - minimatch.sep = path18.sep; + var path19 = require_path(); + minimatch.sep = path19.sep; var GLOBSTAR = /* @__PURE__ */ Symbol("globstar **"); minimatch.GLOBSTAR = GLOBSTAR; var expand2 = require_brace_expansion2(); @@ -103802,8 +103802,8 @@ var require_minimatch2 = __commonJS({ if (this.empty) return f === ""; if (f === "/" && partial) return true; const options = this.options; - if (path18.sep !== "/") { - f = f.split(path18.sep).join("/"); + if (path19.sep !== "/") { + f = f.split(path19.sep).join("/"); } f = f.split(slashSplit); this.debug(this.pattern, "split", f); @@ -103901,8 +103901,8 @@ var require_readdir_glob = __commonJS({ }); }); } - async function* exploreWalkAsync(dir, path18, followSymlinks, useStat, shouldSkip, strict) { - let files = await readdir(path18 + dir, strict); + 
async function* exploreWalkAsync(dir, path19, followSymlinks, useStat, shouldSkip, strict) { + let files = await readdir(path19 + dir, strict); for (const file of files) { let name = file.name; if (name === void 0) { @@ -103911,7 +103911,7 @@ var require_readdir_glob = __commonJS({ } const filename = dir + "/" + name; const relative2 = filename.slice(1); - const absolute = path18 + "/" + relative2; + const absolute = path19 + "/" + relative2; let stats = null; if (useStat || followSymlinks) { stats = await stat(absolute, followSymlinks); @@ -103925,15 +103925,15 @@ var require_readdir_glob = __commonJS({ if (stats.isDirectory()) { if (!shouldSkip(relative2)) { yield { relative: relative2, absolute, stats }; - yield* exploreWalkAsync(filename, path18, followSymlinks, useStat, shouldSkip, false); + yield* exploreWalkAsync(filename, path19, followSymlinks, useStat, shouldSkip, false); } } else { yield { relative: relative2, absolute, stats }; } } } - async function* explore(path18, followSymlinks, useStat, shouldSkip) { - yield* exploreWalkAsync("", path18, followSymlinks, useStat, shouldSkip, true); + async function* explore(path19, followSymlinks, useStat, shouldSkip) { + yield* exploreWalkAsync("", path19, followSymlinks, useStat, shouldSkip, true); } function readOptions(options) { return { @@ -105971,14 +105971,14 @@ var require_polyfills = __commonJS({ fs19.fstatSync = statFixSync(fs19.fstatSync); fs19.lstatSync = statFixSync(fs19.lstatSync); if (fs19.chmod && !fs19.lchmod) { - fs19.lchmod = function(path18, mode, cb) { + fs19.lchmod = function(path19, mode, cb) { if (cb) process.nextTick(cb); }; fs19.lchmodSync = function() { }; } if (fs19.chown && !fs19.lchown) { - fs19.lchown = function(path18, uid, gid, cb) { + fs19.lchown = function(path19, uid, gid, cb) { if (cb) process.nextTick(cb); }; fs19.lchownSync = function() { @@ -106045,9 +106045,9 @@ var require_polyfills = __commonJS({ }; })(fs19.readSync); function patchLchmod(fs20) { - fs20.lchmod = 
function(path18, mode, callback) { + fs20.lchmod = function(path19, mode, callback) { fs20.open( - path18, + path19, constants.O_WRONLY | constants.O_SYMLINK, mode, function(err, fd) { @@ -106063,8 +106063,8 @@ var require_polyfills = __commonJS({ } ); }; - fs20.lchmodSync = function(path18, mode) { - var fd = fs20.openSync(path18, constants.O_WRONLY | constants.O_SYMLINK, mode); + fs20.lchmodSync = function(path19, mode) { + var fd = fs20.openSync(path19, constants.O_WRONLY | constants.O_SYMLINK, mode); var threw = true; var ret; try { @@ -106085,8 +106085,8 @@ var require_polyfills = __commonJS({ } function patchLutimes(fs20) { if (constants.hasOwnProperty("O_SYMLINK") && fs20.futimes) { - fs20.lutimes = function(path18, at, mt, cb) { - fs20.open(path18, constants.O_SYMLINK, function(er, fd) { + fs20.lutimes = function(path19, at, mt, cb) { + fs20.open(path19, constants.O_SYMLINK, function(er, fd) { if (er) { if (cb) cb(er); return; @@ -106098,8 +106098,8 @@ var require_polyfills = __commonJS({ }); }); }; - fs20.lutimesSync = function(path18, at, mt) { - var fd = fs20.openSync(path18, constants.O_SYMLINK); + fs20.lutimesSync = function(path19, at, mt) { + var fd = fs20.openSync(path19, constants.O_SYMLINK); var ret; var threw = true; try { @@ -106217,11 +106217,11 @@ var require_legacy_streams = __commonJS({ ReadStream, WriteStream }; - function ReadStream(path18, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path18, options); + function ReadStream(path19, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path19, options); Stream.call(this); var self2 = this; - this.path = path18; + this.path = path19; this.fd = null; this.readable = true; this.paused = false; @@ -106266,10 +106266,10 @@ var require_legacy_streams = __commonJS({ self2._read(); }); } - function WriteStream(path18, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path18, options); + function WriteStream(path19, options) { + if 
(!(this instanceof WriteStream)) return new WriteStream(path19, options); Stream.call(this); - this.path = path18; + this.path = path19; this.fd = null; this.writable = true; this.flags = "w"; @@ -106412,14 +106412,14 @@ var require_graceful_fs = __commonJS({ fs20.createWriteStream = createWriteStream3; var fs$readFile = fs20.readFile; fs20.readFile = readFile; - function readFile(path18, options, cb) { + function readFile(path19, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$readFile(path18, options, cb); - function go$readFile(path19, options2, cb2, startTime) { - return fs$readFile(path19, options2, function(err) { + return go$readFile(path19, options, cb); + function go$readFile(path20, options2, cb2, startTime) { + return fs$readFile(path20, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$readFile, [path19, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$readFile, [path20, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106429,14 +106429,14 @@ var require_graceful_fs = __commonJS({ } var fs$writeFile = fs20.writeFile; fs20.writeFile = writeFile; - function writeFile(path18, data, options, cb) { + function writeFile(path19, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$writeFile(path18, data, options, cb); - function go$writeFile(path19, data2, options2, cb2, startTime) { - return fs$writeFile(path19, data2, options2, function(err) { + return go$writeFile(path19, data, options, cb); + function go$writeFile(path20, data2, options2, cb2, startTime) { + return fs$writeFile(path20, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$writeFile, [path19, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$writeFile, [path20, 
data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106447,14 +106447,14 @@ var require_graceful_fs = __commonJS({ var fs$appendFile = fs20.appendFile; if (fs$appendFile) fs20.appendFile = appendFile; - function appendFile(path18, data, options, cb) { + function appendFile(path19, data, options, cb) { if (typeof options === "function") cb = options, options = null; - return go$appendFile(path18, data, options, cb); - function go$appendFile(path19, data2, options2, cb2, startTime) { - return fs$appendFile(path19, data2, options2, function(err) { + return go$appendFile(path19, data, options, cb); + function go$appendFile(path20, data2, options2, cb2, startTime) { + return fs$appendFile(path20, data2, options2, function(err) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$appendFile, [path19, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$appendFile, [path20, data2, options2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -106485,31 +106485,31 @@ var require_graceful_fs = __commonJS({ var fs$readdir = fs20.readdir; fs20.readdir = readdir; var noReaddirOptionVersions = /^v[0-5]\./; - function readdir(path18, options, cb) { + function readdir(path19, options, cb) { if (typeof options === "function") cb = options, options = null; - var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path19, options2, cb2, startTime) { - return fs$readdir(path19, fs$readdirCallback( - path19, + var go$readdir = noReaddirOptionVersions.test(process.version) ? 
function go$readdir2(path20, options2, cb2, startTime) { + return fs$readdir(path20, fs$readdirCallback( + path20, options2, cb2, startTime )); - } : function go$readdir2(path19, options2, cb2, startTime) { - return fs$readdir(path19, options2, fs$readdirCallback( - path19, + } : function go$readdir2(path20, options2, cb2, startTime) { + return fs$readdir(path20, options2, fs$readdirCallback( + path20, options2, cb2, startTime )); }; - return go$readdir(path18, options, cb); - function fs$readdirCallback(path19, options2, cb2, startTime) { + return go$readdir(path19, options, cb); + function fs$readdirCallback(path20, options2, cb2, startTime) { return function(err, files) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) enqueue([ go$readdir, - [path19, options2, cb2], + [path20, options2, cb2], err, startTime || Date.now(), Date.now() @@ -106580,7 +106580,7 @@ var require_graceful_fs = __commonJS({ enumerable: true, configurable: true }); - function ReadStream(path18, options) { + function ReadStream(path19, options) { if (this instanceof ReadStream) return fs$ReadStream.apply(this, arguments), this; else @@ -106600,7 +106600,7 @@ var require_graceful_fs = __commonJS({ } }); } - function WriteStream(path18, options) { + function WriteStream(path19, options) { if (this instanceof WriteStream) return fs$WriteStream.apply(this, arguments), this; else @@ -106618,22 +106618,22 @@ var require_graceful_fs = __commonJS({ } }); } - function createReadStream2(path18, options) { - return new fs20.ReadStream(path18, options); + function createReadStream2(path19, options) { + return new fs20.ReadStream(path19, options); } - function createWriteStream3(path18, options) { - return new fs20.WriteStream(path18, options); + function createWriteStream3(path19, options) { + return new fs20.WriteStream(path19, options); } var fs$open = fs20.open; fs20.open = open; - function open(path18, flags, mode, cb) { + function open(path19, flags, mode, cb) { if (typeof mode === 
"function") cb = mode, mode = null; - return go$open(path18, flags, mode, cb); - function go$open(path19, flags2, mode2, cb2, startTime) { - return fs$open(path19, flags2, mode2, function(err, fd) { + return go$open(path19, flags, mode, cb); + function go$open(path20, flags2, mode2, cb2, startTime) { + return fs$open(path20, flags2, mode2, function(err, fd) { if (err && (err.code === "EMFILE" || err.code === "ENFILE")) - enqueue([go$open, [path19, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); + enqueue([go$open, [path20, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]); else { if (typeof cb2 === "function") cb2.apply(this, arguments); @@ -108734,22 +108734,22 @@ var require_lazystream = __commonJS({ // node_modules/normalize-path/index.js var require_normalize_path = __commonJS({ "node_modules/normalize-path/index.js"(exports2, module2) { - module2.exports = function(path18, stripTrailing) { - if (typeof path18 !== "string") { + module2.exports = function(path19, stripTrailing) { + if (typeof path19 !== "string") { throw new TypeError("expected path to be a string"); } - if (path18 === "\\" || path18 === "/") return "/"; - var len = path18.length; - if (len <= 1) return path18; + if (path19 === "\\" || path19 === "/") return "/"; + var len = path19.length; + if (len <= 1) return path19; var prefix = ""; - if (len > 4 && path18[3] === "\\") { - var ch = path18[2]; - if ((ch === "?" || ch === ".") && path18.slice(0, 2) === "\\\\") { - path18 = path18.slice(2); + if (len > 4 && path19[3] === "\\") { + var ch = path19[2]; + if ((ch === "?" || ch === ".") && path19.slice(0, 2) === "\\\\") { + path19 = path19.slice(2); prefix = "//"; } } - var segs = path18.split(/[/\\]+/); + var segs = path19.split(/[/\\]+/); if (stripTrailing !== false && segs[segs.length - 1] === "") { segs.pop(); } @@ -117371,11 +117371,11 @@ var require_commonjs20 = __commonJS({ return (f) => f.length === len && f !== "." 
&& f !== ".."; }; var defaultPlatform = typeof process === "object" && process ? typeof process.env === "object" && process.env && process.env.__MINIMATCH_TESTING_PLATFORM__ || process.platform : "posix"; - var path18 = { + var path19 = { win32: { sep: "\\" }, posix: { sep: "/" } }; - exports2.sep = defaultPlatform === "win32" ? path18.win32.sep : path18.posix.sep; + exports2.sep = defaultPlatform === "win32" ? path19.win32.sep : path19.posix.sep; exports2.minimatch.sep = exports2.sep; exports2.GLOBSTAR = /* @__PURE__ */ Symbol("globstar **"); exports2.minimatch.GLOBSTAR = exports2.GLOBSTAR; @@ -120660,12 +120660,12 @@ var require_commonjs23 = __commonJS({ /** * Get the Path object referenced by the string path, resolved from this Path */ - resolve(path18) { - if (!path18) { + resolve(path19) { + if (!path19) { return this; } - const rootPath = this.getRootString(path18); - const dir = path18.substring(rootPath.length); + const rootPath = this.getRootString(path19); + const dir = path19.substring(rootPath.length); const dirParts = dir.split(this.splitSep); const result = rootPath ? this.getRoot(rootPath).#resolveParts(dirParts) : this.#resolveParts(dirParts); return result; @@ -121418,8 +121418,8 @@ var require_commonjs23 = __commonJS({ /** * @internal */ - getRootString(path18) { - return node_path_1.win32.parse(path18).root; + getRootString(path19) { + return node_path_1.win32.parse(path19).root; } /** * @internal @@ -121466,8 +121466,8 @@ var require_commonjs23 = __commonJS({ /** * @internal */ - getRootString(path18) { - return path18.startsWith("/") ? "/" : ""; + getRootString(path19) { + return path19.startsWith("/") ? 
"/" : ""; } /** * @internal @@ -121557,11 +121557,11 @@ var require_commonjs23 = __commonJS({ /** * Get the depth of a provided path, string, or the cwd */ - depth(path18 = this.cwd) { - if (typeof path18 === "string") { - path18 = this.cwd.resolve(path18); + depth(path19 = this.cwd) { + if (typeof path19 === "string") { + path19 = this.cwd.resolve(path19); } - return path18.depth(); + return path19.depth(); } /** * Return the cache of child entries. Exposed so subclasses can create @@ -122048,9 +122048,9 @@ var require_commonjs23 = __commonJS({ process2(); return results; } - chdir(path18 = this.cwd) { + chdir(path19 = this.cwd) { const oldCwd = this.cwd; - this.cwd = typeof path18 === "string" ? this.cwd.resolve(path18) : path18; + this.cwd = typeof path19 === "string" ? this.cwd.resolve(path19) : path19; this.cwd[setAsCwd](oldCwd); } }; @@ -122438,8 +122438,8 @@ var require_processor = __commonJS({ } // match, absolute, ifdir entries() { - return [...this.store.entries()].map(([path18, n]) => [ - path18, + return [...this.store.entries()].map(([path19, n]) => [ + path19, !!(n & 2), !!(n & 1) ]); @@ -122657,9 +122657,9 @@ var require_walker = __commonJS({ signal; maxDepth; includeChildMatches; - constructor(patterns, path18, opts) { + constructor(patterns, path19, opts) { this.patterns = patterns; - this.path = path18; + this.path = path19; this.opts = opts; this.#sep = !opts.posix && opts.platform === "win32" ? 
"\\" : "/"; this.includeChildMatches = opts.includeChildMatches !== false; @@ -122678,11 +122678,11 @@ var require_walker = __commonJS({ }); } } - #ignored(path18) { - return this.seen.has(path18) || !!this.#ignore?.ignored?.(path18); + #ignored(path19) { + return this.seen.has(path19) || !!this.#ignore?.ignored?.(path19); } - #childrenIgnored(path18) { - return !!this.#ignore?.childrenIgnored?.(path18); + #childrenIgnored(path19) { + return !!this.#ignore?.childrenIgnored?.(path19); } // backpressure mechanism pause() { @@ -122898,8 +122898,8 @@ var require_walker = __commonJS({ exports2.GlobUtil = GlobUtil; var GlobWalker = class extends GlobUtil { matches = /* @__PURE__ */ new Set(); - constructor(patterns, path18, opts) { - super(patterns, path18, opts); + constructor(patterns, path19, opts) { + super(patterns, path19, opts); } matchEmit(e) { this.matches.add(e); @@ -122937,8 +122937,8 @@ var require_walker = __commonJS({ exports2.GlobWalker = GlobWalker; var GlobStream = class extends GlobUtil { results; - constructor(patterns, path18, opts) { - super(patterns, path18, opts); + constructor(patterns, path19, opts) { + super(patterns, path19, opts); this.results = new minipass_1.Minipass({ signal: this.signal, objectMode: true @@ -123294,7 +123294,7 @@ var require_commonjs24 = __commonJS({ var require_file4 = __commonJS({ "node_modules/archiver-utils/file.js"(exports2, module2) { var fs19 = require_graceful_fs(); - var path18 = require("path"); + var path19 = require("path"); var flatten = require_flatten(); var difference = require_difference(); var union = require_union(); @@ -123319,7 +123319,7 @@ var require_file4 = __commonJS({ return result; }; file.exists = function() { - var filepath = path18.join.apply(path18, arguments); + var filepath = path19.join.apply(path19, arguments); return fs19.existsSync(filepath); }; file.expand = function(...args) { @@ -123333,7 +123333,7 @@ var require_file4 = __commonJS({ }); if (options.filter) { matches = 
matches.filter(function(filepath) { - filepath = path18.join(options.cwd || "", filepath); + filepath = path19.join(options.cwd || "", filepath); try { if (typeof options.filter === "function") { return options.filter(filepath); @@ -123350,7 +123350,7 @@ var require_file4 = __commonJS({ file.expandMapping = function(patterns, destBase, options) { options = Object.assign({ rename: function(destBase2, destPath) { - return path18.join(destBase2 || "", destPath); + return path19.join(destBase2 || "", destPath); } }, options); var files = []; @@ -123358,14 +123358,14 @@ var require_file4 = __commonJS({ file.expand(options, patterns).forEach(function(src) { var destPath = src; if (options.flatten) { - destPath = path18.basename(destPath); + destPath = path19.basename(destPath); } if (options.ext) { destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); } var dest = options.rename(destBase, destPath, options); if (options.cwd) { - src = path18.join(options.cwd, src); + src = path19.join(options.cwd, src); } dest = dest.replace(pathSeparatorRe, "/"); src = src.replace(pathSeparatorRe, "/"); @@ -123447,7 +123447,7 @@ var require_file4 = __commonJS({ var require_archiver_utils = __commonJS({ "node_modules/archiver-utils/index.js"(exports2, module2) { var fs19 = require_graceful_fs(); - var path18 = require("path"); + var path19 = require("path"); var isStream = require_is_stream(); var lazystream = require_lazystream(); var normalizePath = require_normalize_path(); @@ -123535,11 +123535,11 @@ var require_archiver_utils = __commonJS({ if (!file) { return callback(null, results); } - filepath = path18.join(dirpath, file); + filepath = path19.join(dirpath, file); fs19.stat(filepath, function(err2, stats) { results.push({ path: filepath, - relative: path18.relative(base, filepath).replace(/\\/g, "/"), + relative: path19.relative(base, filepath).replace(/\\/g, "/"), stats }); if (stats && stats.isDirectory()) { @@ -123601,7 +123601,7 @@ var require_core2 = __commonJS({ var 
fs19 = require("fs"); var glob2 = require_readdir_glob(); var async = require_async(); - var path18 = require("path"); + var path19 = require("path"); var util = require_archiver_utils(); var inherits = require("util").inherits; var ArchiverError = require_error3(); @@ -123877,9 +123877,9 @@ var require_core2 = __commonJS({ task.source = Buffer.concat([]); } else if (stats.isSymbolicLink() && this._moduleSupports("symlink")) { var linkPath = fs19.readlinkSync(task.filepath); - var dirName = path18.dirname(task.filepath); + var dirName = path19.dirname(task.filepath); task.data.type = "symlink"; - task.data.linkname = path18.relative(dirName, path18.resolve(dirName, linkPath)); + task.data.linkname = path19.relative(dirName, path19.resolve(dirName, linkPath)); task.data.sourceType = "buffer"; task.source = Buffer.concat([]); } else { @@ -128329,8 +128329,8 @@ var require_context2 = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path18 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path18} does not exist${os_1.EOL}`); + const path19 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path19} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -128915,14 +128915,14 @@ var require_util24 = __commonJS({ } const port = url2.port != null ? url2.port : url2.protocol === "https:" ? 443 : 80; let origin = url2.origin != null ? url2.origin : `${url2.protocol}//${url2.hostname}:${port}`; - let path18 = url2.path != null ? url2.path : `${url2.pathname || ""}${url2.search || ""}`; + let path19 = url2.path != null ? 
url2.path : `${url2.pathname || ""}${url2.search || ""}`; if (origin.endsWith("/")) { origin = origin.substring(0, origin.length - 1); } - if (path18 && !path18.startsWith("/")) { - path18 = `/${path18}`; + if (path19 && !path19.startsWith("/")) { + path19 = `/${path19}`; } - url2 = new URL(origin + path18); + url2 = new URL(origin + path19); } return url2; } @@ -130536,20 +130536,20 @@ var require_parseParams = __commonJS({ var require_basename = __commonJS({ "node_modules/@fastify/busboy/lib/utils/basename.js"(exports2, module2) { "use strict"; - module2.exports = function basename2(path18) { - if (typeof path18 !== "string") { + module2.exports = function basename2(path19) { + if (typeof path19 !== "string") { return ""; } - for (var i = path18.length - 1; i >= 0; --i) { - switch (path18.charCodeAt(i)) { + for (var i = path19.length - 1; i >= 0; --i) { + switch (path19.charCodeAt(i)) { case 47: // '/' case 92: - path18 = path18.slice(i + 1); - return path18 === ".." || path18 === "." ? "" : path18; + path19 = path19.slice(i + 1); + return path19 === ".." || path19 === "." ? "" : path19; } } - return path18 === ".." || path18 === "." ? "" : path18; + return path19 === ".." || path19 === "." ? 
"" : path19; }; } }); @@ -133579,7 +133579,7 @@ var require_request5 = __commonJS({ } var Request = class _Request { constructor(origin, { - path: path18, + path: path19, method, body, headers, @@ -133593,11 +133593,11 @@ var require_request5 = __commonJS({ throwOnError, expectContinue }, handler2) { - if (typeof path18 !== "string") { + if (typeof path19 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path18[0] !== "/" && !(path18.startsWith("http://") || path18.startsWith("https://")) && method !== "CONNECT") { + } else if (path19[0] !== "/" && !(path19.startsWith("http://") || path19.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.exec(path18) !== null) { + } else if (invalidPathRegex.exec(path19) !== null) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -133660,7 +133660,7 @@ var require_request5 = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? util.buildURL(path18, query) : path18; + this.path = query ? util.buildURL(path19, query) : path19; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -134668,9 +134668,9 @@ var require_RedirectHandler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path18 = search ? `${pathname}${search}` : pathname; + const path19 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path18; + this.opts.path = path19; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -135910,7 +135910,7 @@ var require_client3 = __commonJS({ writeH2(client, client[kHTTP2Session], request2); return; } - const { body, method, path: path18, host, upgrade, headers, blocking, reset } = request2; + const { body, method, path: path19, host, upgrade, headers, blocking, reset } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -135960,7 +135960,7 @@ var require_client3 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path18} HTTP/1.1\r + let header = `${method} ${path19} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -136023,7 +136023,7 @@ upgrade: ${upgrade}\r return true; } function writeH2(client, session, request2) { - const { body, method, path: path18, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { body, method, path: path19, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let headers; if (typeof reqHeaders === "string") headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()); else headers = reqHeaders; @@ -136066,7 +136066,7 @@ upgrade: ${upgrade}\r }); return true; } - headers[HTTP2_HEADER_PATH] = path18; + headers[HTTP2_HEADER_PATH] = path19; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -138306,20 +138306,20 @@ var require_mock_utils3 = __commonJS({ } return true; } - function safeUrl(path18) { - if (typeof path18 !== "string") { - return path18; + function safeUrl(path19) { + if (typeof path19 !== "string") 
{ + return path19; } - const pathSegments = path18.split("?"); + const pathSegments = path19.split("?"); if (pathSegments.length !== 2) { - return path18; + return path19; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path18, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path18); + function matchKey(mockDispatch2, { path: path19, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path19); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -138337,7 +138337,7 @@ var require_mock_utils3 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? 
safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path18 }) => matchValue(safeUrl(path18), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path19 }) => matchValue(safeUrl(path19), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -138374,9 +138374,9 @@ var require_mock_utils3 = __commonJS({ } } function buildKey(opts) { - const { path: path18, method, body, headers, query } = opts; + const { path: path19, method, body, headers, query } = opts; return { - path: path18, + path: path19, method, body, headers, @@ -138825,10 +138825,10 @@ var require_pending_interceptors_formatter3 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path18, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path19, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path18, + Path: path19, "Status code": statusCode, Persistent: persist ? 
"\u2705" : "\u274C", Invocations: timesInvoked, @@ -143448,8 +143448,8 @@ var require_util29 = __commonJS({ } } } - function validateCookiePath(path18) { - for (const char of path18) { + function validateCookiePath(path19) { + for (const char of path19) { const code = char.charCodeAt(0); if (code < 33 || char === ";") { throw new Error("Invalid cookie path"); @@ -145129,11 +145129,11 @@ var require_undici3 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path18 = opts.path; + let path19 = opts.path; if (!opts.path.startsWith("/")) { - path18 = `/${path18}`; + path19 = `/${path19}`; } - url2 = new URL(util.parseOrigin(url2).origin + path18); + url2 = new URL(util.parseOrigin(url2).origin + path19); } else { if (!opts) { opts = typeof url2 === "object" ? url2 : {}; @@ -149983,7 +149983,7 @@ var require_traverse = __commonJS({ })(this.value); }; function walk(root, cb, immutable) { - var path18 = []; + var path19 = []; var parents = []; var alive = true; return (function walker(node_) { @@ -149992,11 +149992,11 @@ var require_traverse = __commonJS({ var state = { node, node_, - path: [].concat(path18), + path: [].concat(path19), parent: parents.slice(-1)[0], - key: path18.slice(-1)[0], - isRoot: path18.length === 0, - level: path18.length, + key: path19.slice(-1)[0], + isRoot: path19.length === 0, + level: path19.length, circular: null, update: function(x) { if (!state.isRoot) { @@ -150051,7 +150051,7 @@ var require_traverse = __commonJS({ parents.push(state); var keys = Object.keys(state.node); keys.forEach(function(key, i2) { - path18.push(key); + path19.push(key); if (modifiers.pre) modifiers.pre.call(state, state.node[key], key); var child = walker(state.node[key]); if (immutable && Object.hasOwnProperty.call(state.node, key)) { @@ -150060,7 +150060,7 @@ var require_traverse = __commonJS({ child.isLast = i2 == keys.length - 1; child.isFirst = i2 == 0; if (modifiers.post) modifiers.post.call(state, 
child); - path18.pop(); + path19.pop(); }); parents.pop(); } @@ -151081,11 +151081,11 @@ var require_unzip_stream = __commonJS({ return requiredLength; case states.CENTRAL_DIRECTORY_FILE_HEADER_SUFFIX: var isUtf8 = (this.parsedEntity.flags & 2048) !== 0; - var path18 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); + var path19 = this._decodeString(chunk.slice(0, this.parsedEntity.fileNameLength), isUtf8); var extraDataBuffer = chunk.slice(this.parsedEntity.fileNameLength, this.parsedEntity.fileNameLength + this.parsedEntity.extraFieldLength); var extra = this._readExtraFields(extraDataBuffer); if (extra && extra.parsed && extra.parsed.path && !isUtf8) { - path18 = extra.parsed.path; + path19 = extra.parsed.path; } this.parsedEntity.extra = extra.parsed; var isUnix = (this.parsedEntity.versionMadeBy & 65280) >> 8 === 3; @@ -151097,7 +151097,7 @@ var require_unzip_stream = __commonJS({ } if (this.options.debug) { const debugObj = Object.assign({}, this.parsedEntity, { - path: path18, + path: path19, flags: "0x" + this.parsedEntity.flags.toString(16), unixAttrs: unixAttrs && "0" + unixAttrs.toString(8), isSymlink, @@ -151534,7 +151534,7 @@ var require_parser_stream = __commonJS({ // node_modules/mkdirp/index.js var require_mkdirp = __commonJS({ "node_modules/mkdirp/index.js"(exports2, module2) { - var path18 = require("path"); + var path19 = require("path"); var fs19 = require("fs"); var _0777 = parseInt("0777", 8); module2.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; @@ -151554,7 +151554,7 @@ var require_mkdirp = __commonJS({ var cb = f || /* istanbul ignore next */ function() { }; - p = path18.resolve(p); + p = path19.resolve(p); xfs.mkdir(p, mode, function(er) { if (!er) { made = made || p; @@ -151562,8 +151562,8 @@ var require_mkdirp = __commonJS({ } switch (er.code) { case "ENOENT": - if (path18.dirname(p) === p) return cb(er); - mkdirP(path18.dirname(p), opts, function(er2, made2) { + if (path19.dirname(p) === p) return 
cb(er); + mkdirP(path19.dirname(p), opts, function(er2, made2) { if (er2) cb(er2, made2); else mkdirP(p, opts, cb, made2); }); @@ -151590,14 +151590,14 @@ var require_mkdirp = __commonJS({ mode = _0777; } if (!made) made = null; - p = path18.resolve(p); + p = path19.resolve(p); try { xfs.mkdirSync(p, mode); made = made || p; } catch (err0) { switch (err0.code) { case "ENOENT": - made = sync(path18.dirname(p), opts, made); + made = sync(path19.dirname(p), opts, made); sync(p, opts, made); break; // In the case of any other error, just see if there's a dir @@ -151623,7 +151623,7 @@ var require_mkdirp = __commonJS({ var require_extract2 = __commonJS({ "node_modules/unzip-stream/lib/extract.js"(exports2, module2) { var fs19 = require("fs"); - var path18 = require("path"); + var path19 = require("path"); var util = require("util"); var mkdirp = require_mkdirp(); var Transform = require("stream").Transform; @@ -151665,8 +151665,8 @@ var require_extract2 = __commonJS({ }; Extract.prototype._processEntry = function(entry) { var self2 = this; - var destPath = path18.join(this.opts.path, entry.path); - var directory = entry.isDirectory ? destPath : path18.dirname(destPath); + var destPath = path19.join(this.opts.path, entry.path); + var directory = entry.isDirectory ? 
destPath : path19.dirname(destPath); this.unfinishedEntries++; var writeFileFn = function() { var pipedStream = fs19.createWriteStream(destPath); @@ -151793,10 +151793,10 @@ var require_download_artifact = __commonJS({ parsed.search = ""; return parsed.toString(); }; - function exists(path18) { + function exists(path19) { return __awaiter2(this, void 0, void 0, function* () { try { - yield promises_1.default.access(path18); + yield promises_1.default.access(path19); return true; } catch (error3) { if (error3.code === "ENOENT") { @@ -152028,12 +152028,12 @@ var require_dist_node11 = __commonJS({ octokit.log.debug("request", options); const start = Date.now(); const requestOptions = octokit.request.endpoint.parse(options); - const path18 = requestOptions.url.replace(options.baseUrl, ""); + const path19 = requestOptions.url.replace(options.baseUrl, ""); return request2(options).then((response) => { - octokit.log.info(`${requestOptions.method} ${path18} - ${response.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path19} - ${response.status} in ${Date.now() - start}ms`); return response; }).catch((error3) => { - octokit.log.info(`${requestOptions.method} ${path18} - ${error3.status} in ${Date.now() - start}ms`); + octokit.log.info(`${requestOptions.method} ${path19} - ${error3.status} in ${Date.now() - start}ms`); throw error3; }); }); @@ -154128,7 +154128,7 @@ var require_path_utils2 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -154138,7 +154138,7 @@ var require_path_utils2 = __commonJS({ } exports2.toWin32Path = toWin32Path; function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path18.sep); + return pth.replace(/[/\\]/g, path19.sep); } 
exports2.toPlatformPath = toPlatformPath; } @@ -154202,7 +154202,7 @@ var require_io_util2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; var fs19 = __importStar2(require("fs")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); _a = fs19.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; exports2.UV_FS_O_EXLOCK = 268435456; @@ -154251,7 +154251,7 @@ var require_io_util2 = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path18.extname(filePath).toUpperCase(); + const upperExt = path19.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -154275,11 +154275,11 @@ var require_io_util2 = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { try { - const directory = path18.dirname(filePath); - const upperName = path18.basename(filePath).toUpperCase(); + const directory = path19.dirname(filePath); + const upperName = path19.basename(filePath).toUpperCase(); for (const actualName of yield exports2.readdir(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = 
path18.join(directory, actualName); + filePath = path19.join(directory, actualName); break; } } @@ -154374,7 +154374,7 @@ var require_io2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; var assert_1 = require("assert"); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util2()); function cp(source, dest, options = {}) { return __awaiter2(this, void 0, void 0, function* () { @@ -154383,7 +154383,7 @@ var require_io2 = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path18.join(dest, path18.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path19.join(dest, path19.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -154395,7 +154395,7 @@ var require_io2 = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path18.relative(source, newDest) === "") { + if (path19.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -154408,7 +154408,7 @@ var require_io2 = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path18.join(dest, path18.basename(source)); + dest = path19.join(dest, path19.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -154419,7 +154419,7 @@ var require_io2 = __commonJS({ } } } - yield mkdirP(path18.dirname(dest)); + yield mkdirP(path19.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -154482,7 +154482,7 @@ var require_io2 = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && 
process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path18.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path19.delimiter)) { if (extension) { extensions.push(extension); } @@ -154495,12 +154495,12 @@ var require_io2 = __commonJS({ } return []; } - if (tool.includes(path18.sep)) { + if (tool.includes(path19.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path18.delimiter)) { + for (const p of process.env.PATH.split(path19.delimiter)) { if (p) { directories.push(p); } @@ -154508,7 +154508,7 @@ var require_io2 = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path18.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path19.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -154624,7 +154624,7 @@ var require_toolrunner2 = __commonJS({ var os4 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var io7 = __importStar2(require_io2()); var ioUtil = __importStar2(require_io_util2()); var timers_1 = require("timers"); @@ -154839,7 +154839,7 @@ var require_toolrunner2 = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path18.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path19.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io7.which(this.toolPath, true); return new Promise((resolve8, reject) => __awaiter2(this, void 0, void 0, function* () { @@ -155339,7 +155339,7 @@ var require_core3 = 
__commonJS({ var file_command_1 = require_file_command2(); var utils_1 = require_utils12(); var os4 = __importStar2(require("os")); - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils2(); var ExitCode; (function(ExitCode2) { @@ -155367,7 +155367,7 @@ var require_core3 = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path18.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path19.delimiter}${process.env["PATH"]}`; } exports2.addPath = addPath; function getInput2(name, options) { @@ -155543,13 +155543,13 @@ These characters are not allowed in the artifact name due to limitations with ce (0, core_1.info)(`Artifact name is valid!`); } exports2.checkArtifactName = checkArtifactName; - function checkArtifactFilePath(path18) { - if (!path18) { - throw new Error(`Artifact path: ${path18}, is incorrectly provided`); + function checkArtifactFilePath(path19) { + if (!path19) { + throw new Error(`Artifact path: ${path19}, is incorrectly provided`); } for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path18.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path18}. Contains the following character: ${errorMessageForCharacter} + if (path19.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path19}. 
Contains the following character: ${errorMessageForCharacter} Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} @@ -155640,7 +155640,7 @@ var require_tmp = __commonJS({ "node_modules/tmp/lib/tmp.js"(exports2, module2) { var fs19 = require("fs"); var os4 = require("os"); - var path18 = require("path"); + var path19 = require("path"); var crypto2 = require("crypto"); var _c = { fs: fs19.constants, os: os4.constants }; var RANDOM_CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"; @@ -155847,35 +155847,35 @@ var require_tmp = __commonJS({ return [actualOptions, callback]; } function _resolvePath(name, tmpDir, cb) { - const pathToResolve = path18.isAbsolute(name) ? name : path18.join(tmpDir, name); + const pathToResolve = path19.isAbsolute(name) ? name : path19.join(tmpDir, name); fs19.stat(pathToResolve, function(err) { if (err) { - fs19.realpath(path18.dirname(pathToResolve), function(err2, parentDir) { + fs19.realpath(path19.dirname(pathToResolve), function(err2, parentDir) { if (err2) return cb(err2); - cb(null, path18.join(parentDir, path18.basename(pathToResolve))); + cb(null, path19.join(parentDir, path19.basename(pathToResolve))); }); } else { - fs19.realpath(path18, cb); + fs19.realpath(path19, cb); } }); } function _resolvePathSync(name, tmpDir) { - const pathToResolve = path18.isAbsolute(name) ? name : path18.join(tmpDir, name); + const pathToResolve = path19.isAbsolute(name) ? 
name : path19.join(tmpDir, name); try { fs19.statSync(pathToResolve); return fs19.realpathSync(pathToResolve); } catch (_err) { - const parentDir = fs19.realpathSync(path18.dirname(pathToResolve)); - return path18.join(parentDir, path18.basename(pathToResolve)); + const parentDir = fs19.realpathSync(path19.dirname(pathToResolve)); + return path19.join(parentDir, path19.basename(pathToResolve)); } } function _generateTmpName(opts) { const tmpDir = opts.tmpdir; if (!_isUndefined(opts.name)) { - return path18.join(tmpDir, opts.dir, opts.name); + return path19.join(tmpDir, opts.dir, opts.name); } if (!_isUndefined(opts.template)) { - return path18.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + return path19.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); } const name = [ opts.prefix ? opts.prefix : "tmp", @@ -155885,13 +155885,13 @@ var require_tmp = __commonJS({ _randomChars(12), opts.postfix ? "-" + opts.postfix : "" ].join(""); - return path18.join(tmpDir, opts.dir, name); + return path19.join(tmpDir, opts.dir, name); } function _assertOptionsBase(options) { if (!_isUndefined(options.name)) { const name = options.name; - if (path18.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); - const basename2 = path18.basename(name); + if (path19.isAbsolute(name)) throw new Error(`name option must not contain an absolute path, found "${name}".`); + const basename2 = path19.basename(name); if (basename2 === ".." || basename2 === "." 
|| basename2 !== name) throw new Error(`name option must not contain a path, found "${name}".`); } @@ -155913,7 +155913,7 @@ var require_tmp = __commonJS({ if (_isUndefined(name)) return cb(null); _resolvePath(name, tmpDir, function(err, resolvedPath) { if (err) return cb(err); - const relativePath = path18.relative(tmpDir, resolvedPath); + const relativePath = path19.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { return cb(new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`)); } @@ -155923,7 +155923,7 @@ var require_tmp = __commonJS({ function _getRelativePathSync(option, name, tmpDir) { if (_isUndefined(name)) return; const resolvedPath = _resolvePathSync(name, tmpDir); - const relativePath = path18.relative(tmpDir, resolvedPath); + const relativePath = path19.relative(tmpDir, resolvedPath); if (!resolvedPath.startsWith(tmpDir)) { throw new Error(`${option} option must be relative to "${tmpDir}", found "${relativePath}".`); } @@ -156003,14 +156003,14 @@ var require_tmp_promise = __commonJS({ var fileWithOptions = promisify( (options, cb) => tmp.file( options, - (err, path18, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path18, fd, cleanup: promisify(cleanup) }) + (err, path19, fd, cleanup) => err ? cb(err) : cb(void 0, { path: path19, fd, cleanup: promisify(cleanup) }) ) ); module2.exports.file = async (options) => fileWithOptions(options); module2.exports.withFile = async function withFile(fn, options) { - const { path: path18, fd, cleanup } = await module2.exports.file(options); + const { path: path19, fd, cleanup } = await module2.exports.file(options); try { - return await fn({ path: path18, fd }); + return await fn({ path: path19, fd }); } finally { await cleanup(); } @@ -156019,14 +156019,14 @@ var require_tmp_promise = __commonJS({ var dirWithOptions = promisify( (options, cb) => tmp.dir( options, - (err, path18, cleanup) => err ? 
cb(err) : cb(void 0, { path: path18, cleanup: promisify(cleanup) }) + (err, path19, cleanup) => err ? cb(err) : cb(void 0, { path: path19, cleanup: promisify(cleanup) }) ) ); module2.exports.dir = async (options) => dirWithOptions(options); module2.exports.withDir = async function withDir(fn, options) { - const { path: path18, cleanup } = await module2.exports.dir(options); + const { path: path19, cleanup } = await module2.exports.dir(options); try { - return await fn({ path: path18 }); + return await fn({ path: path19 }); } finally { await cleanup(); } @@ -157734,21 +157734,21 @@ var require_download_specification = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getDownloadSpecification = void 0; - var path18 = __importStar2(require("path")); + var path19 = __importStar2(require("path")); function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { const directories = /* @__PURE__ */ new Set(); const specifications = { - rootDownloadLocation: includeRootDirectory ? path18.join(downloadPath, artifactName) : downloadPath, + rootDownloadLocation: includeRootDirectory ? path19.join(downloadPath, artifactName) : downloadPath, directoryStructure: [], emptyFilesToCreate: [], filesToDownload: [] }; for (const entry of artifactEntries) { if (entry.path.startsWith(`${artifactName}/`) || entry.path.startsWith(`${artifactName}\\`)) { - const normalizedPathEntry = path18.normalize(entry.path); - const filePath = path18.join(downloadPath, includeRootDirectory ? normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); + const normalizedPathEntry = path19.normalize(entry.path); + const filePath = path19.join(downloadPath, includeRootDirectory ? 
normalizedPathEntry : normalizedPathEntry.replace(artifactName, "")); if (entry.itemType === "file") { - directories.add(path18.dirname(filePath)); + directories.add(path19.dirname(filePath)); if (entry.fileLength === 0) { specifications.emptyFilesToCreate.push(filePath); } else { @@ -157890,7 +157890,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz return uploadResponse; }); } - downloadArtifact(name, path18, options) { + downloadArtifact(name, path19, options) { return __awaiter2(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const artifacts = yield downloadHttpClient.listArtifacts(); @@ -157904,12 +157904,12 @@ Note: The size of downloaded zips can differ significantly from the reported siz throw new Error(`Unable to find an artifact with the name: ${name}`); } const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path18) { - path18 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path19) { + path19 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path18 = (0, path_1.normalize)(path18); - path18 = (0, path_1.resolve)(path18); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path18, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + path19 = (0, path_1.normalize)(path19); + path19 = (0, path_1.resolve)(path19); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path19, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { @@ -157924,7 +157924,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz }; }); } - downloadAllArtifacts(path18) { + downloadAllArtifacts(path19) { return __awaiter2(this, void 0, void 0, function* () { const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); const response = []; @@ -157933,18 +157933,18 @@ Note: The size of downloaded zips can differ significantly from the reported siz core17.info("Unable to find any artifacts for the associated workflow"); return response; } - if (!path18) { - path18 = (0, config_variables_1.getWorkSpaceDirectory)(); + if (!path19) { + path19 = (0, config_variables_1.getWorkSpaceDirectory)(); } - path18 = (0, path_1.normalize)(path18); - path18 = (0, path_1.resolve)(path18); + path19 = (0, path_1.normalize)(path19); + path19 = (0, path_1.resolve)(path19); let downloadedArtifacts = 0; while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path18, true); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path19, true); if (downloadSpecification.filesToDownload.length === 0) { core17.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { @@ -164930,8 +164930,8 @@ var 
getFileOidsUnderPath = async function(basePath) { const match = line.match(regex); if (match) { const oid = match[1]; - const path18 = decodeGitFilePath(match[2]); - fileOidMap[path18] = oid; + const path19 = decodeGitFilePath(match[2]); + fileOidMap[path19] = oid; } else { throw new Error(`Unexpected "git ls-files" output: ${line}`); } @@ -165778,6 +165778,9 @@ function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { function isCodeScanningEnabled(config) { return config.analysisKinds.includes("code-scanning" /* CodeScanning */); } +function isRiskAssessmentEnabled(config) { + return config.analysisKinds.includes("risk-assessment" /* RiskAssessment */); +} // src/setup-codeql.ts var fs10 = __toESM(require("fs")); @@ -167879,6 +167882,7 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs18 = __toESM(require("fs")); +var import_path3 = __toESM(require("path")); var github2 = __toESM(require_github()); // src/upload-lib.ts @@ -169835,6 +169839,26 @@ async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { ); return uploadResult ? 
{ ...uploadResult.statusReport, sarifID: uploadResult.sarifID } : {}; } +async function maybeUploadFailedSarifArtifact(config, features, logger) { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + const failedSarif = failedSarifResult.value; + logger.info( + `Uploading failed SARIF file ${failedSarif.sarifFile} as artifact` + ); + const gitHubVersion = await getGitHubVersion(); + const client = await getArtifactUploaderClient(logger, gitHubVersion.type); + const suffix = getArtifactSuffix(getOptionalInput("matrix")); + const name = `sarif-artifact-${suffix}`; + await client.uploadArtifact( + name, + [import_path3.default.normalize(failedSarif.sarifFile)], + import_path3.default.normalize("..") + ); + return { sarifID: name }; +} async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger) { if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { return { @@ -169849,6 +169873,8 @@ async function tryUploadSarifIfRunFailed(config, repositoryNwo, features, logger features, logger ); + } else if (isRiskAssessmentEnabled(config)) { + return await maybeUploadFailedSarifArtifact(config, features, logger); } else { return { upload_failed_run_skipped_because: "No analysis kind that supports failed SARIF uploads is enabled." 
@@ -169871,7 +169897,7 @@ async function uploadFailureInfo(uploadAllAvailableDebugArtifacts, printDebugLog ); if (uploadFailedSarifResult.upload_failed_run_skipped_because) { logger.debug( - `Won't upload a failed SARIF file for this CodeQL code scanning run because: ${uploadFailedSarifResult.upload_failed_run_skipped_because}.` + `Won't upload a failed SARIF file for this CodeQL analysis because: ${uploadFailedSarifResult.upload_failed_run_skipped_because}.` ); } if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true" && !uploadFailedSarifResult.raw_upload_size_bytes) { diff --git a/src/config-utils.ts b/src/config-utils.ts index 3b23a12bd..86f784e3a 100644 --- a/src/config-utils.ts +++ b/src/config-utils.ts @@ -1470,6 +1470,13 @@ export function isCodeQualityEnabled(config: Config): boolean { return config.analysisKinds.includes(AnalysisKind.CodeQuality); } +/** + * Returns `true` if Code Scanning Risk Assessment analysis is enabled, or `false` if not. + */ +export function isRiskAssessmentEnabled(config: Config): boolean { + return config.analysisKinds.includes(AnalysisKind.RiskAssessment); +} + /** * Returns the primary analysis kind that the Action is initialised with. If there is only * one analysis kind, then that is returned. 
diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index c03a0c1f0..cdd564853 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -1,12 +1,21 @@ import * as fs from "fs"; +import path from "path"; import * as github from "@actions/github"; import * as actionsUtil from "./actions-util"; import { CodeScanning } from "./analyses"; -import { getApiClient } from "./api-client"; +import { getApiClient, getGitHubVersion } from "./api-client"; import { CodeQL, getCodeQL } from "./codeql"; -import { Config, isCodeScanningEnabled } from "./config-utils"; +import { + Config, + isCodeScanningEnabled, + isRiskAssessmentEnabled, +} from "./config-utils"; +import { + getArtifactSuffix, + getArtifactUploaderClient, +} from "./debug-artifacts"; import * as dependencyCaching from "./dependency-caching"; import { EnvVar } from "./environment"; import { Feature, FeatureEnablement } from "./feature-flags"; @@ -160,6 +169,38 @@ async function maybeUploadFailedSarif( : {}; } +/** Uploads a failed SARIF file as workflow artifact, if it can be generated. 
*/ +async function maybeUploadFailedSarifArtifact( + config: Config, + features: FeatureEnablement, + logger: Logger, +): Promise { + const failedSarifResult = await prepareFailedSarif(logger, features, config); + + if (failedSarifResult.isFailure()) { + return failedSarifResult.value; + } + + const failedSarif = failedSarifResult.value; + + logger.info( + `Uploading failed SARIF file ${failedSarif.sarifFile} as artifact`, + ); + + const gitHubVersion = await getGitHubVersion(); + const client = await getArtifactUploaderClient(logger, gitHubVersion.type); + + const suffix = getArtifactSuffix(actionsUtil.getOptionalInput("matrix")); + const name = `sarif-artifact-${suffix}`; + await client.uploadArtifact( + name, + [path.normalize(failedSarif.sarifFile)], + path.normalize(".."), + ); + + return { sarifID: name }; +} + /** * Tries to upload a SARIF file with information about the run, if it failed. * @@ -192,6 +233,8 @@ export async function tryUploadSarifIfRunFailed( features, logger, ); + } else if (isRiskAssessmentEnabled(config)) { + return await maybeUploadFailedSarifArtifact(config, features, logger); } else { return { upload_failed_run_skipped_because: @@ -245,7 +288,7 @@ export async function uploadFailureInfo( if (uploadFailedSarifResult.upload_failed_run_skipped_because) { logger.debug( - "Won't upload a failed SARIF file for this CodeQL code scanning run because: " + + "Won't upload a failed SARIF file for this CodeQL analysis because: " + `${uploadFailedSarifResult.upload_failed_run_skipped_because}.`, ); } From f265dd9392d918a2bd0736228989d389325e9990 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Thu, 26 Feb 2026 18:44:50 +0000 Subject: [PATCH 06/50] Separate `generateFailedSarif` out of `prepareFailedSarif` --- lib/init-action-post.js | 11 ++++++++++- src/init-action-post-helper.ts | 18 +++++++++++++++++- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 5cd5ca0c2..f40d4fd98 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -169806,6 +169806,15 @@ async function prepareFailedSarif(logger, features, config) { } const category = getCategoryInputOrThrow(workflow, jobName, matrix); const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath + ); + return new Success(result); +} +async function generateFailedSarif(features, config, category, checkoutPath) { const databasePath = config.dbLocation; const codeql = await getCodeQL(config.codeQLCmd); const sarifFile = "../codeql-failed-run.sarif"; @@ -169814,7 +169823,7 @@ async function prepareFailedSarif(logger, features, config) { } else { await codeql.databaseExportDiagnostics(databasePath, sarifFile, category); } - return new Success({ sarifFile, category, checkoutPath }); + return { sarifFile, category, checkoutPath }; } async function maybeUploadFailedSarif(config, repositoryNwo, features, logger) { const failedSarifResult = await prepareFailedSarif(logger, features, config); diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index cdd564853..bf0586f0e 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -112,6 +112,22 @@ async function prepareFailedSarif( } const category = getCategoryInputOrThrow(workflow, jobName, matrix); const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath, + ); + return new Success(result); +} + +async 
function generateFailedSarif( + features: FeatureEnablement, + config: Config, + category: string | undefined, + checkoutPath: string, +) { const databasePath = config.dbLocation; const codeql = await getCodeQL(config.codeQLCmd); @@ -128,7 +144,7 @@ async function prepareFailedSarif( await codeql.databaseExportDiagnostics(databasePath, sarifFile, category); } - return new Success({ sarifFile, category, checkoutPath }); + return { sarifFile, category, checkoutPath }; } /** From 5b9d1f4fdf979b0eba7338968177a97591ad986d Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Thu, 26 Feb 2026 18:50:24 +0000 Subject: [PATCH 07/50] Simplify `prepareFailedSarif` for risk assessments --- lib/init-action-post.js | 50 ++++++++++++++++++++-------- src/init-action-post-helper.ts | 60 ++++++++++++++++++++++++---------- 2 files changed, 79 insertions(+), 31 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index f40d4fd98..76b2c4587 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -169793,26 +169793,48 @@ async function prepareFailedSarif(logger, features, config) { upload_failed_run_skipped_because: "CodeQL command not found" }); } - const workflow = await getWorkflow(logger); const jobName = getRequiredEnvParam("GITHUB_JOB"); const matrix = parseMatrixInput(getRequiredInput("matrix")); - const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); - if (!["always", "failure-only"].includes( - getUploadValue(shouldUpload) - ) || shouldSkipSarifUpload()) { + if (shouldSkipSarifUpload()) { return new Failure({ upload_failed_run_skipped_because: "SARIF upload is disabled" }); } - const category = getCategoryInputOrThrow(workflow, jobName, matrix); - const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); - const result = await generateFailedSarif( - features, - config, - category, - checkoutPath - ); - return new Success(result); + if (isRiskAssessmentEnabled(config)) { + if (config.languages.length !== 
1) { + return new Failure({ + upload_failed_run_skipped_because: "Unexpectedly, the configuration is not for a single language." + }); + } + const category = `/language:${config.languages[0]}`; + const checkoutPath = "."; + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath + ); + return new Success(result); + } else { + const workflow = await getWorkflow(logger); + const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); + if (!["always", "failure-only"].includes( + getUploadValue(shouldUpload) + )) { + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled" + }); + } + const category = getCategoryInputOrThrow(workflow, jobName, matrix); + const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath + ); + return new Success(result); + } } async function generateFailedSarif(features, config, category, checkoutPath) { const databasePath = config.dbLocation; diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index bf0586f0e..538042938 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -96,30 +96,56 @@ async function prepareFailedSarif( upload_failed_run_skipped_because: "CodeQL command not found", }); } - const workflow = await getWorkflow(logger); const jobName = getRequiredEnvParam("GITHUB_JOB"); const matrix = parseMatrixInput(actionsUtil.getRequiredInput("matrix")); - const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); - if ( - !["always", "failure-only"].includes( - actionsUtil.getUploadValue(shouldUpload), - ) || - shouldSkipSarifUpload() - ) { + + if (shouldSkipSarifUpload()) { return new Failure({ upload_failed_run_skipped_because: "SARIF upload is disabled", }); } - const category = getCategoryInputOrThrow(workflow, jobName, matrix); - const checkoutPath = 
getCheckoutPathInputOrThrow(workflow, jobName, matrix); - const result = await generateFailedSarif( - features, - config, - category, - checkoutPath, - ); - return new Success(result); + if (isRiskAssessmentEnabled(config)) { + if (config.languages.length !== 1) { + return new Failure({ + upload_failed_run_skipped_because: + "Unexpectedly, the configuration is not for a single language.", + }); + } + + // We can make these assumptions for risk assessments. + const category = `/language:${config.languages[0]}`; + const checkoutPath = "."; + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath, + ); + return new Success(result); + } else { + const workflow = await getWorkflow(logger); + const shouldUpload = getUploadInputOrThrow(workflow, jobName, matrix); + if ( + !["always", "failure-only"].includes( + actionsUtil.getUploadValue(shouldUpload), + ) + ) { + return new Failure({ + upload_failed_run_skipped_because: "SARIF upload is disabled", + }); + } + const category = getCategoryInputOrThrow(workflow, jobName, matrix); + const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix); + + const result = await generateFailedSarif( + features, + config, + category, + checkoutPath, + ); + return new Success(result); + } } async function generateFailedSarif( From 003044eb8479a316ec1b199f4d30ad5b03be81c2 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Thu, 26 Feb 2026 19:17:14 +0000 Subject: [PATCH 08/50] Add test --- src/init-action-post-helper.test.ts | 62 +++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index fac31423c..22659d047 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -1,10 +1,13 @@ +import * as core from "@actions/core"; import test, { ExecutionContext } from "ava"; import * as sinon from "sinon"; import * as actionsUtil from "./actions-util"; import { AnalysisKind } from "./analyses"; +import * as apiClient from "./api-client"; import * as codeql from "./codeql"; import * as configUtils from "./config-utils"; +import * as debugArtifacts from "./debug-artifacts"; import { EnvVar } from "./environment"; import { Feature } from "./feature-flags"; import * as initActionPostHelper from "./init-action-post-helper"; @@ -16,6 +19,7 @@ import { createFeatures, createTestConfig, makeVersionInfo, + RecordingLogger, setupTests, } from "./testing-utils"; import * as uploadLib from "./upload-lib"; @@ -622,3 +626,61 @@ async function testFailedSarifUpload( } return result; } + +test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", async (t) => { + process.env["GITHUB_JOB"] = "analyze"; + process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; + process.env["GITHUB_WORKSPACE"] = + "/home/runner/work/codeql-action-fake-repository/codeql-action-fake-repository"; + + const logger = new RecordingLogger(); + const config = createTestConfig({ + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript"], + }); + const features = createFeatures([]); + + sinon + .stub(apiClient, "getGitHubVersion") + .resolves({ type: util.GitHubVariant.GHES, version: "3.0.0" }); + + const uploadArtifact = sinon.stub().resolves(); + const artifactClient = { uploadArtifact }; + sinon + 
.stub(debugArtifacts, "getArtifactUploaderClient") + .value(() => artifactClient); + + const matrix = JSON.stringify({ + language: "javascript", + category: "/language:javascript", + "build-mode": "none", + runner: "ubuntu-latest", + }); + sinon.stub(core, "getInput").withArgs("matrix").returns(matrix); + + const codeqlObject = await codeql.getCodeQLForTesting(); + sinon.stub(codeqlObject, "databaseExportDiagnostics").resolves(); + sinon.stub(codeqlObject, "diagnosticsExport").resolves(); + + sinon.stub(codeql, "getCodeQL").resolves(codeqlObject); + + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + config, + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + const expectedName = `sarif-artifact-${debugArtifacts.getArtifactSuffix(matrix)}`; + t.is(result.upload_failed_run_skipped_because, undefined); + t.is(result.upload_failed_run_error, undefined); + t.is(result.sarifID, expectedName); + t.assert( + uploadArtifact.calledOnceWith( + expectedName, + sinon.match.array, + sinon.match.string, + ), + ); +}); From ce97dfe40565e250b82eaaabdd757d5545595a58 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Thu, 26 Feb 2026 19:47:55 +0000 Subject: [PATCH 09/50] Sanitise artifact name --- lib/init-action-post.js | 2 +- src/init-action-post-helper.test.ts | 4 +++- src/init-action-post-helper.ts | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 76b2c4587..781e8975a 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -169882,7 +169882,7 @@ async function maybeUploadFailedSarifArtifact(config, features, logger) { const gitHubVersion = await getGitHubVersion(); const client = await getArtifactUploaderClient(logger, gitHubVersion.type); const suffix = getArtifactSuffix(getOptionalInput("matrix")); - const name = `sarif-artifact-${suffix}`; + const name = sanitizeArtifactName(`sarif-artifact-${suffix}`); await client.uploadArtifact( name, [import_path3.default.normalize(failedSarif.sarifFile)], diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index 22659d047..3f064726e 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -672,7 +672,9 @@ test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", asy logger, ); - const expectedName = `sarif-artifact-${debugArtifacts.getArtifactSuffix(matrix)}`; + const expectedName = debugArtifacts.sanitizeArtifactName( + `sarif-artifact-${debugArtifacts.getArtifactSuffix(matrix)}`, + ); t.is(result.upload_failed_run_skipped_because, undefined); t.is(result.upload_failed_run_error, undefined); t.is(result.sarifID, expectedName); diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 538042938..b1789aaba 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -15,6 +15,7 @@ import { import { getArtifactSuffix, getArtifactUploaderClient, + sanitizeArtifactName, } from "./debug-artifacts"; import * as dependencyCaching from "./dependency-caching"; import { EnvVar } from "./environment"; @@ 
-233,7 +234,7 @@ async function maybeUploadFailedSarifArtifact( const client = await getArtifactUploaderClient(logger, gitHubVersion.type); const suffix = getArtifactSuffix(actionsUtil.getOptionalInput("matrix")); - const name = `sarif-artifact-${suffix}`; + const name = sanitizeArtifactName(`sarif-artifact-${suffix}`); await client.uploadArtifact( name, [path.normalize(failedSarif.sarifFile)], From ca32b84657b6298bc8df1fbaa1ec003e60649b73 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Thu, 26 Feb 2026 19:56:07 +0000 Subject: [PATCH 10/50] Ensure correct failed SARIF file names for CSRA --- lib/init-action-post.js | 12 ++++++++---- src/init-action-post-helper.ts | 14 ++++++++++---- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 781e8975a..8c223911c 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -169806,13 +169806,15 @@ async function prepareFailedSarif(logger, features, config) { upload_failed_run_skipped_because: "Unexpectedly, the configuration is not for a single language." 
}); } - const category = `/language:${config.languages[0]}`; + const language = config.languages[0]; + const category = `/language:${language}`; const checkoutPath = "."; const result = await generateFailedSarif( features, config, category, - checkoutPath + checkoutPath, + `../codeql-failed-sarif-${language}${RiskAssessment.sarifExtension}` ); return new Success(result); } else { @@ -169836,10 +169838,12 @@ async function prepareFailedSarif(logger, features, config) { return new Success(result); } } -async function generateFailedSarif(features, config, category, checkoutPath) { +async function generateFailedSarif(features, config, category, checkoutPath, sarifFile) { const databasePath = config.dbLocation; const codeql = await getCodeQL(config.codeQLCmd); - const sarifFile = "../codeql-failed-run.sarif"; + if (sarifFile === void 0) { + sarifFile = "../codeql-failed-run.sarif"; + } if (databasePath === void 0 || !await features.getValue("export_diagnostics_enabled" /* ExportDiagnosticsEnabled */, codeql)) { await codeql.diagnosticsExport(sarifFile, category, config); } else { diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index b1789aaba..6fe38f3bb 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -4,7 +4,7 @@ import path from "path"; import * as github from "@actions/github"; import * as actionsUtil from "./actions-util"; -import { CodeScanning } from "./analyses"; +import { CodeScanning, RiskAssessment } from "./analyses"; import { getApiClient, getGitHubVersion } from "./api-client"; import { CodeQL, getCodeQL } from "./codeql"; import { @@ -115,13 +115,15 @@ async function prepareFailedSarif( } // We can make these assumptions for risk assessments. 
- const category = `/language:${config.languages[0]}`; + const language = config.languages[0]; + const category = `/language:${language}`; const checkoutPath = "."; const result = await generateFailedSarif( features, config, category, checkoutPath, + `../codeql-failed-sarif-${language}${RiskAssessment.sarifExtension}`, ); return new Success(result); } else { @@ -154,11 +156,15 @@ async function generateFailedSarif( config: Config, category: string | undefined, checkoutPath: string, + sarifFile?: string, ) { const databasePath = config.dbLocation; - const codeql = await getCodeQL(config.codeQLCmd); - const sarifFile = "../codeql-failed-run.sarif"; + + // Set the filename for the SARIF file if not already set. + if (sarifFile === undefined) { + sarifFile = "../codeql-failed-run.sarif"; + } // If there is no database or the feature flag is off, we run 'export diagnostics' if ( From 383b86ddcbd007aadb5b5dd9ac58229c6cf87b6d Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Fri, 27 Feb 2026 12:27:32 +0000 Subject: [PATCH 11/50] Refactor some test setup code into `mockRiskAssessmentEnv` --- src/init-action-post-helper.test.ts | 57 ++++++++++++++++------------- 1 file changed, 32 insertions(+), 25 deletions(-) diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index 3f064726e..fa53ec92a 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -627,12 +627,43 @@ async function testFailedSarifUpload( return result; } -test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", async (t) => { +async function mockRiskAssessmentEnv(matrix: string) { process.env["GITHUB_JOB"] = "analyze"; process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; process.env["GITHUB_WORKSPACE"] = "/home/runner/work/codeql-action-fake-repository/codeql-action-fake-repository"; + sinon + .stub(apiClient, "getGitHubVersion") + .resolves({ type: util.GitHubVariant.GHES, version: "3.0.0" }); 
+ + const codeqlObject = await codeql.getCodeQLForTesting(); + sinon.stub(codeqlObject, "databaseExportDiagnostics").resolves(); + sinon.stub(codeqlObject, "diagnosticsExport").resolves(); + + sinon.stub(codeql, "getCodeQL").resolves(codeqlObject); + + sinon.stub(core, "getInput").withArgs("matrix").returns(matrix); + + const uploadArtifact = sinon.stub().resolves(); + const artifactClient = { uploadArtifact }; + sinon + .stub(debugArtifacts, "getArtifactUploaderClient") + .value(() => artifactClient); + + return [uploadArtifact]; +} + +test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", async (t) => { + const matrix = JSON.stringify({ + language: "javascript", + category: "/language:javascript", + "build-mode": "none", + runner: "ubuntu-latest", + }); + + const [uploadArtifact] = await mockRiskAssessmentEnv(matrix); + const logger = new RecordingLogger(); const config = createTestConfig({ analysisKinds: [AnalysisKind.RiskAssessment], @@ -641,30 +672,6 @@ test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", asy }); const features = createFeatures([]); - sinon - .stub(apiClient, "getGitHubVersion") - .resolves({ type: util.GitHubVariant.GHES, version: "3.0.0" }); - - const uploadArtifact = sinon.stub().resolves(); - const artifactClient = { uploadArtifact }; - sinon - .stub(debugArtifacts, "getArtifactUploaderClient") - .value(() => artifactClient); - - const matrix = JSON.stringify({ - language: "javascript", - category: "/language:javascript", - "build-mode": "none", - runner: "ubuntu-latest", - }); - sinon.stub(core, "getInput").withArgs("matrix").returns(matrix); - - const codeqlObject = await codeql.getCodeQLForTesting(); - sinon.stub(codeqlObject, "databaseExportDiagnostics").resolves(); - sinon.stub(codeqlObject, "diagnosticsExport").resolves(); - - sinon.stub(codeql, "getCodeQL").resolves(codeqlObject); - const result = await initActionPostHelper.tryUploadSarifIfRunFailed( config, 
parseRepositoryNwo("github/codeql-action-fake-repository"), From 1e7e52a33006c1f952fe1e2dfbee2630acc88a19 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Fri, 27 Feb 2026 12:40:04 +0000 Subject: [PATCH 12/50] Add tests where upload should get skipped --- src/init-action-post-helper.test.ts | 79 +++++++++++++++++++++++++---- 1 file changed, 69 insertions(+), 10 deletions(-) diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index fa53ec92a..d473fdd08 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -627,6 +627,13 @@ async function testFailedSarifUpload( return result; } +const singleLanguageMatrix = JSON.stringify({ + language: "javascript", + category: "/language:javascript", + "build-mode": "none", + runner: "ubuntu-latest", +}); + async function mockRiskAssessmentEnv(matrix: string) { process.env["GITHUB_JOB"] = "analyze"; process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; @@ -655,16 +662,9 @@ async function mockRiskAssessmentEnv(matrix: string) { } test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", async (t) => { - const matrix = JSON.stringify({ - language: "javascript", - category: "/language:javascript", - "build-mode": "none", - runner: "ubuntu-latest", - }); - - const [uploadArtifact] = await mockRiskAssessmentEnv(matrix); - const logger = new RecordingLogger(); + const [uploadArtifact] = await mockRiskAssessmentEnv(singleLanguageMatrix); + const config = createTestConfig({ analysisKinds: [AnalysisKind.RiskAssessment], codeQLCmd: "codeql-for-testing", @@ -680,7 +680,7 @@ test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", asy ); const expectedName = debugArtifacts.sanitizeArtifactName( - `sarif-artifact-${debugArtifacts.getArtifactSuffix(matrix)}`, + `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, ); t.is(result.upload_failed_run_skipped_because, undefined); 
t.is(result.upload_failed_run_error, undefined); @@ -693,3 +693,62 @@ test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", asy ), ); }); + +const skippedUploadTest = test.macro({ + exec: async ( + t: ExecutionContext, + config: Partial, + expectedSkippedReason: string, + ) => { + const logger = new RecordingLogger(); + const [uploadArtifact] = await mockRiskAssessmentEnv(singleLanguageMatrix); + const features = createFeatures([]); + + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + createTestConfig(config), + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + t.is(result.upload_failed_run_skipped_because, expectedSkippedReason); + t.assert(uploadArtifact.notCalled); + }, + + title: (providedTitle: string = "") => + `tryUploadSarifIfRunFailed - skips upload ${providedTitle}`, +}); + +test( + "without CodeQL command", + skippedUploadTest, + // No codeQLCmd + { + analysisKinds: [AnalysisKind.RiskAssessment], + languages: ["javascript"], + } satisfies Partial, + "CodeQL command not found", +); + +test( + "if no language is configured", + skippedUploadTest, + // No explicit language configuration + { + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + } satisfies Partial, + "Unexpectedly, the configuration is not for a single language.", +); + +test( + "if multiple languages are configured", + skippedUploadTest, + // Multiple explicit languages configured + { + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript", "python"], + } satisfies Partial, + "Unexpectedly, the configuration is not for a single language.", +); From e995ba3522528bd5b5d8f9202c952b25b1d22fa0 Mon Sep 17 00:00:00 2001 From: "Michael B.
Gale" Date: Fri, 27 Feb 2026 12:52:54 +0000 Subject: [PATCH 13/50] Add more tests/assertions --- src/init-action-post-helper.test.ts | 76 ++++++++++++++++++++++++++--- 1 file changed, 69 insertions(+), 7 deletions(-) diff --git a/src/init-action-post-helper.test.ts b/src/init-action-post-helper.test.ts index d473fdd08..e5f1a3831 100644 --- a/src/init-action-post-helper.test.ts +++ b/src/init-action-post-helper.test.ts @@ -635,6 +635,7 @@ const singleLanguageMatrix = JSON.stringify({ }); async function mockRiskAssessmentEnv(matrix: string) { + process.env[EnvVar.ANALYZE_DID_COMPLETE_SUCCESSFULLY] = "false"; process.env["GITHUB_JOB"] = "analyze"; process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository"; process.env["GITHUB_WORKSPACE"] = @@ -645,8 +646,12 @@ async function mockRiskAssessmentEnv(matrix: string) { .resolves({ type: util.GitHubVariant.GHES, version: "3.0.0" }); const codeqlObject = await codeql.getCodeQLForTesting(); - sinon.stub(codeqlObject, "databaseExportDiagnostics").resolves(); - sinon.stub(codeqlObject, "diagnosticsExport").resolves(); + const databaseExportDiagnostics = sinon + .stub(codeqlObject, "databaseExportDiagnostics") + .resolves(); + const diagnosticsExport = sinon + .stub(codeqlObject, "diagnosticsExport") + .resolves(); sinon.stub(codeql, "getCodeQL").resolves(codeqlObject); @@ -658,12 +663,13 @@ async function mockRiskAssessmentEnv(matrix: string) { .stub(debugArtifacts, "getArtifactUploaderClient") .value(() => artifactClient); - return [uploadArtifact]; + return { uploadArtifact, databaseExportDiagnostics, diagnosticsExport }; } -test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", async (t) => { +test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (diagnosticsExport)", async (t) => { const logger = new RecordingLogger(); - const [uploadArtifact] = await mockRiskAssessmentEnv(singleLanguageMatrix); + const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } 
= + await mockRiskAssessmentEnv(singleLanguageMatrix); const config = createTestConfig({ analysisKinds: [AnalysisKind.RiskAssessment], @@ -682,16 +688,70 @@ test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments", asy const expectedName = debugArtifacts.sanitizeArtifactName( `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, ); + const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; t.is(result.upload_failed_run_skipped_because, undefined); t.is(result.upload_failed_run_error, undefined); t.is(result.sarifID, expectedName); t.assert( uploadArtifact.calledOnceWith( expectedName, - sinon.match.array, + [sinon.match(expectedFilePattern)], sinon.match.string, ), ); + t.assert(databaseExportDiagnostics.notCalled); + t.assert( + diagnosticsExport.calledOnceWith( + sinon.match(expectedFilePattern), + "/language:javascript", + config, + ), + ); +}); + +test("tryUploadSarifIfRunFailed - uploads as artifact for risk assessments (databaseExportDiagnostics)", async (t) => { + const logger = new RecordingLogger(); + const { uploadArtifact, databaseExportDiagnostics, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); + + const dbLocation = "/some/path"; + const config = createTestConfig({ + analysisKinds: [AnalysisKind.RiskAssessment], + codeQLCmd: "codeql-for-testing", + languages: ["javascript"], + dbLocation: "/some/path", + }); + const features = createFeatures([Feature.ExportDiagnosticsEnabled]); + + const result = await initActionPostHelper.tryUploadSarifIfRunFailed( + config, + parseRepositoryNwo("github/codeql-action-fake-repository"), + features, + logger, + ); + + const expectedName = debugArtifacts.sanitizeArtifactName( + `sarif-artifact-${debugArtifacts.getArtifactSuffix(singleLanguageMatrix)}`, + ); + const expectedFilePattern = /codeql-failed-sarif-javascript\.csra\.sarif$/; + t.is(result.upload_failed_run_skipped_because, undefined); + t.is(result.upload_failed_run_error, 
undefined); + t.is(result.sarifID, expectedName); + t.assert( + uploadArtifact.calledOnceWith( + expectedName, + [sinon.match(expectedFilePattern)], + sinon.match.string, + ), + ); + t.assert(diagnosticsExport.notCalled); + t.assert( + databaseExportDiagnostics.calledOnceWith( + dbLocation, + sinon.match(expectedFilePattern), + "/language:javascript", + ), + ); }); const skippedUploadTest = test.macro({ @@ -701,7 +761,8 @@ const skippedUploadTest = test.macro({ expectedSkippedReason: string, ) => { const logger = new RecordingLogger(); - const [uploadArtifact] = await mockRiskAssessmentEnv(singleLanguageMatrix); + const { uploadArtifact, diagnosticsExport } = + await mockRiskAssessmentEnv(singleLanguageMatrix); const features = createFeatures([]); const result = await initActionPostHelper.tryUploadSarifIfRunFailed( @@ -713,6 +774,7 @@ const skippedUploadTest = test.macro({ t.is(result.upload_failed_run_skipped_because, expectedSkippedReason); t.assert(uploadArtifact.notCalled); + t.assert(diagnosticsExport.notCalled); }, title: (providedTitle: string = "") => From f3663cdc324659061dee087b0d8d7d7f7c03f20d Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Sat, 28 Feb 2026 15:18:25 +0000 Subject: [PATCH 14/50] Fix typos in comments --- src/init-action-post-helper.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/init-action-post-helper.ts b/src/init-action-post-helper.ts index 6fe38f3bb..40d0f42e3 100644 --- a/src/init-action-post-helper.ts +++ b/src/init-action-post-helper.ts @@ -75,7 +75,7 @@ function createFailedUploadFailedSarifResult( }; } -/** Records details about a SARIF file that can contains information about a failed analysis. */ +/** Records details about a SARIF file that contains information about a failed analysis. 
*/ interface FailedSarifInfo { sarifFile: string; category: string | undefined; @@ -83,7 +83,7 @@ interface FailedSarifInfo { } /** - * Tries to prepare a SARIF file that can contains information about a failed analysis. + * Tries to prepare a SARIF file that contains information about a failed analysis. * * @returns Either information about the SARIF file that was produced, or a reason why it couldn't be produced. */ @@ -300,7 +300,7 @@ export async function tryUploadSarifIfRunFailed( /** * Handles the majority of the `post-init` step logic which, depending on the configuration, - * mainly involves uploading a SARIF file with information about the failued run, debug + * mainly involves uploading a SARIF file with information about the failed run, debug * artifacts, and performing clean-up operations. * * @param uploadAllAvailableDebugArtifacts A function with which to upload debug artifacts. From 5db3a9e947b9ac46243a3d76f6e677cdc5cb7ead Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 3 Mar 2026 14:14:34 +0000 Subject: [PATCH 15/50] Extract `JobSpecification` type from `Specification` --- pr-checks/sync.ts | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index ca2b069cb..8c73e9f63 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -29,11 +29,9 @@ type WorkflowInputs = Partial>; /** * Represents PR check specifications. */ -interface Specification { +interface Specification extends JobSpecification { /** The display name for the check. */ name: string; - /** The workflow steps specific to this check. */ - steps: any[]; /** Workflow-level input definitions forwarded to `workflow_dispatch`/`workflow_call`. */ inputs?: Record; /** CodeQL bundle versions to test against. Defaults to `DEFAULT_TEST_VERSIONS`. */ @@ -45,25 +43,31 @@ interface Specification { /** Values for the `analysis-kinds` matrix dimension. 
*/ analysisKinds?: string[]; + /** Container image configuration for the job. */ + container?: any; + /** Service containers for the job. */ + services?: any; + + /** If set, this check is part of a named collection that gets its own caller workflow. */ + collection?: string; +} + +/** Represents job specifications. */ +interface JobSpecification { + /** Custom permissions override for the job. */ + permissions?: Record; + /** Extra environment variables for the job. */ + env?: Record; + + /** The workflow steps specific to this check. */ + steps: any[]; + installNode?: boolean; installGo?: boolean; installJava?: boolean; installPython?: boolean; installDotNet?: boolean; installYq?: boolean; - - /** Container image configuration for the job. */ - container?: any; - /** Service containers for the job. */ - services?: any; - - /** Custom permissions override for the job. */ - permissions?: Record; - /** Extra environment variables for the job. */ - env?: Record; - - /** If set, this check is part of a named collection that gets its own caller workflow. */ - collection?: string; } // The default set of CodeQL Bundle versions to use for the PR checks. From 97a3705788fc10f17713753a106ceec2a64af38a Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Tue, 3 Mar 2026 13:49:12 +0000 Subject: [PATCH 16/50] Organise language-specific setup information --- .github/workflows/__all-platform-bundle.yml | 30 +- .github/workflows/__analyze-ref-input.yml | 30 +- .github/workflows/__build-mode-manual.yml | 30 +- .github/workflows/__config-input.yml | 14 +- .../__export-file-baseline-information.yml | 30 +- .github/workflows/__go-custom-queries.yml | 30 +- .github/workflows/__go.yml | 12 +- .github/workflows/__local-bundle.yml | 30 +- .../workflows/__multi-language-autodetect.yml | 30 +- ...ackaging-codescanning-config-inputs-js.yml | 44 +-- .../__packaging-config-inputs-js.yml | 42 +-- .github/workflows/__packaging-config-js.yml | 42 +-- .github/workflows/__packaging-inputs-js.yml | 42 +-- .github/workflows/__remote-config.yml | 30 +- .github/workflows/__split-workflow.yml | 30 +- .github/workflows/__swift-custom-build.yml | 30 +- .github/workflows/__unset-environment.yml | 30 +- .github/workflows/__upload-ref-sha-input.yml | 30 +- .github/workflows/__upload-sarif.yml | 30 +- .github/workflows/__with-checkout-path.yml | 30 +- pr-checks/sync.ts | 304 +++++++++++------- 21 files changed, 491 insertions(+), 429 deletions(-) diff --git a/.github/workflows/__all-platform-bundle.yml b/.github/workflows/__all-platform-bundle.yml index 1be5ba9a4..ca61c3d8b 100644 --- a/.github/workflows/__all-platform-bundle.yml +++ b/.github/workflows/__all-platform-bundle.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string 
description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: all-platform-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: all-platform-bundle-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: all-platform-bundle: strategy: @@ -82,15 +82,15 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'true' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - id: init uses: ./../action/init with: diff --git a/.github/workflows/__analyze-ref-input.yml b/.github/workflows/__analyze-ref-input.yml index 671e5f57b..952e4a3ae 100644 --- a/.github/workflows/__analyze-ref-input.yml +++ b/.github/workflows/__analyze-ref-input.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install 
required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: analyze-ref-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: analyze-ref-input-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: analyze-ref-input: strategy: @@ -88,6 +88,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -98,10 +102,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__build-mode-manual.yml b/.github/workflows/__build-mode-manual.yml index b2723d64f..3d08a663b 100644 --- a/.github/workflows/__build-mode-manual.yml +++ b/.github/workflows/__build-mode-manual.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + 
go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: build-mode-manual-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: build-mode-manual-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: build-mode-manual: strategy: @@ -78,15 +78,15 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init id: init with: diff --git a/.github/workflows/__config-input.yml b/.github/workflows/__config-input.yml index a2e4dba2c..f80243926 100644 --- a/.github/workflows/__config-input.yml +++ b/.github/workflows/__config-input.yml @@ -51,13 +51,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Install Node.js - uses: actions/setup-node@v6 - with: - node-version: 20.x - cache: npm - - name: Install dependencies - run: npm ci - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -65,6 +58,13 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install Node.js + uses: actions/setup-node@v6 + with: + node-version: 20.x + cache: npm + - name: Install dependencies + run: npm ci - name: Copy queries into workspace run: | cp -a ../action/queries . 
diff --git a/.github/workflows/__export-file-baseline-information.yml b/.github/workflows/__export-file-baseline-information.yml index ef33c6485..d72d27a5d 100644 --- a/.github/workflows/__export-file-baseline-information.yml +++ b/.github/workflows/__export-file-baseline-information.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: export-file-baseline-information-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: export-file-baseline-information-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: export-file-baseline-information: strategy: @@ -82,15 +82,15 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init id: init with: diff --git a/.github/workflows/__go-custom-queries.yml 
b/.github/workflows/__go-custom-queries.yml index f7d5a99f3..895cc0c1f 100644 --- a/.github/workflows/__go-custom-queries.yml +++ b/.github/workflows/__go-custom-queries.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: go-custom-queries-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: go-custom-queries-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: go-custom-queries: strategy: @@ -80,15 +80,15 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__go.yml b/.github/workflows/__go.yml index 76d178b72..3688dc6fd 100644 --- a/.github/workflows/__go.yml +++ b/.github/workflows/__go.yml @@ -10,16 +10,16 @@ env: on: workflow_dispatch: inputs: - 
go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' jobs: go-custom-queries: name: 'Go: Custom queries' @@ -28,8 +28,8 @@ jobs: security-events: read uses: ./.github/workflows/__go-custom-queries.yml with: - go-version: ${{ inputs.go-version }} dotnet-version: ${{ inputs.dotnet-version }} + go-version: ${{ inputs.go-version }} go-indirect-tracing-workaround-diagnostic: name: 'Go: diagnostic when Go is changed after init step' permissions: diff --git a/.github/workflows/__local-bundle.yml b/.github/workflows/__local-bundle.yml index ae7798300..cb33e169f 100644 --- a/.github/workflows/__local-bundle.yml +++ b/.github/workflows/__local-bundle.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: 
local-bundle-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: local-bundle-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: local-bundle: strategy: @@ -88,6 +88,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -98,10 +102,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Fetch latest CodeQL bundle run: | wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst diff --git a/.github/workflows/__multi-language-autodetect.yml b/.github/workflows/__multi-language-autodetect.yml index 9cae8d362..c991d11b8 100644 --- a/.github/workflows/__multi-language-autodetect.yml +++ b/.github/workflows/__multi-language-autodetect.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of 
.NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: multi-language-autodetect-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: multi-language-autodetect-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: multi-language-autodetect: strategy: @@ -122,6 +122,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -132,10 +136,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Use Xcode 16 if: runner.os == 'macOS' && matrix.version != 'nightly-latest' run: sudo xcode-select -s "/Applications/Xcode_16.app" diff --git a/.github/workflows/__packaging-codescanning-config-inputs-js.yml b/.github/workflows/__packaging-codescanning-config-inputs-js.yml index d0304ecd1..803587eee 100644 --- a/.github/workflows/__packaging-codescanning-config-inputs-js.yml +++ b/.github/workflows/__packaging-codescanning-config-inputs-js.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install 
+ required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: packaging-codescanning-config-inputs-js-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: packaging-codescanning-config-inputs-js: strategy: @@ -85,13 +85,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Install Node.js - uses: actions/setup-node@v6 - with: - node-version: 20.x - cache: npm - - name: Install dependencies - run: npm ci - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -99,20 +92,27 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Install Node.js + uses: actions/setup-node@v6 + with: + node-version: 20.x + cache: npm + - name: Install dependencies + run: npm ci - name: Install Python if: matrix.version != 'nightly-latest' uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging3.yml' diff --git a/.github/workflows/__packaging-config-inputs-js.yml 
b/.github/workflows/__packaging-config-inputs-js.yml index f97103009..25367fd7e 100644 --- a/.github/workflows/__packaging-config-inputs-js.yml +++ b/.github/workflows/__packaging-config-inputs-js.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: packaging-config-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: packaging-config-inputs-js-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: packaging-config-inputs-js: strategy: @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Install Node.js - uses: actions/setup-node@v6 - with: - node-version: 20.x - cache: npm - - name: Install dependencies - run: npm ci - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -89,15 +82,22 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: 
actions/setup-dotnet@v5 + - name: Install Node.js + uses: actions/setup-node@v6 with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} + node-version: 20.x + cache: npm + - name: Install dependencies + run: npm ci - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging3.yml' diff --git a/.github/workflows/__packaging-config-js.yml b/.github/workflows/__packaging-config-js.yml index 99bd171f9..bb8d0491a 100644 --- a/.github/workflows/__packaging-config-js.yml +++ b/.github/workflows/__packaging-config-js.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: packaging-config-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: packaging-config-js-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: packaging-config-js: strategy: @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Install Node.js - uses: actions/setup-node@v6 - with: - node-version: 20.x - cache: npm - - name: Install dependencies - run: npm ci - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -89,15 +82,22 @@ jobs: version: ${{ matrix.version }} 
use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 + - name: Install Node.js + uses: actions/setup-node@v6 with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} + node-version: 20.x + cache: npm + - name: Install dependencies + run: npm ci - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging.yml' diff --git a/.github/workflows/__packaging-inputs-js.yml b/.github/workflows/__packaging-inputs-js.yml index e5cd0182e..16ab2c0ce 100644 --- a/.github/workflows/__packaging-inputs-js.yml +++ b/.github/workflows/__packaging-inputs-js.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: packaging-inputs-js-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: packaging-inputs-js-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: packaging-inputs-js: strategy: @@ -75,13 +75,6 @@ jobs: steps: - name: 
Check out repository uses: actions/checkout@v6 - - name: Install Node.js - uses: actions/setup-node@v6 - with: - node-version: 20.x - cache: npm - - name: Install dependencies - run: npm ci - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -89,15 +82,22 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 + - name: Install Node.js + uses: actions/setup-node@v6 with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} + node-version: 20.x + cache: npm + - name: Install dependencies + run: npm ci - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging2.yml' diff --git a/.github/workflows/__remote-config.yml b/.github/workflows/__remote-config.yml index abf5cd21e..af323ed1a 100644 --- a/.github/workflows/__remote-config.yml +++ b/.github/workflows/__remote-config.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET 
to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: remote-config-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: remote-config-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: remote-config: strategy: @@ -90,6 +90,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -100,10 +104,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__split-workflow.yml b/.github/workflows/__split-workflow.yml index 58e547f36..7359f4d5f 100644 --- a/.github/workflows/__split-workflow.yml +++ b/.github/workflows/__split-workflow.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: 
shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: split-workflow-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: split-workflow-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: split-workflow: strategy: @@ -88,15 +88,15 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging3.yml' diff --git a/.github/workflows/__swift-custom-build.yml b/.github/workflows/__swift-custom-build.yml index 7749f1b81..7e1f15e0c 100644 --- a/.github/workflows/__swift-custom-build.yml +++ b/.github/workflows/__swift-custom-build.yml @@ -25,34 +25,34 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' workflow_call: inputs: - go-version: - type: string - description: The version of Go to install - required: false - default: '>=1.21.0' dotnet-version: type: string description: The version of .NET to install required: false default: 9.x + go-version: + type: string + description: The version of Go to install + required: false + default: '>=1.21.0' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: 
swift-custom-build-${{github.ref}}-${{inputs.go-version}}-${{inputs.dotnet-version}} + group: swift-custom-build-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: swift-custom-build: strategy: @@ -82,15 +82,15 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Use Xcode 16 if: runner.os == 'macOS' && matrix.version != 'nightly-latest' run: sudo xcode-select -s "/Applications/Xcode_16.app" diff --git a/.github/workflows/__unset-environment.yml b/.github/workflows/__unset-environment.yml index 5be68f810..5c5ee701f 100644 --- a/.github/workflows/__unset-environment.yml +++ b/.github/workflows/__unset-environment.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ 
github.event_name == 'pull_request' || false }} - group: unset-environment-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: unset-environment-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: unset-environment: strategy: @@ -90,6 +90,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -100,10 +104,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init id: init with: diff --git a/.github/workflows/__upload-ref-sha-input.yml b/.github/workflows/__upload-ref-sha-input.yml index f9a832d7e..1882ec23c 100644 --- a/.github/workflows/__upload-ref-sha-input.yml +++ b/.github/workflows/__upload-ref-sha-input.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x 
defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: upload-ref-sha-input-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: upload-ref-sha-input-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: upload-ref-sha-input: strategy: @@ -88,6 +88,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -98,10 +102,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__upload-sarif.yml b/.github/workflows/__upload-sarif.yml index 873ba5ca9..4055c0873 100644 --- a/.github/workflows/__upload-sarif.yml +++ b/.github/workflows/__upload-sarif.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: 
string - description: The version of .NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: upload-sarif-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: upload-sarif-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: upload-sarif: strategy: @@ -95,6 +95,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -105,10 +109,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__with-checkout-path.yml b/.github/workflows/__with-checkout-path.yml index 11f6e1dbc..b0fab4b71 100644 --- a/.github/workflows/__with-checkout-path.yml +++ b/.github/workflows/__with-checkout-path.yml @@ -25,6 +25,11 @@ on: - cron: '0 5 * * *' workflow_dispatch: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -35,13 +40,13 @@ on: description: The version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x workflow_call: inputs: + dotnet-version: + type: string + description: The version of .NET to install + required: false + default: 9.x go-version: type: string description: The version of Go to install @@ -52,17 +57,12 @@ on: description: The 
version of Python to install required: false default: '3.13' - dotnet-version: - type: string - description: The version of .NET to install - required: false - default: 9.x defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: with-checkout-path-${{github.ref}}-${{inputs.go-version}}-${{inputs.python-version}}-${{inputs.dotnet-version}} + group: with-checkout-path-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} jobs: with-checkout-path: strategy: @@ -89,6 +89,10 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Install Go uses: actions/setup-go@v6 with: @@ -99,10 +103,6 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Delete original checkout run: | # delete the original checkout so we don't accidentally use it. diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 8c73e9f63..f7a49b7fc 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -5,6 +5,8 @@ import * as path from "path"; import * as yaml from "yaml"; +import { KnownLanguage } from "../src/languages"; + /** Known workflow input names. */ enum KnownInputName { GoVersion = "go-version", @@ -70,6 +72,16 @@ interface JobSpecification { installYq?: boolean; } +/** Describes language/framework-specific steps and inputs. */ +interface LanguageSetup { + specProperty: keyof JobSpecification; + inputs?: WorkflowInputs; + steps: any[]; +} + +/** Describes partial mappings from known languages to their specific setup information. */ +type LanguageSetups = Partial>; + // The default set of CodeQL Bundle versions to use for the PR checks. 
const defaultTestVersions = [ // The oldest supported CodeQL version. If bumping, update `CODEQL_MINIMUM_VERSION` in `codeql.ts` @@ -94,6 +106,131 @@ const defaultTestVersions = [ "nightly-latest", ]; +/** The default versions we use for languages / frameworks, if not specified as a workflow input. */ +const defaultLanguageVersions = { + javascript: "20.x", + go: ">=1.21.0", + java: "17", + python: "3.13", + csharp: "9.x", +} as const satisfies Partial>; + +/** A partial mapping from known languages to their specific setup information. */ +const languageSetups: LanguageSetups = { + javascript: { + specProperty: "installNode", + steps: [ + { + name: "Install Node.js", + uses: "actions/setup-node@v6", + with: { + "node-version": defaultLanguageVersions.javascript, + cache: "npm", + }, + }, + { + name: "Install dependencies", + run: "npm ci", + }, + ], + }, + go: { + specProperty: "installGo", + inputs: { + [KnownInputName.GoVersion]: { + type: "string", + description: "The version of Go to install", + required: false, + default: defaultLanguageVersions.go, + }, + }, + steps: [ + { + name: "Install Go", + uses: "actions/setup-go@v6", + with: { + "go-version": + "${{ inputs.go-version || '" + defaultLanguageVersions.go + "' }}", + // to avoid potentially misleading autobuilder results where we expect it to download + // dependencies successfully, but they actually come from a warm cache + cache: false, + }, + }, + ], + }, + java: { + specProperty: "installJava", + inputs: { + [KnownInputName.JavaVersion]: { + type: "string", + description: "The version of Java to install", + required: false, + default: defaultLanguageVersions.java, + }, + }, + steps: [ + { + name: "Install Java", + uses: "actions/setup-java@v5", + with: { + "java-version": + "${{ inputs.java-version || '" + + defaultLanguageVersions.java + + "' }}", + distribution: "temurin", + }, + }, + ], + }, + python: { + specProperty: "installPython", + inputs: { + [KnownInputName.PythonVersion]: { + type: 
"string", + description: "The version of Python to install", + required: false, + default: defaultLanguageVersions.python, + }, + }, + steps: [ + { + name: "Install Python", + if: "matrix.version != 'nightly-latest'", + uses: "actions/setup-python@v6", + with: { + "python-version": + "${{ inputs.python-version || '" + + defaultLanguageVersions.python + + "' }}", + }, + }, + ], + }, + csharp: { + specProperty: "installDotNet", + inputs: { + [KnownInputName.DotnetVersion]: { + type: "string", + description: "The version of .NET to install", + required: false, + default: defaultLanguageVersions.csharp, + }, + }, + steps: [ + { + name: "Install .NET", + uses: "actions/setup-dotnet@v5", + with: { + "dotnet-version": + "${{ inputs.dotnet-version || '" + + defaultLanguageVersions.csharp + + "' }}", + }, + }, + ], + }, +}; + const THIS_DIR = __dirname; const CHECKS_DIR = path.join(THIS_DIR, "checks"); const OUTPUT_DIR = path.join(THIS_DIR, "..", ".github", "workflows"); @@ -138,6 +275,36 @@ function stripTrailingWhitespace(content: string): string { .join("\n"); } +/** + * Retrieves setup steps and additional input definitions based on specific languages or frameworks + * that are requested by the `checkSpecification`. + * + * @returns An object containing setup steps and additional input specifications. + */ +function getSetupSteps(checkSpecification: Specification): { + inputs: WorkflowInputs; + steps: any[]; +} { + let inputs: WorkflowInputs = {}; + const steps = []; + + for (const language of Object.values(KnownLanguage).sort()) { + const setupSpec = languageSetups[language]; + + if ( + setupSpec === undefined || + checkSpecification[setupSpec.specProperty] === undefined + ) { + continue; + } + + steps.push(...setupSpec.steps); + inputs = { ...inputs, ...setupSpec.inputs }; + } + + return { inputs, steps }; +} + /** * Main entry point for the sync script. 
*/ @@ -170,7 +337,6 @@ function main(): void { console.log(`Processing: ${checkName} — "${checkSpecification.name}"`); - const workflowInputs: WorkflowInputs = {}; let matrix: Array> = []; for (const version of checkSpecification.versions ?? defaultTestVersions) { @@ -216,134 +382,30 @@ function main(): void { matrix = newMatrix; } + // Determine which languages or frameworks have to be installed. + const setupInfo = getSetupSteps(checkSpecification); + const workflowInputs = setupInfo.inputs; + // Construct the workflow steps needed for this check. const steps: any[] = [ { name: "Check out repository", uses: "actions/checkout@v6", }, - ]; - - const installNode = checkSpecification.installNode; - - if (installNode) { - steps.push( - { - name: "Install Node.js", - uses: "actions/setup-node@v6", - with: { - "node-version": "20.x", - cache: "npm", - }, + { + name: "Prepare test", + id: "prepare-test", + uses: "./.github/actions/prepare-test", + with: { + version: "${{ matrix.version }}", + "use-all-platform-bundle": useAllPlatformBundle, + // If the action is being run from a container, then do not setup kotlin. + // This is because the kotlin binaries cannot be downloaded from the container. + "setup-kotlin": "container" in checkSpecification ? "false" : "true", }, - { - name: "Install dependencies", - run: "npm ci", - }, - ); - } - - steps.push({ - name: "Prepare test", - id: "prepare-test", - uses: "./.github/actions/prepare-test", - with: { - version: "${{ matrix.version }}", - "use-all-platform-bundle": useAllPlatformBundle, - // If the action is being run from a container, then do not setup kotlin. - // This is because the kotlin binaries cannot be downloaded from the container. - "setup-kotlin": "container" in checkSpecification ? 
"false" : "true", }, - }); - - const installGo = checkSpecification.installGo; - - if (installGo) { - const baseGoVersionExpr = ">=1.21.0"; - workflowInputs[KnownInputName.GoVersion] = { - type: "string", - description: "The version of Go to install", - required: false, - default: baseGoVersionExpr, - }; - - steps.push({ - name: "Install Go", - uses: "actions/setup-go@v6", - with: { - "go-version": - "${{ inputs.go-version || '" + baseGoVersionExpr + "' }}", - // to avoid potentially misleading autobuilder results where we expect it to download - // dependencies successfully, but they actually come from a warm cache - cache: false, - }, - }); - } - - const installJava = checkSpecification.installJava; - - if (installJava) { - const baseJavaVersionExpr = "17"; - workflowInputs[KnownInputName.JavaVersion] = { - type: "string", - description: "The version of Java to install", - required: false, - default: baseJavaVersionExpr, - }; - - steps.push({ - name: "Install Java", - uses: "actions/setup-java@v5", - with: { - "java-version": - "${{ inputs.java-version || '" + baseJavaVersionExpr + "' }}", - distribution: "temurin", - }, - }); - } - - const installPython = checkSpecification.installPython; - - if (installPython) { - const basePythonVersionExpr = "3.13"; - workflowInputs[KnownInputName.PythonVersion] = { - type: "string", - description: "The version of Python to install", - required: false, - default: basePythonVersionExpr, - }; - - steps.push({ - name: "Install Python", - if: "matrix.version != 'nightly-latest'", - uses: "actions/setup-python@v6", - with: { - "python-version": - "${{ inputs.python-version || '" + basePythonVersionExpr + "' }}", - }, - }); - } - - const installDotNet = checkSpecification.installDotNet; - - if (installDotNet) { - const baseDotNetVersionExpr = "9.x"; - workflowInputs[KnownInputName.DotnetVersion] = { - type: "string", - description: "The version of .NET to install", - required: false, - default: baseDotNetVersionExpr, - }; - - 
steps.push({ - name: "Install .NET", - uses: "actions/setup-dotnet@v5", - with: { - "dotnet-version": - "${{ inputs.dotnet-version || '" + baseDotNetVersionExpr + "' }}", - }, - }); - } + ...setupInfo.steps, + ]; const installYq = checkSpecification.installYq; From 369d73b98fa4e47970096f2bc039084b89a58afd Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 3 Mar 2026 13:53:36 +0000 Subject: [PATCH 17/50] Refactor matrix generation into its own function --- pr-checks/sync.ts | 88 +++++++++++++++++++++++++---------------------- 1 file changed, 47 insertions(+), 41 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index f7a49b7fc..c6d990c31 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -275,6 +275,52 @@ function stripTrailingWhitespace(content: string): string { .join("\n"); } +/** Generates the matrix for a job. */ +function generateJobMatrix( + checkSpecification: Specification, +): Array> { + let matrix: Array> = []; + + for (const version of checkSpecification.versions ?? defaultTestVersions) { + if (version === "latest") { + throw new Error( + 'Did not recognise "version: latest". Did you mean "version: linked"?', + ); + } + + const runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]; + const operatingSystems = checkSpecification.operatingSystems ?? 
["ubuntu"]; + + for (const operatingSystem of operatingSystems) { + const runnerImagesForOs = runnerImages.filter((image) => + image.startsWith(operatingSystem), + ); + + for (const runnerImage of runnerImagesForOs) { + matrix.push({ + os: runnerImage, + version, + }); + } + } + } + + if (checkSpecification.analysisKinds) { + const newMatrix: Array> = []; + for (const matrixInclude of matrix) { + for (const analysisKind of checkSpecification.analysisKinds) { + newMatrix.push({ + ...matrixInclude, + "analysis-kinds": analysisKind, + }); + } + } + matrix = newMatrix; + } + + return matrix; +} + /** * Retrieves setup steps and additional input definitions based on specific languages or frameworks * that are requested by the `checkSpecification`. @@ -337,51 +383,11 @@ function main(): void { console.log(`Processing: ${checkName} — "${checkSpecification.name}"`); - let matrix: Array> = []; - - for (const version of checkSpecification.versions ?? defaultTestVersions) { - if (version === "latest") { - throw new Error( - 'Did not recognise "version: latest". Did you mean "version: linked"?', - ); - } - - const runnerImages = ["ubuntu-latest", "macos-latest", "windows-latest"]; - const operatingSystems = checkSpecification.operatingSystems ?? [ - "ubuntu", - ]; - - for (const operatingSystem of operatingSystems) { - const runnerImagesForOs = runnerImages.filter((image) => - image.startsWith(operatingSystem), - ); - - for (const runnerImage of runnerImagesForOs) { - matrix.push({ - os: runnerImage, - version, - }); - } - } - } - + const matrix: Array> = generateJobMatrix(checkSpecification); const useAllPlatformBundle = checkSpecification.useAllPlatformBundle ? 
checkSpecification.useAllPlatformBundle : "false"; - if (checkSpecification.analysisKinds) { - const newMatrix: Array> = []; - for (const matrixInclude of matrix) { - for (const analysisKind of checkSpecification.analysisKinds) { - newMatrix.push({ - ...matrixInclude, - "analysis-kinds": analysisKind, - }); - } - } - matrix = newMatrix; - } - // Determine which languages or frameworks have to be installed. const setupInfo = getSetupSteps(checkSpecification); const workflowInputs = setupInfo.inputs; From 92ab799fe0e7addb7a082bf77beb3609aeea3f02 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 3 Mar 2026 14:00:45 +0000 Subject: [PATCH 18/50] Refactor job generation into `generateJob` --- pr-checks/sync.ts | 217 +++++++++++++++++++++++++--------------------- 1 file changed, 120 insertions(+), 97 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index c6d990c31..856cf295c 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -351,6 +351,122 @@ function getSetupSteps(checkSpecification: Specification): { return { inputs, steps }; } +/** + * Generates an Actions job from the `checkSpecification`. + * + * @param specDocument + * The raw YAML document of the PR check specification. + * Used to extract `jobs` without losing the original formatting. + * @param checkSpecification The PR check specification. + * @returns The job and additional workflow inputs. + */ +function generateJob( + specDocument: yaml.Document, + checkSpecification: Specification, +) { + const matrix: Array> = + generateJobMatrix(checkSpecification); + + const useAllPlatformBundle = checkSpecification.useAllPlatformBundle + ? checkSpecification.useAllPlatformBundle + : "false"; + + // Determine which languages or frameworks have to be installed. + const setupInfo = getSetupSteps(checkSpecification); + const workflowInputs = setupInfo.inputs; + + // Construct the workflow steps needed for this check. 
+ const steps: any[] = [ + { + name: "Check out repository", + uses: "actions/checkout@v6", + }, + { + name: "Prepare test", + id: "prepare-test", + uses: "./.github/actions/prepare-test", + with: { + version: "${{ matrix.version }}", + "use-all-platform-bundle": useAllPlatformBundle, + // If the action is being run from a container, then do not setup kotlin. + // This is because the kotlin binaries cannot be downloaded from the container. + "setup-kotlin": "container" in checkSpecification ? "false" : "true", + }, + }, + ...setupInfo.steps, + ]; + + const installYq = checkSpecification.installYq; + + if (installYq) { + steps.push({ + name: "Install yq", + if: "runner.os == 'Windows'", + env: { + YQ_PATH: "${{ runner.temp }}/yq", + // This is essentially an arbitrary version of `yq`, which happened to be the one that + // `choco` fetched when we moved away from using that here. + // See https://github.com/github/codeql-action/pull/3423 + YQ_VERSION: "v4.50.1", + }, + run: + 'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' + + 'echo "$YQ_PATH" >> "$GITHUB_PATH"', + }); + } + + // Extract the sequence of steps from the YAML document to persist as much formatting as possible. + const specSteps = specDocument.get("steps") as yaml.YAMLSeq; + + // A handful of workflow specifications use double quotes for values, while we generally use single quotes. + // This replaces double quotes with single quotes for consistency. + yaml.visit(specSteps, { + Scalar(_key, node) { + if (node.type === "QUOTE_DOUBLE") { + node.type = "QUOTE_SINGLE"; + } + }, + }); + + // Add the generated steps in front of the ones from the specification. 
+ specSteps.items.unshift(...steps); + + const checkJob: Record = { + strategy: { + "fail-fast": false, + matrix: { + include: matrix, + }, + }, + name: checkSpecification.name, + if: "github.triggering_actor != 'dependabot[bot]'", + permissions: { + contents: "read", + "security-events": "read", + }, + "timeout-minutes": 45, + "runs-on": "${{ matrix.os }}", + steps: specSteps, + }; + + if (checkSpecification.permissions) { + checkJob.permissions = checkSpecification.permissions; + } + + for (const key of ["env", "container", "services"] as const) { + if (checkSpecification[key] !== undefined) { + checkJob[key] = checkSpecification[key]; + } + } + + checkJob.env = checkJob.env ?? {}; + if (!("CODEQL_ACTION_TEST_MODE" in checkJob.env)) { + checkJob.env.CODEQL_ACTION_TEST_MODE = true; + } + + return { checkJob, workflowInputs }; +} + /** * Main entry point for the sync script. */ @@ -383,103 +499,10 @@ function main(): void { console.log(`Processing: ${checkName} — "${checkSpecification.name}"`); - const matrix: Array> = generateJobMatrix(checkSpecification); - const useAllPlatformBundle = checkSpecification.useAllPlatformBundle - ? checkSpecification.useAllPlatformBundle - : "false"; - - // Determine which languages or frameworks have to be installed. - const setupInfo = getSetupSteps(checkSpecification); - const workflowInputs = setupInfo.inputs; - - // Construct the workflow steps needed for this check. - const steps: any[] = [ - { - name: "Check out repository", - uses: "actions/checkout@v6", - }, - { - name: "Prepare test", - id: "prepare-test", - uses: "./.github/actions/prepare-test", - with: { - version: "${{ matrix.version }}", - "use-all-platform-bundle": useAllPlatformBundle, - // If the action is being run from a container, then do not setup kotlin. - // This is because the kotlin binaries cannot be downloaded from the container. - "setup-kotlin": "container" in checkSpecification ? 
"false" : "true", - }, - }, - ...setupInfo.steps, - ]; - - const installYq = checkSpecification.installYq; - - if (installYq) { - steps.push({ - name: "Install yq", - if: "runner.os == 'Windows'", - env: { - YQ_PATH: "${{ runner.temp }}/yq", - // This is essentially an arbitrary version of `yq`, which happened to be the one that - // `choco` fetched when we moved away from using that here. - // See https://github.com/github/codeql-action/pull/3423 - YQ_VERSION: "v4.50.1", - }, - run: - 'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' + - 'echo "$YQ_PATH" >> "$GITHUB_PATH"', - }); - } - - // Extract the sequence of steps from the YAML document to persist as much formatting as possible. - const specSteps = specDocument.get("steps") as yaml.YAMLSeq; - - // A handful of workflow specifications use double quotes for values, while we generally use single quotes. - // This replaces double quotes with single quotes for consistency. - yaml.visit(specSteps, { - Scalar(_key, node) { - if (node.type === "QUOTE_DOUBLE") { - node.type = "QUOTE_SINGLE"; - } - }, - }); - - // Add the generated steps in front of the ones from the specification. - specSteps.items.unshift(...steps); - - const checkJob: Record = { - strategy: { - "fail-fast": false, - matrix: { - include: matrix, - }, - }, - name: checkSpecification.name, - if: "github.triggering_actor != 'dependabot[bot]'", - permissions: { - contents: "read", - "security-events": "read", - }, - "timeout-minutes": 45, - "runs-on": "${{ matrix.os }}", - steps: specSteps, - }; - - if (checkSpecification.permissions) { - checkJob.permissions = checkSpecification.permissions; - } - - for (const key of ["env", "container", "services"] as const) { - if (checkSpecification[key] !== undefined) { - checkJob[key] = checkSpecification[key]; - } - } - - checkJob.env = checkJob.env ?? 
{}; - if (!("CODEQL_ACTION_TEST_MODE" in checkJob.env)) { - checkJob.env.CODEQL_ACTION_TEST_MODE = true; - } + const { checkJob, workflowInputs } = generateJob( + specDocument, + checkSpecification, + ); // If this check belongs to a named collection, record it. if (checkSpecification.collection) { From 95fc2f11fbd3f73b1b8df4e76a944f5fed52175c Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Wed, 4 Mar 2026 11:27:42 +0000 Subject: [PATCH 19/50] Move `yq` setup code into `getSetupSteps` --- pr-checks/sync.ts | 40 +++++++++++++++++++++------------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 856cf295c..6a28f5d04 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -231,6 +231,11 @@ const languageSetups: LanguageSetups = { }, }; +// This is essentially an arbitrary version of `yq`, which happened to be the one that +// `choco` fetched when we moved away from using that here. +// See https://github.com/github/codeql-action/pull/3423 +const YQ_VERSION = "v4.50.1"; + const THIS_DIR = __dirname; const CHECKS_DIR = path.join(THIS_DIR, "checks"); const OUTPUT_DIR = path.join(THIS_DIR, "..", ".github", "workflows"); @@ -348,6 +353,22 @@ function getSetupSteps(checkSpecification: Specification): { inputs = { ...inputs, ...setupSpec.inputs }; } + const installYq = checkSpecification.installYq; + + if (installYq) { + steps.push({ + name: "Install yq", + if: "runner.os == 'Windows'", + env: { + YQ_PATH: "${{ runner.temp }}/yq", + YQ_VERSION, + }, + run: + 'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' + + 'echo "$YQ_PATH" >> "$GITHUB_PATH"', + }); + } + return { inputs, steps }; } @@ -396,25 +417,6 @@ function generateJob( ...setupInfo.steps, ]; - const installYq = checkSpecification.installYq; - - if (installYq) { - steps.push({ - name: "Install yq", - if: "runner.os == 'Windows'", - env: { - YQ_PATH: "${{ runner.temp }}/yq", - // This 
is essentially an arbitrary version of `yq`, which happened to be the one that - // `choco` fetched when we moved away from using that here. - // See https://github.com/github/codeql-action/pull/3423 - YQ_VERSION: "v4.50.1", - }, - run: - 'gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe"\n' + - 'echo "$YQ_PATH" >> "$GITHUB_PATH"', - }); - } - // Extract the sequence of steps from the YAML document to persist as much formatting as possible. const specSteps = specDocument.get("steps") as yaml.YAMLSeq; From 2b6077152e19c9ddd8fe6e99f9512b04a839313a Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Tue, 3 Mar 2026 14:56:00 +0000 Subject: [PATCH 20/50] Add support for additional, validation jobs --- pr-checks/sync.ts | 108 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 102 insertions(+), 6 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 6a28f5d04..8f2b2e80f 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -32,8 +32,6 @@ type WorkflowInputs = Partial>; * Represents PR check specifications. */ interface Specification extends JobSpecification { - /** The display name for the check. */ - name: string; /** Workflow-level input definitions forwarded to `workflow_dispatch`/`workflow_call`. */ inputs?: Record; /** CodeQL bundle versions to test against. Defaults to `DEFAULT_TEST_VERSIONS`. */ @@ -50,12 +48,17 @@ interface Specification extends JobSpecification { /** Service containers for the job. */ services?: any; + /** Additional jobs to run after the main PR check job. */ + validationJobs?: Record; + /** If set, this check is part of a named collection that gets its own caller workflow. */ collection?: string; } /** Represents job specifications. */ interface JobSpecification { + /** The display name for the check. */ + name: string; /** Custom permissions override for the job. */ permissions?: Record; /** Extra environment variables for the job. 
*/ @@ -469,6 +472,92 @@ function generateJob( return { checkJob, workflowInputs }; } +/** Generates a validation job. */ +function generateValidationJob( + specDocument: yaml.Document, + jobSpecification: JobSpecification, + checkName: string, + name: string, +) { + // Determine which languages or frameworks have to be installed. + const { inputs, steps } = getSetupSteps(jobSpecification); + + // Extract the sequence of steps from the YAML document to persist as much formatting as possible. + const specSteps = specDocument.getIn([ + "validationJobs", + name, + "steps", + ]) as yaml.YAMLSeq; + + // Add the generated steps in front of the ones from the specification. + specSteps.items.unshift(...steps); + + const validationJob: Record = { + name: jobSpecification.name, + if: "github.triggering_actor != 'dependabot[bot]'", + needs: [checkName], + permissions: { + contents: "read", + "security-events": "read", + }, + "timeout-minutes": 5, + "runs-on": "ubuntu-slim", + steps: specSteps, + }; + + if (jobSpecification.permissions) { + validationJob.permissions = jobSpecification.permissions; + } + + for (const key of ["env"] as const) { + if (jobSpecification[key] !== undefined) { + validationJob[key] = jobSpecification[key]; + } + } + + validationJob.env = validationJob.env ?? {}; + if (!("CODEQL_ACTION_TEST_MODE" in validationJob.env)) { + validationJob.env.CODEQL_ACTION_TEST_MODE = true; + } + + return { validationJob, inputs }; +} + +/** Generates additional jobs that run after the main check job, based on the `validationJobs` property. 
*/ +function generateValidationJobs( + specDocument: yaml.Document, + checkSpecification: Specification, + checkName: string, +): Record { + if (checkSpecification.validationJobs === undefined) { + return {}; + } + + const validationJobs: Record = {}; + let workflowInputs: WorkflowInputs = {}; + + for (const [jobName, jobSpec] of Object.entries( + checkSpecification.validationJobs, + )) { + if (checkName === jobName) { + throw new Error( + `Validation job '${jobName}' cannot have the same name as the main job.`, + ); + } + + const { validationJob, inputs } = generateValidationJob( + specDocument, + jobSpec, + checkName, + jobName, + ); + validationJobs[jobName] = validationJob; + workflowInputs = { ...workflowInputs, ...inputs }; + } + + return { validationJobs, workflowInputs }; +} + /** * Main entry point for the sync script. */ @@ -505,6 +594,12 @@ function main(): void { specDocument, checkSpecification, ); + const { validationJobs, validationJobInputs } = generateValidationJobs( + specDocument, + checkSpecification, + checkName, + ); + const combinedInputs = { ...workflowInputs, ...validationJobInputs }; // If this check belongs to a named collection, record it. if (checkSpecification.collection) { @@ -515,12 +610,12 @@ function main(): void { collections[collectionName].push({ specification: checkSpecification, checkName, - inputs: workflowInputs, + inputs: combinedInputs, }); } let extraGroupName = ""; - for (const inputName of Object.keys(workflowInputs)) { + for (const inputName of Object.keys(combinedInputs)) { extraGroupName += "-${{inputs." 
+ inputName + "}}"; } @@ -545,10 +640,10 @@ function main(): void { }, schedule: [{ cron }], workflow_dispatch: { - inputs: workflowInputs, + inputs: combinedInputs, }, workflow_call: { - inputs: workflowInputs, + inputs: combinedInputs, }, }, defaults: { @@ -563,6 +658,7 @@ function main(): void { }, jobs: { [checkName]: checkJob, + ...validationJobs, }, }; From 3d478129f20ce1c3dafa70269211887a9b1d5e05 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Thu, 5 Mar 2026 15:53:53 +0000 Subject: [PATCH 21/50] Add `tsconfig.json` for `pr-checks` --- pr-checks/sync.ts | 2 +- pr-checks/tsconfig.json | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 pr-checks/tsconfig.json diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 8f2b2e80f..00cf625c0 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -340,7 +340,7 @@ function getSetupSteps(checkSpecification: Specification): { steps: any[]; } { let inputs: WorkflowInputs = {}; - const steps = []; + const steps: any[] = []; for (const language of Object.values(KnownLanguage).sort()) { const setupSpec = languageSetups[language]; diff --git a/pr-checks/tsconfig.json b/pr-checks/tsconfig.json new file mode 100644 index 000000000..e85403253 --- /dev/null +++ b/pr-checks/tsconfig.json @@ -0,0 +1,33 @@ +{ + "compilerOptions": { + /* Basic Options */ + "lib": ["ES2022"], + "target": "ES2022", + "module": "commonjs", + "rootDir": "..", + "sourceMap": false, + "noEmit": true, + + /* Strict Type-Checking Options */ + "strict": true, /* Enable all strict type-checking options. */ + "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */ + "strictNullChecks": true, /* Enable strict null checks. */ + "strictFunctionTypes": true, /* Enable strict checking of function types. */ + "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. 
*/ + "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ + "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ + "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ + + /* Additional Checks */ + "noUnusedLocals": false, /* Report errors on unused locals. */ + "noUnusedParameters": false, /* Report errors on unused parameters. */ + "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ + "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ + + /* Module Resolution Options */ + "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ + "resolveJsonModule": true, + }, + "include": ["./*.ts", "../src/**/*.ts"], + "exclude": ["node_modules"] +} From 79fdef791d746ceb151d8d5014b353ba5d086a5f Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Thu, 5 Mar 2026 15:54:33 +0000 Subject: [PATCH 22/50] Fix `generateValidationJobs` typing --- pr-checks/sync.ts | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 00cf625c0..e4c8e4e61 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -528,9 +528,9 @@ function generateValidationJobs( specDocument: yaml.Document, checkSpecification: Specification, checkName: string, -): Record { +) { if (checkSpecification.validationJobs === undefined) { - return {}; + return { validationJobs: {}, workflowInputs: {} }; } const validationJobs: Record = {}; @@ -594,11 +594,8 @@ function main(): void { specDocument, checkSpecification, ); - const { validationJobs, validationJobInputs } = generateValidationJobs( - specDocument, - checkSpecification, - checkName, - ); + const { validationJobs, workflowInputs: validationJobInputs } = + generateValidationJobs(specDocument, checkSpecification, checkName); const combinedInputs = { ...workflowInputs, ...validationJobInputs }; // If this check belongs to a named collection, record it. From 103db93efa6bc778aca29b92c57691be785251ca Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Thu, 5 Mar 2026 16:06:03 +0000 Subject: [PATCH 23/50] Make it more explicit that `getSetupSteps` just needs a `JobSpecification` --- pr-checks/sync.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index e4c8e4e61..fd648f2f7 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -335,7 +335,7 @@ function generateJobMatrix( * * @returns An object containing setup steps and additional input specifications. */ -function getSetupSteps(checkSpecification: Specification): { +function getSetupSteps(checkSpecification: JobSpecification): { inputs: WorkflowInputs; steps: any[]; } { From 2a0060496cd11fc2992f8019e5f42e80dd109a86 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Thu, 5 Mar 2026 16:07:10 +0000 Subject: [PATCH 24/50] Fix condition --- pr-checks/sync.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index fd648f2f7..878b4c245 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -347,7 +347,7 @@ function getSetupSteps(checkSpecification: JobSpecification): { if ( setupSpec === undefined || - checkSpecification[setupSpec.specProperty] === undefined + checkSpecification[setupSpec.specProperty] !== true ) { continue; } From da11f44114c04abe7a1ae2a3581707e938fa07ec Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Mon, 9 Mar 2026 14:13:22 +0000 Subject: [PATCH 25/50] Run `prepare-test` after setup steps --- .github/workflows/__all-platform-bundle.yml | 14 +++++++------- .github/workflows/__analyze-ref-input.yml | 14 +++++++------- .github/workflows/__autobuild-action.yml | 8 ++++---- ...__autobuild-direct-tracing-with-working-dir.yml | 10 +++++----- .github/workflows/__build-mode-autobuild.yml | 14 +++++++------- .github/workflows/__build-mode-manual.yml | 14 +++++++------- .github/workflows/__config-input.yml | 14 +++++++------- .../__export-file-baseline-information.yml | 14 +++++++------- .github/workflows/__go-custom-queries.yml | 14 +++++++------- ...__go-indirect-tracing-workaround-diagnostic.yml | 10 +++++----- ...indirect-tracing-workaround-no-file-program.yml | 10 +++++----- .../workflows/__go-indirect-tracing-workaround.yml | 10 +++++----- .github/workflows/__go-tracing-autobuilder.yml | 10 +++++----- .../workflows/__go-tracing-custom-build-steps.yml | 10 +++++----- .github/workflows/__go-tracing-legacy-workflow.yml | 10 +++++----- .github/workflows/__local-bundle.yml | 14 +++++++------- .github/workflows/__multi-language-autodetect.yml | 14 +++++++------- .../__packaging-codescanning-config-inputs-js.yml | 14 +++++++------- .github/workflows/__packaging-config-inputs-js.yml | 14 +++++++------- .github/workflows/__packaging-config-js.yml | 14 
+++++++------- .github/workflows/__packaging-inputs-js.yml | 14 +++++++------- .github/workflows/__remote-config.yml | 14 +++++++------- .github/workflows/__split-workflow.yml | 14 +++++++------- .github/workflows/__swift-custom-build.yml | 14 +++++++------- .github/workflows/__unset-environment.yml | 14 +++++++------- .github/workflows/__upload-ref-sha-input.yml | 14 +++++++------- .github/workflows/__upload-sarif.yml | 14 +++++++------- .github/workflows/__with-checkout-path.yml | 14 +++++++------- pr-checks/sync.ts | 2 +- 29 files changed, 180 insertions(+), 180 deletions(-) diff --git a/.github/workflows/__all-platform-bundle.yml b/.github/workflows/__all-platform-bundle.yml index ca61c3d8b..1be1d6375 100644 --- a/.github/workflows/__all-platform-bundle.yml +++ b/.github/workflows/__all-platform-bundle.yml @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'true' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -91,6 +84,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'true' + setup-kotlin: 'true' - id: init uses: ./../action/init with: diff --git a/.github/workflows/__analyze-ref-input.yml b/.github/workflows/__analyze-ref-input.yml index 952e4a3ae..79d863a13 100644 --- a/.github/workflows/__analyze-ref-input.yml +++ b/.github/workflows/__analyze-ref-input.yml @@ -81,13 +81,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: 
actions/setup-dotnet@v5 with: @@ -102,6 +95,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__autobuild-action.yml b/.github/workflows/__autobuild-action.yml index ed5cf1937..7521614f2 100644 --- a/.github/workflows/__autobuild-action.yml +++ b/.github/workflows/__autobuild-action.yml @@ -65,6 +65,10 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install .NET + uses: actions/setup-dotnet@v5 + with: + dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -72,10 +76,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install .NET - uses: actions/setup-dotnet@v5 - with: - dotnet-version: ${{ inputs.dotnet-version || '9.x' }} - uses: ./../action/init with: languages: csharp diff --git a/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml b/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml index 4a411ad1b..8acd90253 100644 --- a/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml +++ b/.github/workflows/__autobuild-direct-tracing-with-working-dir.yml @@ -67,6 +67,11 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Java + uses: actions/setup-java@v5 + with: + java-version: ${{ inputs.java-version || '17' }} + distribution: temurin - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -74,11 +79,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Java - uses: actions/setup-java@v5 - with: - 
java-version: ${{ inputs.java-version || '17' }} - distribution: temurin - name: Test setup run: | # Make sure that Gradle build succeeds in autobuild-dir ... diff --git a/.github/workflows/__build-mode-autobuild.yml b/.github/workflows/__build-mode-autobuild.yml index 2863793fd..7619d4593 100644 --- a/.github/workflows/__build-mode-autobuild.yml +++ b/.github/workflows/__build-mode-autobuild.yml @@ -67,13 +67,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install Java uses: actions/setup-java@v5 with: @@ -87,6 +80,13 @@ jobs: run: |- gh release download --repo mikefarah/yq --pattern "yq_windows_amd64.exe" "$YQ_VERSION" -O "$YQ_PATH/yq.exe" echo "$YQ_PATH" >> "$GITHUB_PATH" + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - name: Set up Java test repo configuration run: | mv * .github ../action/tests/multi-language-repo/ diff --git a/.github/workflows/__build-mode-manual.yml b/.github/workflows/__build-mode-manual.yml index 3d08a663b..93010fe63 100644 --- a/.github/workflows/__build-mode-manual.yml +++ b/.github/workflows/__build-mode-manual.yml @@ -71,13 +71,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -87,6 +80,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 
'true' - uses: ./../action/init id: init with: diff --git a/.github/workflows/__config-input.yml b/.github/workflows/__config-input.yml index f80243926..a2e4dba2c 100644 --- a/.github/workflows/__config-input.yml +++ b/.github/workflows/__config-input.yml @@ -51,13 +51,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install Node.js uses: actions/setup-node@v6 with: @@ -65,6 +58,13 @@ jobs: cache: npm - name: Install dependencies run: npm ci + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - name: Copy queries into workspace run: | cp -a ../action/queries . diff --git a/.github/workflows/__export-file-baseline-information.yml b/.github/workflows/__export-file-baseline-information.yml index d72d27a5d..e07009774 100644 --- a/.github/workflows/__export-file-baseline-information.yml +++ b/.github/workflows/__export-file-baseline-information.yml @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -91,6 +84,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init id: init with: diff --git a/.github/workflows/__go-custom-queries.yml b/.github/workflows/__go-custom-queries.yml index 895cc0c1f..cc11dc941 100644 --- 
a/.github/workflows/__go-custom-queries.yml +++ b/.github/workflows/__go-custom-queries.yml @@ -73,13 +73,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -89,6 +82,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml b/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml index 11497389f..e474e1b6f 100644 --- a/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml +++ b/.github/workflows/__go-indirect-tracing-workaround-diagnostic.yml @@ -61,6 +61,11 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version || '>=1.21.0' }} + cache: false - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -68,11 +73,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Go - uses: actions/setup-go@v6 - with: - go-version: ${{ inputs.go-version || '>=1.21.0' }} - cache: false - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml b/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml index 7d83904e6..cc94d3bb8 100644 --- a/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml +++ b/.github/workflows/__go-indirect-tracing-workaround-no-file-program.yml @@ -61,6 +61,11 @@ jobs: 
steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version || '>=1.21.0' }} + cache: false - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -68,11 +73,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Go - uses: actions/setup-go@v6 - with: - go-version: ${{ inputs.go-version || '>=1.21.0' }} - cache: false - name: Remove `file` program run: | echo $(which file) diff --git a/.github/workflows/__go-indirect-tracing-workaround.yml b/.github/workflows/__go-indirect-tracing-workaround.yml index 1259e7fa6..6c8a24e0c 100644 --- a/.github/workflows/__go-indirect-tracing-workaround.yml +++ b/.github/workflows/__go-indirect-tracing-workaround.yml @@ -61,6 +61,11 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version || '>=1.21.0' }} + cache: false - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -68,11 +73,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Go - uses: actions/setup-go@v6 - with: - go-version: ${{ inputs.go-version || '>=1.21.0' }} - cache: false - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__go-tracing-autobuilder.yml b/.github/workflows/__go-tracing-autobuilder.yml index 4a6dc68e2..c4ecfd410 100644 --- a/.github/workflows/__go-tracing-autobuilder.yml +++ b/.github/workflows/__go-tracing-autobuilder.yml @@ -95,6 +95,11 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version || '>=1.21.0' }} + cache: false - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -102,11 +107,6 @@ jobs: version: ${{ matrix.version }} 
use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Go - uses: actions/setup-go@v6 - with: - go-version: ${{ inputs.go-version || '>=1.21.0' }} - cache: false - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__go-tracing-custom-build-steps.yml b/.github/workflows/__go-tracing-custom-build-steps.yml index 5576b561c..d51fc02a9 100644 --- a/.github/workflows/__go-tracing-custom-build-steps.yml +++ b/.github/workflows/__go-tracing-custom-build-steps.yml @@ -95,6 +95,11 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version || '>=1.21.0' }} + cache: false - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -102,11 +107,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Go - uses: actions/setup-go@v6 - with: - go-version: ${{ inputs.go-version || '>=1.21.0' }} - cache: false - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__go-tracing-legacy-workflow.yml b/.github/workflows/__go-tracing-legacy-workflow.yml index 3a85caeb0..46127791e 100644 --- a/.github/workflows/__go-tracing-legacy-workflow.yml +++ b/.github/workflows/__go-tracing-legacy-workflow.yml @@ -95,6 +95,11 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ inputs.go-version || '>=1.21.0' }} + cache: false - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -102,11 +107,6 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 'true' - - name: Install Go - uses: actions/setup-go@v6 - with: - go-version: ${{ inputs.go-version || '>=1.21.0' }} - cache: false - uses: ./../action/init with: languages: go diff --git a/.github/workflows/__local-bundle.yml b/.github/workflows/__local-bundle.yml index 
cb33e169f..24704c3a6 100644 --- a/.github/workflows/__local-bundle.yml +++ b/.github/workflows/__local-bundle.yml @@ -81,13 +81,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -102,6 +95,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - name: Fetch latest CodeQL bundle run: | wget https://github.com/github/codeql-action/releases/latest/download/codeql-bundle-linux64.tar.zst diff --git a/.github/workflows/__multi-language-autodetect.yml b/.github/workflows/__multi-language-autodetect.yml index c991d11b8..6899561ef 100644 --- a/.github/workflows/__multi-language-autodetect.yml +++ b/.github/workflows/__multi-language-autodetect.yml @@ -115,13 +115,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -136,6 +129,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - name: Use Xcode 16 if: runner.os == 'macOS' && matrix.version != 'nightly-latest' run: sudo xcode-select -s "/Applications/Xcode_16.app" diff --git a/.github/workflows/__packaging-codescanning-config-inputs-js.yml 
b/.github/workflows/__packaging-codescanning-config-inputs-js.yml index 803587eee..2f7ca6769 100644 --- a/.github/workflows/__packaging-codescanning-config-inputs-js.yml +++ b/.github/workflows/__packaging-codescanning-config-inputs-js.yml @@ -85,13 +85,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -113,6 +106,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging3.yml' diff --git a/.github/workflows/__packaging-config-inputs-js.yml b/.github/workflows/__packaging-config-inputs-js.yml index 25367fd7e..146bc4355 100644 --- a/.github/workflows/__packaging-config-inputs-js.yml +++ b/.github/workflows/__packaging-config-inputs-js.yml @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -98,6 +91,13 @@ jobs: cache: npm - name: Install dependencies run: npm ci + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging3.yml' diff --git a/.github/workflows/__packaging-config-js.yml b/.github/workflows/__packaging-config-js.yml index 
bb8d0491a..fddd971f3 100644 --- a/.github/workflows/__packaging-config-js.yml +++ b/.github/workflows/__packaging-config-js.yml @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -98,6 +91,13 @@ jobs: cache: npm - name: Install dependencies run: npm ci + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging.yml' diff --git a/.github/workflows/__packaging-inputs-js.yml b/.github/workflows/__packaging-inputs-js.yml index 16ab2c0ce..64bd575d5 100644 --- a/.github/workflows/__packaging-inputs-js.yml +++ b/.github/workflows/__packaging-inputs-js.yml @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -98,6 +91,13 @@ jobs: cache: npm - name: Install dependencies run: npm ci + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging2.yml' diff --git a/.github/workflows/__remote-config.yml b/.github/workflows/__remote-config.yml index af323ed1a..9bee2292a 100644 --- a/.github/workflows/__remote-config.yml +++ b/.github/workflows/__remote-config.yml @@ -83,13 +83,6 @@ jobs: steps: - name: Check out repository uses: 
actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -104,6 +97,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__split-workflow.yml b/.github/workflows/__split-workflow.yml index 7359f4d5f..7dfa81249 100644 --- a/.github/workflows/__split-workflow.yml +++ b/.github/workflows/__split-workflow.yml @@ -81,13 +81,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -97,6 +90,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: config-file: '.github/codeql/codeql-config-packaging3.yml' diff --git a/.github/workflows/__swift-custom-build.yml b/.github/workflows/__swift-custom-build.yml index 7e1f15e0c..efdbde721 100644 --- a/.github/workflows/__swift-custom-build.yml +++ b/.github/workflows/__swift-custom-build.yml @@ -75,13 +75,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - 
use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -91,6 +84,13 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - name: Use Xcode 16 if: runner.os == 'macOS' && matrix.version != 'nightly-latest' run: sudo xcode-select -s "/Applications/Xcode_16.app" diff --git a/.github/workflows/__unset-environment.yml b/.github/workflows/__unset-environment.yml index 5c5ee701f..aa958c8b9 100644 --- a/.github/workflows/__unset-environment.yml +++ b/.github/workflows/__unset-environment.yml @@ -83,13 +83,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -104,6 +97,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init id: init with: diff --git a/.github/workflows/__upload-ref-sha-input.yml b/.github/workflows/__upload-ref-sha-input.yml index 1882ec23c..5ac97a515 100644 --- a/.github/workflows/__upload-ref-sha-input.yml +++ b/.github/workflows/__upload-ref-sha-input.yml @@ -81,13 +81,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -102,6 +95,13 
@@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__upload-sarif.yml b/.github/workflows/__upload-sarif.yml index 4055c0873..f6b7aa3cc 100644 --- a/.github/workflows/__upload-sarif.yml +++ b/.github/workflows/__upload-sarif.yml @@ -88,13 +88,6 @@ jobs: steps: - name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -109,6 +102,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - uses: ./../action/init with: tools: ${{ steps.prepare-test.outputs.tools-url }} diff --git a/.github/workflows/__with-checkout-path.yml b/.github/workflows/__with-checkout-path.yml index b0fab4b71..06aae6c4a 100644 --- a/.github/workflows/__with-checkout-path.yml +++ b/.github/workflows/__with-checkout-path.yml @@ -82,13 +82,6 @@ jobs: # This ensures we don't accidentally use the original checkout for any part of the test. 
- name: Check out repository uses: actions/checkout@v6 - - name: Prepare test - id: prepare-test - uses: ./.github/actions/prepare-test - with: - version: ${{ matrix.version }} - use-all-platform-bundle: 'false' - setup-kotlin: 'true' - name: Install .NET uses: actions/setup-dotnet@v5 with: @@ -103,6 +96,13 @@ jobs: uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} + - name: Prepare test + id: prepare-test + uses: ./.github/actions/prepare-test + with: + version: ${{ matrix.version }} + use-all-platform-bundle: 'false' + setup-kotlin: 'true' - name: Delete original checkout run: | # delete the original checkout so we don't accidentally use it. diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 878b4c245..dc3dc9768 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -405,6 +405,7 @@ function generateJob( name: "Check out repository", uses: "actions/checkout@v6", }, + ...setupInfo.steps, { name: "Prepare test", id: "prepare-test", @@ -417,7 +418,6 @@ function generateJob( "setup-kotlin": "container" in checkSpecification ? "false" : "true", }, }, - ...setupInfo.steps, ]; // Extract the sequence of steps from the YAML document to persist as much formatting as possible. From 5ddbbbe614f6e162a655fb50322ea16f93b43d0e Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Mon, 9 Mar 2026 14:16:23 +0000 Subject: [PATCH 26/50] Install python if there is no `matrix.version` --- .github/workflows/__analyze-ref-input.yml | 2 +- .github/workflows/__local-bundle.yml | 2 +- .github/workflows/__multi-language-autodetect.yml | 2 +- .github/workflows/__packaging-codescanning-config-inputs-js.yml | 2 +- .github/workflows/__remote-config.yml | 2 +- .github/workflows/__unset-environment.yml | 2 +- .github/workflows/__upload-ref-sha-input.yml | 2 +- .github/workflows/__upload-sarif.yml | 2 +- .github/workflows/__with-checkout-path.yml | 2 +- pr-checks/sync.ts | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/__analyze-ref-input.yml b/.github/workflows/__analyze-ref-input.yml index 79d863a13..66988ce3a 100644 --- a/.github/workflows/__analyze-ref-input.yml +++ b/.github/workflows/__analyze-ref-input.yml @@ -91,7 +91,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__local-bundle.yml b/.github/workflows/__local-bundle.yml index 24704c3a6..47975dbd5 100644 --- a/.github/workflows/__local-bundle.yml +++ b/.github/workflows/__local-bundle.yml @@ -91,7 +91,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__multi-language-autodetect.yml b/.github/workflows/__multi-language-autodetect.yml index 6899561ef..afd322785 100644 --- a/.github/workflows/__multi-language-autodetect.yml +++ b/.github/workflows/__multi-language-autodetect.yml @@ -125,7 +125,7 @@ jobs: go-version: 
${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__packaging-codescanning-config-inputs-js.yml b/.github/workflows/__packaging-codescanning-config-inputs-js.yml index 2f7ca6769..cd97dd80c 100644 --- a/.github/workflows/__packaging-codescanning-config-inputs-js.yml +++ b/.github/workflows/__packaging-codescanning-config-inputs-js.yml @@ -102,7 +102,7 @@ jobs: - name: Install dependencies run: npm ci - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__remote-config.yml b/.github/workflows/__remote-config.yml index 9bee2292a..82cbee135 100644 --- a/.github/workflows/__remote-config.yml +++ b/.github/workflows/__remote-config.yml @@ -93,7 +93,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__unset-environment.yml b/.github/workflows/__unset-environment.yml index aa958c8b9..e5189ca01 100644 --- a/.github/workflows/__unset-environment.yml +++ b/.github/workflows/__unset-environment.yml @@ -93,7 +93,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__upload-ref-sha-input.yml 
b/.github/workflows/__upload-ref-sha-input.yml index 5ac97a515..f75b9f61f 100644 --- a/.github/workflows/__upload-ref-sha-input.yml +++ b/.github/workflows/__upload-ref-sha-input.yml @@ -91,7 +91,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__upload-sarif.yml b/.github/workflows/__upload-sarif.yml index f6b7aa3cc..e017a2527 100644 --- a/.github/workflows/__upload-sarif.yml +++ b/.github/workflows/__upload-sarif.yml @@ -98,7 +98,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/.github/workflows/__with-checkout-path.yml b/.github/workflows/__with-checkout-path.yml index 06aae6c4a..cfcfb077f 100644 --- a/.github/workflows/__with-checkout-path.yml +++ b/.github/workflows/__with-checkout-path.yml @@ -92,7 +92,7 @@ jobs: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - name: Install Python - if: matrix.version != 'nightly-latest' + if: matrix.version != 'nightly-latest' || !matrix.version uses: actions/setup-python@v6 with: python-version: ${{ inputs.python-version || '3.13' }} diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index dc3dc9768..f5d23d72d 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -198,7 +198,7 @@ const languageSetups: LanguageSetups = { steps: [ { name: "Install Python", - if: "matrix.version != 'nightly-latest'", + if: "matrix.version != 'nightly-latest' || !matrix.version", uses: "actions/setup-python@v6", with: { "python-version": From e04697664c0c9f40771761688e5f5712ca3f7c8a Mon Sep 17 00:00:00 2001 
From: Sam Robson Date: Wed, 4 Mar 2026 18:22:25 +0000 Subject: [PATCH 27/50] feat: add minimumVersion values for existing language-specific overlay feature flags --- lib/analyze-action-post.js | 94 +++++++++--------------------- lib/analyze-action.js | 94 +++++++++--------------------- lib/autobuild-action.js | 94 +++++++++--------------------- lib/init-action-post.js | 94 +++++++++--------------------- lib/init-action.js | 94 +++++++++--------------------- lib/resolve-environment-action.js | 94 +++++++++--------------------- lib/setup-codeql-action.js | 94 +++++++++--------------------- lib/start-proxy-action-post.js | 94 +++++++++--------------------- lib/start-proxy-action.js | 94 +++++++++--------------------- lib/upload-lib.js | 94 +++++++++--------------------- lib/upload-sarif-action-post.js | 94 +++++++++--------------------- lib/upload-sarif-action.js | 94 +++++++++--------------------- src/config-utils.ts | 17 +++--- src/feature-flags.ts | 95 ++++++++++--------------------- src/overlay/index.ts | 9 +++ 15 files changed, 381 insertions(+), 868 deletions(-) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 4fd650079..a1d3a9476 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -161944,6 +161944,12 @@ async function isAnalyzingDefaultBranch() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -162086,70 +162092,49 @@ var featureConfig = { envVar: 
"CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* 
OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -162161,25 +162146,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, 
["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -162189,23 +162169,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -162268,28 +162238,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: 
"overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path3.join(tempDir, "config"); diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 3935138f5..37f9cf758 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -107559,6 +107559,12 @@ async function isAnalyzingDefaultBranch() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var 
CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -107835,70 +107841,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. 
["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 
0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -107910,25 +107895,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -107938,23 +107918,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, 
["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -108613,28 +108583,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* 
OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path7.join(tempDir, "config"); diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index de91c2350..27483c3c6 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -103995,6 +103995,12 @@ async function isAnalyzingDefaultBranch() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -104135,70 +104141,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - 
defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: 
"CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -104210,25 +104195,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { 
defaultValue: false, @@ -104238,23 +104218,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -104640,28 +104610,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: 
"overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path4.join(tempDir, "config"); diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 42738ca24..5bf3fa045 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -165427,6 +165427,12 @@ async function isAnalyzingDefaultBranch() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -165572,70 +165578,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: 
CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { 
defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -165647,25 +165632,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* 
OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -165675,23 +165655,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -166148,28 +166118,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* 
OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path8.join(tempDir, "config"); diff --git a/lib/init-action.js b/lib/init-action.js index b07f684e9..1984ba004 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -105107,6 +105107,12 @@ async function getGeneratedFiles(workingDirectory) { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; 
+var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -105395,70 +105401,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. 
["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 
0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -105470,25 +105455,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -105498,23 +105478,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, 
["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -106384,28 +106354,20 @@ async function loadUserConfig(logger, configFile, workspacePath, apiDetails, tem } } var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* 
OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; async function checkOverlayAnalysisFeatureEnabled(features, codeql, languages, codeScanningConfig) { if (!await features.getValue("overlay_analysis" /* OverlayAnalysis */, codeql)) { diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index c5230200e..c8fc414a8 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -103988,6 +103988,12 @@ async function isAnalyzingDefaultBranch() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -104126,70 +104132,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - 
["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* 
OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -104201,25 +104186,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, 
["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -104229,23 +104209,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -104308,28 +104278,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var 
OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path3.join(tempDir, "config"); diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index d323a2dcc..6fd852209 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -103882,6 +103882,12 @@ function formatDuration(durationMs) { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -104023,70 +104029,49 @@ var featureConfig = { envVar: 
"CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* 
OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -104098,25 +104083,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, 
["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -104126,23 +104106,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -104870,28 +104840,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: 
"overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function appendExtraQueryExclusions(extraQueryExclusions, cliConfig) { const augmentedConfig = cloneObject(cliConfig); diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index b9a712fa4..cb16abb2b 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -161410,6 +161410,12 @@ var semver3 = __toESM(require_semver2()); // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var 
CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; @@ -161492,70 +161498,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. 
["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 
0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -161567,25 +161552,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -161595,23 +161575,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, 
["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -161674,28 +161644,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* 
OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path.join(tempDir, "config"); diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 694b6a100..f58a672dd 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -120731,6 +120731,12 @@ function getActionsLogger() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; @@ -120815,70 +120821,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: 
"CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - 
minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -120890,25 +120875,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -120918,23 +120898,13 @@ var featureConfig 
= { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -121418,28 +121388,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: 
"overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; // src/status-report.ts diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 67f15351c..8b9170299 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -107143,6 +107143,12 @@ async function isAnalyzingDefaultBranch() { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -107282,70 +107288,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 
0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" 
/* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -107357,25 +107342,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* 
OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -107385,23 +107365,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -107482,28 +107452,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: 
"overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path6.join(tempDir, "config"); diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index aa0d4cda9..02927d2b7 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -161568,6 +161568,12 @@ var semver3 = __toESM(require_semver2()); // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var 
OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; @@ -161654,70 +161660,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. 
["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 
0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -161729,25 +161714,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -161757,23 +161737,13 @@ var featureConfig = { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, 
["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -161836,28 +161806,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: "overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* 
OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; // src/setup-codeql.ts diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 9130bfc50..ed5d452e1 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -106855,6 +106855,12 @@ function formatDuration(durationMs) { // src/overlay/index.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +var CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +var CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +var CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; async function writeBaseDatabaseOidsFile(config, sourceRoot) { @@ -106996,70 +107002,49 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION }, - ["overlay_analysis_actions" /* OverlayAnalysisActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */]: { - defaultValue: false, - envVar: 
"CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: void 0 - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. + // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP }, ["overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - 
minimumVersion: void 0 - }, - ["overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: void 0 - }, - ["overlay_analysis_cpp" /* OverlayAnalysisCpp */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_csharp" /* OverlayAnalysisCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP + }, + ["overlay_analysis_go" /* OverlayAnalysisGo */]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO }, ["overlay_analysis_status_check" /* OverlayAnalysisStatusCheck */]: { defaultValue: false, @@ -107071,25 +107056,20 @@ var featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: void 0 }, - ["overlay_analysis_go" /* OverlayAnalysisGo */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: void 0 - }, ["overlay_analysis_java" /* OverlayAnalysisJava */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA }, ["overlay_analysis_javascript" /* OverlayAnalysisJavascript */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT }, ["overlay_analysis_python" /* OverlayAnalysisPython */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON }, ["overlay_analysis_resource_checks_v2" /* OverlayAnalysisResourceChecksV2 */]: { defaultValue: false, @@ -107099,23 +107079,13 @@ var featureConfig 
= { ["overlay_analysis_ruby" /* OverlayAnalysisRuby */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: void 0 - }, - ["overlay_analysis_rust" /* OverlayAnalysisRust */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: void 0 + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY }, ["overlay_analysis_skip_resource_checks" /* OverlayAnalysisSkipResourceChecks */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: void 0 }, - ["overlay_analysis_swift" /* OverlayAnalysisSwift */]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: void 0 - }, ["python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", @@ -107681,28 +107651,20 @@ var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB = 14e3; var OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_BYTES = OVERLAY_MINIMUM_AVAILABLE_DISK_SPACE_V2_MB * 1e6; var OVERLAY_MINIMUM_MEMORY_MB = 5 * 1024; var OVERLAY_ANALYSIS_FEATURES = { - actions: "overlay_analysis_actions" /* OverlayAnalysisActions */, - cpp: "overlay_analysis_cpp" /* OverlayAnalysisCpp */, csharp: "overlay_analysis_csharp" /* OverlayAnalysisCsharp */, go: "overlay_analysis_go" /* OverlayAnalysisGo */, java: "overlay_analysis_java" /* OverlayAnalysisJava */, javascript: "overlay_analysis_javascript" /* OverlayAnalysisJavascript */, python: "overlay_analysis_python" /* OverlayAnalysisPython */, - ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */, - rust: "overlay_analysis_rust" /* OverlayAnalysisRust */, - swift: "overlay_analysis_swift" /* OverlayAnalysisSwift */ + ruby: "overlay_analysis_ruby" /* OverlayAnalysisRuby */ }; var OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES = { - actions: "overlay_analysis_code_scanning_actions" /* OverlayAnalysisCodeScanningActions */, - cpp: 
"overlay_analysis_code_scanning_cpp" /* OverlayAnalysisCodeScanningCpp */, csharp: "overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */, go: "overlay_analysis_code_scanning_go" /* OverlayAnalysisCodeScanningGo */, java: "overlay_analysis_code_scanning_java" /* OverlayAnalysisCodeScanningJava */, javascript: "overlay_analysis_code_scanning_javascript" /* OverlayAnalysisCodeScanningJavascript */, python: "overlay_analysis_code_scanning_python" /* OverlayAnalysisCodeScanningPython */, - ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */, - rust: "overlay_analysis_code_scanning_rust" /* OverlayAnalysisCodeScanningRust */, - swift: "overlay_analysis_code_scanning_swift" /* OverlayAnalysisCodeScanningSwift */ + ruby: "overlay_analysis_code_scanning_ruby" /* OverlayAnalysisCodeScanningRuby */ }; function getPathToParsedConfigFile(tempDir) { return path7.join(tempDir, "config"); diff --git a/src/config-utils.ts b/src/config-utils.ts index b80586938..b02ac139e 100644 --- a/src/config-utils.ts +++ b/src/config-utils.ts @@ -630,30 +630,27 @@ async function loadUserConfig( } } -const OVERLAY_ANALYSIS_FEATURES: Record = { - actions: Feature.OverlayAnalysisActions, - cpp: Feature.OverlayAnalysisCpp, +// Maps languages to their overlay analysis feature flags. Only languages that +// are GA or in staff-ship for overlay analysis are included here. Languages +// without an entry will have overlay analysis disabled. 
+const OVERLAY_ANALYSIS_FEATURES: Partial> = { csharp: Feature.OverlayAnalysisCsharp, go: Feature.OverlayAnalysisGo, java: Feature.OverlayAnalysisJava, javascript: Feature.OverlayAnalysisJavascript, python: Feature.OverlayAnalysisPython, ruby: Feature.OverlayAnalysisRuby, - rust: Feature.OverlayAnalysisRust, - swift: Feature.OverlayAnalysisSwift, }; -const OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES: Record = { - actions: Feature.OverlayAnalysisCodeScanningActions, - cpp: Feature.OverlayAnalysisCodeScanningCpp, +const OVERLAY_ANALYSIS_CODE_SCANNING_FEATURES: Partial< + Record +> = { csharp: Feature.OverlayAnalysisCodeScanningCsharp, go: Feature.OverlayAnalysisCodeScanningGo, java: Feature.OverlayAnalysisCodeScanningJava, javascript: Feature.OverlayAnalysisCodeScanningJavascript, python: Feature.OverlayAnalysisCodeScanningPython, ruby: Feature.OverlayAnalysisCodeScanningRuby, - rust: Feature.OverlayAnalysisCodeScanningRust, - swift: Feature.OverlayAnalysisCodeScanningSwift, }; /** diff --git a/src/feature-flags.ts b/src/feature-flags.ts index c77bd794f..2cff1e662 100644 --- a/src/feature-flags.ts +++ b/src/feature-flags.ts @@ -7,7 +7,15 @@ import { getApiClient } from "./api-client"; import type { CodeQL } from "./codeql"; import * as defaults from "./defaults.json"; import { Logger } from "./logging"; -import { CODEQL_OVERLAY_MINIMUM_VERSION } from "./overlay"; +import { + CODEQL_OVERLAY_MINIMUM_VERSION, + CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP, + CODEQL_OVERLAY_MINIMUM_VERSION_GO, + CODEQL_OVERLAY_MINIMUM_VERSION_JAVA, + CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT, + CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON, + CODEQL_OVERLAY_MINIMUM_VERSION_RUBY, +} from "./overlay"; import { RepositoryNwo } from "./repository"; import { ToolsFeature } from "./tools-features"; import * as util from "./util"; @@ -49,24 +57,18 @@ export enum Feature { IgnoreGeneratedFiles = "ignore_generated_files", JavaNetworkDebugging = "java_network_debugging", OverlayAnalysis = "overlay_analysis", - 
OverlayAnalysisActions = "overlay_analysis_actions", - OverlayAnalysisCodeScanningActions = "overlay_analysis_code_scanning_actions", - OverlayAnalysisCodeScanningCpp = "overlay_analysis_code_scanning_cpp", OverlayAnalysisCodeScanningCsharp = "overlay_analysis_code_scanning_csharp", OverlayAnalysisCodeScanningGo = "overlay_analysis_code_scanning_go", OverlayAnalysisCodeScanningJava = "overlay_analysis_code_scanning_java", OverlayAnalysisCodeScanningJavascript = "overlay_analysis_code_scanning_javascript", OverlayAnalysisCodeScanningPython = "overlay_analysis_code_scanning_python", OverlayAnalysisCodeScanningRuby = "overlay_analysis_code_scanning_ruby", - OverlayAnalysisCodeScanningRust = "overlay_analysis_code_scanning_rust", - OverlayAnalysisCodeScanningSwift = "overlay_analysis_code_scanning_swift", - OverlayAnalysisCpp = "overlay_analysis_cpp", OverlayAnalysisCsharp = "overlay_analysis_csharp", + OverlayAnalysisGo = "overlay_analysis_go", /** Controls whether the Actions cache is checked for overlay build outcomes. */ OverlayAnalysisStatusCheck = "overlay_analysis_status_check", /** Controls whether overlay build failures on are stored in the Actions cache. */ OverlayAnalysisStatusSave = "overlay_analysis_status_save", - OverlayAnalysisGo = "overlay_analysis_go", OverlayAnalysisJava = "overlay_analysis_java", OverlayAnalysisJavascript = "overlay_analysis_javascript", OverlayAnalysisPython = "overlay_analysis_python", @@ -76,10 +78,8 @@ export enum Feature { */ OverlayAnalysisResourceChecksV2 = "overlay_analysis_resource_checks_v2", OverlayAnalysisRuby = "overlay_analysis_ruby", - OverlayAnalysisRust = "overlay_analysis_rust", /** Controls whether hardware checks are skipped for overlay analysis. 
*/ OverlayAnalysisSkipResourceChecks = "overlay_analysis_skip_resource_checks", - OverlayAnalysisSwift = "overlay_analysis_swift", PythonDefaultIsToNotExtractStdlib = "python_default_is_to_not_extract_stdlib", QaTelemetryEnabled = "qa_telemetry_enabled", /** Note that this currently only disables baseline file coverage information. */ @@ -195,70 +195,48 @@ export const featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS", minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION, }, - [Feature.OverlayAnalysisActions]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_ACTIONS", - minimumVersion: undefined, - }, - [Feature.OverlayAnalysisCodeScanningActions]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_ACTIONS", - minimumVersion: undefined, - }, - [Feature.OverlayAnalysisCodeScanningCpp]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CPP", - minimumVersion: undefined, - }, + // Per-language overlay feature flags. Each has minimumVersion set to the + // minimum CLI version that supports overlay analysis for that language. + // Only languages that are GA or in staff-ship should have feature flags here. 
[Feature.OverlayAnalysisCodeScanningCsharp]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP, }, [Feature.OverlayAnalysisCodeScanningGo]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_GO", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO, }, [Feature.OverlayAnalysisCodeScanningJava]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVA", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA, }, [Feature.OverlayAnalysisCodeScanningJavascript]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_JAVASCRIPT", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT, }, [Feature.OverlayAnalysisCodeScanningPython]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_PYTHON", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON, }, [Feature.OverlayAnalysisCodeScanningRuby]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUBY", - minimumVersion: undefined, - }, - [Feature.OverlayAnalysisCodeScanningRust]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_RUST", - minimumVersion: undefined, - }, - [Feature.OverlayAnalysisCodeScanningSwift]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_SWIFT", - minimumVersion: undefined, - }, - [Feature.OverlayAnalysisCpp]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CPP", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY, }, [Feature.OverlayAnalysisCsharp]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CSHARP", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP, + }, + 
[Feature.OverlayAnalysisGo]: { + defaultValue: false, + envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_GO, }, [Feature.OverlayAnalysisStatusCheck]: { defaultValue: false, @@ -270,25 +248,20 @@ export const featureConfig = { envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_STATUS_SAVE", minimumVersion: undefined, }, - [Feature.OverlayAnalysisGo]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_GO", - minimumVersion: undefined, - }, [Feature.OverlayAnalysisJava]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVA", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVA, }, [Feature.OverlayAnalysisJavascript]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_JAVASCRIPT", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT, }, [Feature.OverlayAnalysisPython]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_PYTHON", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON, }, [Feature.OverlayAnalysisResourceChecksV2]: { defaultValue: false, @@ -298,23 +271,13 @@ export const featureConfig = { [Feature.OverlayAnalysisRuby]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUBY", - minimumVersion: undefined, - }, - [Feature.OverlayAnalysisRust]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_RUST", - minimumVersion: undefined, + minimumVersion: CODEQL_OVERLAY_MINIMUM_VERSION_RUBY, }, [Feature.OverlayAnalysisSkipResourceChecks]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SKIP_RESOURCE_CHECKS", minimumVersion: undefined, }, - [Feature.OverlayAnalysisSwift]: { - defaultValue: false, - envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_SWIFT", - minimumVersion: undefined, - }, [Feature.PythonDefaultIsToNotExtractStdlib]: { defaultValue: false, envVar: "CODEQL_ACTION_DISABLE_PYTHON_STANDARD_LIBRARY_EXTRACTION", diff --git 
a/src/overlay/index.ts b/src/overlay/index.ts index 3dde65f56..3b45a60d4 100644 --- a/src/overlay/index.ts +++ b/src/overlay/index.ts @@ -33,6 +33,15 @@ export enum OverlayDatabaseMode { export const CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; +// Per-language minimum CLI versions for overlay analysis, based on release +// validation data. +export const CODEQL_OVERLAY_MINIMUM_VERSION_CSHARP = "2.24.1"; +export const CODEQL_OVERLAY_MINIMUM_VERSION_GO = "2.24.2"; +export const CODEQL_OVERLAY_MINIMUM_VERSION_JAVA = "2.23.8"; +export const CODEQL_OVERLAY_MINIMUM_VERSION_JAVASCRIPT = "2.23.9"; +export const CODEQL_OVERLAY_MINIMUM_VERSION_PYTHON = "2.23.9"; +export const CODEQL_OVERLAY_MINIMUM_VERSION_RUBY = "2.23.9"; + /** * The maximum (uncompressed) size of the overlay base database that we will * upload. By default, the Actions Cache has an overall capacity of 10 GB, and From 867f2b0e0a8fba2c73522b6a5554a960b098cac1 Mon Sep 17 00:00:00 2001 From: Sam Robson Date: Thu, 5 Mar 2026 18:28:34 +0000 Subject: [PATCH 28/50] test: verify overlay analysis is disabled for languages without per-language feature flags --- lib/analyze-action-post.js | 1 - lib/analyze-action.js | 1 - lib/autobuild-action.js | 1 - lib/init-action-post.js | 1 - lib/init-action.js | 1 - lib/resolve-environment-action.js | 1 - lib/setup-codeql-action.js | 1 - lib/start-proxy-action-post.js | 1 - lib/start-proxy-action.js | 1 - lib/upload-lib.js | 1 - lib/upload-sarif-action-post.js | 1 - lib/upload-sarif-action.js | 1 - src/config-utils.test.ts | 17 +++++++++++++++++ src/config-utils.ts | 8 +++++--- 14 files changed, 22 insertions(+), 15 deletions(-) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index a1d3a9476..2b0944fa3 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -162095,7 +162095,6 @@ var featureConfig = { // Per-language overlay feature flags. 
Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 37f9cf758..7635bcf11 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -107844,7 +107844,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index 27483c3c6..cd786f540 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -104144,7 +104144,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 5bf3fa045..ede950e52 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -165581,7 +165581,6 @@ var featureConfig = { // Per-language overlay feature flags. 
Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/init-action.js b/lib/init-action.js index 1984ba004..ee7de2d1d 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -105404,7 +105404,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index c8fc414a8..f4da0c6a3 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -104135,7 +104135,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. 
["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index 6fd852209..239af7bcb 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -104032,7 +104032,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index cb16abb2b..3a50dde49 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -161501,7 +161501,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index f58a672dd..7afdfef4c 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -120824,7 +120824,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. 
- // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 8b9170299..bf40226c3 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -107291,7 +107291,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index 02927d2b7..66fd24ec5 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -161663,7 +161663,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. - // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index ed5d452e1..ed8cd0f8f 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -107005,7 +107005,6 @@ var featureConfig = { // Per-language overlay feature flags. Each has minimumVersion set to the // minimum CLI version that supports overlay analysis for that language. // Only languages that are GA or in staff-ship should have feature flags here. 
- // See https://github.com/github/codeql-core/issues/5120. ["overlay_analysis_code_scanning_csharp" /* OverlayAnalysisCodeScanningCsharp */]: { defaultValue: false, envVar: "CODEQL_ACTION_OVERLAY_ANALYSIS_CODE_SCANNING_CSHARP", diff --git a/src/config-utils.test.ts b/src/config-utils.test.ts index 06994c0ed..1887f1273 100644 --- a/src/config-utils.test.ts +++ b/src/config-utils.test.ts @@ -2011,6 +2011,23 @@ for (const language in KnownLanguage) { ); } +// Verify that a language without a per-language overlay feature flag cannot have +// overlay analysis enabled, even when the base overlay feature flag is on. +// Using cpp here as it doesn't currently have overlay support — update this if +// cpp gains overlay support. +test.serial( + checkOverlayEnablementMacro, + "No overlay analysis for language without per-language overlay feature flag", + { + languages: [KnownLanguage.cpp], + features: [Feature.OverlayAnalysis], + isPullRequest: true, + }, + { + disabledReason: OverlayDisabledReason.LanguageNotEnabled, + }, +); + test.serial( "hasActionsWorkflows doesn't throw if workflows folder doesn't exist", async (t) => { diff --git a/src/config-utils.ts b/src/config-utils.ts index b02ac139e..79ffdd30e 100644 --- a/src/config-utils.ts +++ b/src/config-utils.ts @@ -630,9 +630,11 @@ async function loadUserConfig( } } -// Maps languages to their overlay analysis feature flags. Only languages that -// are GA or in staff-ship for overlay analysis are included here. Languages -// without an entry will have overlay analysis disabled. +/** + * Maps languages to their overlay analysis feature flags. Only languages that + * are GA or in staff-ship for overlay analysis are included here. Languages + * without an entry will have overlay analysis disabled. 
+ */ const OVERLAY_ANALYSIS_FEATURES: Partial> = { csharp: Feature.OverlayAnalysisCsharp, go: Feature.OverlayAnalysisGo, From c102a6d8cda27cd4d26e29b4aab226b54a228ab4 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Mon, 9 Mar 2026 16:01:03 +0000 Subject: [PATCH 29/50] Require tools feature flag And now that we have this, drop the restriction to `github` org. --- lib/analyze-action-post.js | 7 ++----- lib/analyze-action.js | 7 ++----- lib/autobuild-action.js | 7 ++----- lib/init-action-post.js | 7 ++----- lib/init-action.js | 14 +++++--------- lib/resolve-environment-action.js | 7 ++----- lib/setup-codeql-action.js | 7 ++----- lib/start-proxy-action-post.js | 7 ++----- lib/start-proxy-action.js | 7 ++----- lib/upload-lib.js | 7 ++----- lib/upload-sarif-action-post.js | 7 ++----- lib/upload-sarif-action.js | 7 ++----- src/feature-flags.ts | 5 +---- src/init-action.ts | 2 +- src/init.test.ts | 24 ++++-------------------- src/init.ts | 7 ++----- src/tools-features.ts | 1 + 17 files changed, 36 insertions(+), 94 deletions(-) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 4fd650079..328c64771 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -162221,11 +162221,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/analyze-action.js b/lib/analyze-action.js index 3935138f5..bae47ad2d 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -107970,11 +107970,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index de91c2350..33701fe48 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -104270,11 +104270,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 42738ca24..74818a1e2 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -165707,11 +165707,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/init-action.js b/lib/init-action.js index b07f684e9..883313758 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -105530,11 +105530,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, @@ -109094,14 +109091,13 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = } } } -async function getFileCoverageInformationEnabled(debugMode, repositoryNwo, features) { +async function getFileCoverageInformationEnabled(debugMode, codeql, features) { return ( // Always enable file coverage information in debug mode debugMode || // We're most interested in speeding up PRs, and we want to keep // submitting file coverage information for the default branch since // it is used to populate the status page. - !isAnalyzingPullRequest() || // For now, restrict this feature to the GitHub org - repositoryNwo.owner !== "github" || !await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */) + !isAnalyzingPullRequest() || !await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) ); } @@ -109743,7 +109739,7 @@ async function run(startedAt) { repositoryProperties: repositoryPropertiesResult.orElse({}), enableFileCoverageInformation: await getFileCoverageInformationEnabled( debugMode, - repositoryNwo, + codeql, features ), logger diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index c5230200e..3dd3ed839 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -104261,11 +104261,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. 
However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index d323a2dcc..bff5be22b 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -104158,11 +104158,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index b9a712fa4..3018a5102 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -161627,11 +161627,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 694b6a100..932966e44 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -120950,11 +120950,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 67f15351c..ee9218896 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -107417,11 +107417,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index aa0d4cda9..0f6b9cbf9 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -161789,11 +161789,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. - minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 9130bfc50..7b3448637 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -107131,11 +107131,8 @@ var featureConfig = { ["skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. 
- minimumVersion: void 0 + minimumVersion: void 0, + toolsFeature: "suppressesMissingFileBaselineWarning" /* SuppressesMissingFileBaselineWarning */ }, ["start_proxy_remove_unused_registries" /* StartProxyRemoveUnusedRegistries */]: { defaultValue: false, diff --git a/src/feature-flags.ts b/src/feature-flags.ts index c77bd794f..af192ed7d 100644 --- a/src/feature-flags.ts +++ b/src/feature-flags.ts @@ -330,11 +330,8 @@ export const featureConfig = { [Feature.SkipFileCoverageOnPrs]: { defaultValue: false, envVar: "CODEQL_ACTION_SKIP_FILE_COVERAGE_ON_PRS", - // For testing, this is not behind a CLI version check yet. However - // before rolling this out externally, we should set a minimum version here - // since current versions of the CodeQL CLI will log if baseline information - // cannot be found when interpreting results. minimumVersion: undefined, + toolsFeature: ToolsFeature.SuppressesMissingFileBaselineWarning, }, [Feature.StartProxyRemoveUnusedRegistries]: { defaultValue: false, diff --git a/src/init-action.ts b/src/init-action.ts index 7bd749e82..de05c9b41 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -378,7 +378,7 @@ async function run(startedAt: Date) { repositoryProperties: repositoryPropertiesResult.orElse({}), enableFileCoverageInformation: await getFileCoverageInformationEnabled( debugMode, - repositoryNwo, + codeql, features, ), logger, diff --git a/src/init.test.ts b/src/init.test.ts index a7d4f4de1..d15737ea3 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -13,7 +13,6 @@ import { getFileCoverageInformationEnabled, } from "./init"; import { KnownLanguage } from "./languages"; -import { parseRepositoryNwo } from "./repository"; import { createFeatures, LoggedMessage, @@ -456,7 +455,7 @@ test("file coverage information enabled when debugMode is true", async (t) => { t.true( await getFileCoverageInformationEnabled( true, // debugMode - parseRepositoryNwo("github/codeql-action"), + createStubCodeQL({}), 
createFeatures([Feature.SkipFileCoverageOnPrs]), ), ); @@ -470,22 +469,7 @@ test.serial( t.true( await getFileCoverageInformationEnabled( false, // debugMode - parseRepositoryNwo("github/codeql-action"), - createFeatures([Feature.SkipFileCoverageOnPrs]), - ), - ); - }, -); - -test.serial( - "file coverage information enabled when owner is not 'github'", - async (t) => { - sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - parseRepositoryNwo("other-org/some-repo"), + createStubCodeQL({}), createFeatures([Feature.SkipFileCoverageOnPrs]), ), ); @@ -500,7 +484,7 @@ test.serial( t.true( await getFileCoverageInformationEnabled( false, // debugMode - parseRepositoryNwo("github/codeql-action"), + createStubCodeQL({}), createFeatures([]), ), ); @@ -515,7 +499,7 @@ test.serial( t.false( await getFileCoverageInformationEnabled( false, // debugMode - parseRepositoryNwo("github/codeql-action"), + createStubCodeQL({}), createFeatures([Feature.SkipFileCoverageOnPrs]), ), ); diff --git a/src/init.ts b/src/init.ts index a5c8e9ec0..cf84322b9 100644 --- a/src/init.ts +++ b/src/init.ts @@ -20,7 +20,6 @@ import { } from "./feature-flags"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; -import { RepositoryNwo } from "./repository"; import { ToolsSource } from "./setup-codeql"; import { ZstdAvailability } from "./tar"; import { ToolsDownloadStatusReport } from "./tools-download"; @@ -300,7 +299,7 @@ export function cleanupDatabaseClusterDirectory( export async function getFileCoverageInformationEnabled( debugMode: boolean, - repositoryNwo: RepositoryNwo, + codeql: CodeQL, features: FeatureEnablement, ): Promise { return ( @@ -310,8 +309,6 @@ export async function getFileCoverageInformationEnabled( // submitting file coverage information for the default branch since // it is used to populate the status page. 
!isAnalyzingPullRequest() || - // For now, restrict this feature to the GitHub org - repositoryNwo.owner !== "github" || - !(await features.getValue(Feature.SkipFileCoverageOnPrs)) + !(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql)) ); } diff --git a/src/tools-features.ts b/src/tools-features.ts index f57226434..5eefbd227 100644 --- a/src/tools-features.ts +++ b/src/tools-features.ts @@ -10,6 +10,7 @@ export enum ToolsFeature { ForceOverwrite = "forceOverwrite", IndirectTracingSupportsStaticBinaries = "indirectTracingSupportsStaticBinaries", PythonDefaultIsToNotExtractStdlib = "pythonDefaultIsToNotExtractStdlib", + SuppressesMissingFileBaselineWarning = "suppressesMissingFileBaselineWarning", } /** From 9e8c05933fa0c6d1a63f39fefc7e121d77e5a764 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Mon, 9 Mar 2026 17:06:22 +0000 Subject: [PATCH 30/50] Add ability to override via repository property --- CHANGELOG.md | 5 +++++ lib/analyze-action-post.js | 1 + lib/analyze-action.js | 1 + lib/autobuild-action.js | 1 + lib/init-action-post.js | 1 + lib/init-action.js | 13 +++++++++---- lib/resolve-environment-action.js | 1 + lib/setup-codeql-action.js | 1 + lib/start-proxy-action-post.js | 1 + lib/start-proxy-action.js | 1 + lib/upload-lib.js | 1 + lib/upload-sarif-action-post.js | 1 + lib/upload-sarif-action.js | 1 + src/feature-flags/properties.ts | 4 ++++ src/init-action.ts | 1 + src/init.test.ts | 22 ++++++++++++++++++++++ src/init.ts | 9 +++++++++ 17 files changed, 61 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 28a3105f8..700402bc1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,11 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th ## [UNRELEASED] - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. 
[#3557](https://github.com/github/codeql-action/pull/3557) +- Added an experimental change which skips collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed when analyzing the default branch and protected branches. + + Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). + + We expect to roll this change out to everyone in March. [#TODO](https://github.com/github/codeql-action/pull/TODO) ## 4.32.6 - 05 Mar 2026 diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 328c64771..46bfde5b0 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -161733,6 +161733,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/analyze-action.js b/lib/analyze-action.js index bae47ad2d..f83b1ac07 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -107232,6 +107232,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; 
RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index 33701fe48..482b85ff0 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -103786,6 +103786,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 74818a1e2..aabc7baae 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -165100,6 +165100,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/init-action.js b/lib/init-action.js index 883313758..8b279f437 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -104406,6 +104406,7 @@ function getUnknownLanguagesError(languages) { var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = 
"github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); function isString(value) { @@ -104422,7 +104423,8 @@ var booleanProperty = { }; var repositoryPropertyParsers = { ["github-codeql-disable-overlay" /* DISABLE_OVERLAY */]: booleanProperty, - ["github-codeql-extra-queries" /* EXTRA_QUERIES */]: stringProperty + ["github-codeql-extra-queries" /* EXTRA_QUERIES */]: stringProperty, + ["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */]: booleanProperty }; async function loadPropertiesFromApi(gitHubVersion, logger, repositoryNwo) { if (gitHubVersion.type === "GitHub Enterprise Server" /* GHES */) { @@ -109091,13 +109093,15 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = } } } -async function getFileCoverageInformationEnabled(debugMode, codeql, features) { +async function getFileCoverageInformationEnabled(debugMode, codeql, features, repositoryProperties) { return ( // Always enable file coverage information in debug mode debugMode || // We're most interested in speeding up PRs, and we want to keep // submitting file coverage information for the default branch since // it is used to populate the status page. - !isAnalyzingPullRequest() || !await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) + !isAnalyzingPullRequest() || // Allow repositories to opt in to file coverage information on PRs + // using a repository property. 
+ repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true || !await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) ); } @@ -109740,7 +109744,8 @@ async function run(startedAt) { enableFileCoverageInformation: await getFileCoverageInformationEnabled( debugMode, codeql, - features + features, + repositoryPropertiesResult.orElse({}) ), logger }); diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index 3dd3ed839..854dc9de4 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -103785,6 +103785,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index bff5be22b..3a1825ec5 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -104781,6 +104781,7 @@ var semver5 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index 3018a5102..4b36497cb 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -161368,6 +161368,7 @@ 
var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 932966e44..4c88bab59 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -121388,6 +121388,7 @@ var semver5 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/upload-lib.js b/lib/upload-lib.js index ee9218896..ad0a615df 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -106826,6 +106826,7 @@ var semver2 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index 0f6b9cbf9..d8915fb49 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -161518,6 +161518,7 @@ var semver2 
= __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 7b3448637..3f97ab020 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -107572,6 +107572,7 @@ var semver5 = __toESM(require_semver2()); var RepositoryPropertyName = /* @__PURE__ */ ((RepositoryPropertyName2) => { RepositoryPropertyName2["DISABLE_OVERLAY"] = "github-codeql-disable-overlay"; RepositoryPropertyName2["EXTRA_QUERIES"] = "github-codeql-extra-queries"; + RepositoryPropertyName2["FILE_COVERAGE_ON_PRS"] = "github-codeql-file-coverage-on-prs"; return RepositoryPropertyName2; })(RepositoryPropertyName || {}); var KNOWN_REPOSITORY_PROPERTY_NAMES = new Set( diff --git a/src/feature-flags/properties.ts b/src/feature-flags/properties.ts index cb407c308..9245f7ece 100644 --- a/src/feature-flags/properties.ts +++ b/src/feature-flags/properties.ts @@ -9,12 +9,14 @@ import { GitHubVariant, GitHubVersion } from "../util"; export enum RepositoryPropertyName { DISABLE_OVERLAY = "github-codeql-disable-overlay", EXTRA_QUERIES = "github-codeql-extra-queries", + FILE_COVERAGE_ON_PRS = "github-codeql-file-coverage-on-prs", } /** Parsed types of the known repository properties. */ export type AllRepositoryProperties = { [RepositoryPropertyName.DISABLE_OVERLAY]: boolean; [RepositoryPropertyName.EXTRA_QUERIES]: string; + [RepositoryPropertyName.FILE_COVERAGE_ON_PRS]: boolean; }; /** Parsed repository properties. 
*/ @@ -24,6 +26,7 @@ export type RepositoryProperties = Partial; export type RepositoryPropertyApiType = { [RepositoryPropertyName.DISABLE_OVERLAY]: string; [RepositoryPropertyName.EXTRA_QUERIES]: string; + [RepositoryPropertyName.FILE_COVERAGE_ON_PRS]: string; }; /** The type of functions which take the `value` from the API and try to convert it to the type we want. */ @@ -70,6 +73,7 @@ const repositoryPropertyParsers: { } = { [RepositoryPropertyName.DISABLE_OVERLAY]: booleanProperty, [RepositoryPropertyName.EXTRA_QUERIES]: stringProperty, + [RepositoryPropertyName.FILE_COVERAGE_ON_PRS]: booleanProperty, }; /** diff --git a/src/init-action.ts b/src/init-action.ts index de05c9b41..4806141a5 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -380,6 +380,7 @@ async function run(startedAt: Date) { debugMode, codeql, features, + repositoryPropertiesResult.orElse({}), ), logger, }); diff --git a/src/init.test.ts b/src/init.test.ts index d15737ea3..dbc4b4c30 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -457,6 +457,7 @@ test("file coverage information enabled when debugMode is true", async (t) => { true, // debugMode createStubCodeQL({}), createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ), ); }); @@ -471,6 +472,7 @@ test.serial( false, // debugMode createStubCodeQL({}), createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ), ); }, @@ -486,6 +488,25 @@ test.serial( false, // debugMode createStubCodeQL({}), createFeatures([]), + {}, + ), + ); + }, +); + +test.serial( + "file coverage information enabled when repository property is set", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); + + t.true( + await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + { + "github-codeql-file-coverage-on-prs": true, + }, ), ); }, @@ -501,6 +522,7 @@ test.serial( false, // debugMode createStubCodeQL({}), 
createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ), ); }, diff --git a/src/init.ts b/src/init.ts index cf84322b9..b5e90b68b 100644 --- a/src/init.ts +++ b/src/init.ts @@ -18,6 +18,10 @@ import { Feature, FeatureEnablement, } from "./feature-flags"; +import { + RepositoryProperties, + RepositoryPropertyName, +} from "./feature-flags/properties"; import { KnownLanguage, Language } from "./languages"; import { Logger, withGroupAsync } from "./logging"; import { ToolsSource } from "./setup-codeql"; @@ -301,6 +305,7 @@ export async function getFileCoverageInformationEnabled( debugMode: boolean, codeql: CodeQL, features: FeatureEnablement, + repositoryProperties: RepositoryProperties, ): Promise { return ( // Always enable file coverage information in debug mode @@ -309,6 +314,10 @@ export async function getFileCoverageInformationEnabled( // submitting file coverage information for the default branch since // it is used to populate the status page. !isAnalyzingPullRequest() || + // Allow repositories to opt in to file coverage information on PRs + // using a repository property. 
+ repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === + true || !(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql)) ); } From a3fdd0e0b5ce795f906edcd17cb6453d14e531f4 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Mon, 9 Mar 2026 17:12:02 +0000 Subject: [PATCH 31/50] Add telemetry diagnostic to track whether repo property is used --- lib/init-action.js | 10 ++++++++++ src/init-action.ts | 19 +++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/lib/init-action.js b/lib/init-action.js index 8b279f437..bcf110f6a 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -109761,6 +109761,16 @@ async function run(startedAt) { ) ); } + if (config.enableFileCoverageInformation && isAnalyzingPullRequest() && repositoryPropertiesResult.orElse({})["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { + addNoLanguageDiagnostic( + config, + makeTelemetryDiagnostic( + "codeql-action/file-coverage-on-prs-enabled-by-repository-property", + "File coverage on PRs enabled by repository property", + {} + ) + ); + } await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); diff --git a/src/init-action.ts b/src/init-action.ts index 4806141a5..3b673723a 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -14,6 +14,7 @@ import { getOptionalInput, getRequiredInput, getTemporaryDirectory, + isAnalyzingPullRequest, persistInputs, } from "./actions-util"; import { AnalysisKind, getAnalysisKinds } from "./analyses"; @@ -42,6 +43,7 @@ import { Feature, FeatureEnablement, initFeatures } from "./feature-flags"; import { loadPropertiesFromApi, RepositoryProperties, + RepositoryPropertyName, } from "./feature-flags/properties"; import { checkInstallPython311, @@ -398,6 +400,23 @@ async function run(startedAt: Date) { ); } + if ( + config.enableFileCoverageInformation && + isAnalyzingPullRequest() && + repositoryPropertiesResult.orElse({})[ + 
RepositoryPropertyName.FILE_COVERAGE_ON_PRS + ] === true + ) { + addNoLanguageDiagnostic( + config, + makeTelemetryDiagnostic( + "codeql-action/file-coverage-on-prs-enabled-by-repository-property", + "File coverage on PRs enabled by repository property", + {}, + ), + ); + } + await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error = wrapError(unwrappedError); From 6773afd15934ec2b03e88b7e91b646f7164d1027 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Mon, 9 Mar 2026 16:06:25 +0000 Subject: [PATCH 32/50] Add changelog note --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 700402bc1..cf2f02301 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,7 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). - We expect to roll this change out to everyone in March. [#TODO](https://github.com/github/codeql-action/pull/TODO) + We expect to roll this change out to everyone in March. 
[#3562](https://github.com/github/codeql-action/pull/3562) ## 4.32.6 - 05 Mar 2026 From 3592fe5d7ace9525afa253e19223d54e95eacd8d Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Mon, 9 Mar 2026 17:32:57 +0000 Subject: [PATCH 33/50] Address review comments --- CHANGELOG.md | 2 +- lib/init-action.js | 2 +- src/init-action.ts | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cf2f02301..e6e88d35e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th ## [UNRELEASED] - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. [#3557](https://github.com/github/codeql-action/pull/3557) -- Added an experimental change which skips collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed when analyzing the default branch and protected branches. +- Added an experimental change which skips collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). 
diff --git a/lib/init-action.js b/lib/init-action.js index bcf110f6a..42df557b7 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -109761,7 +109761,7 @@ async function run(startedAt) { ) ); } - if (config.enableFileCoverageInformation && isAnalyzingPullRequest() && repositoryPropertiesResult.orElse({})["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { + if (config.enableFileCoverageInformation && isAnalyzingPullRequest() && await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) && repositoryPropertiesResult.orElse({})["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { addNoLanguageDiagnostic( config, makeTelemetryDiagnostic( diff --git a/src/init-action.ts b/src/init-action.ts index 3b673723a..d64832b73 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -403,6 +403,7 @@ async function run(startedAt: Date) { if ( config.enableFileCoverageInformation && isAnalyzingPullRequest() && + (await features.getValue(Feature.SkipFileCoverageOnPrs, codeql)) && repositoryPropertiesResult.orElse({})[ RepositoryPropertyName.FILE_COVERAGE_ON_PRS ] === true From 3d2bdbbd3bf4f1c7216f3b42db04871b4453a4fa Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 11:33:00 +0000 Subject: [PATCH 34/50] Simplify default repo properties --- lib/init-action.js | 7 ++++--- src/init-action.ts | 9 ++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/init-action.js b/lib/init-action.js index 42df557b7..6b9af9b69 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -109713,6 +109713,7 @@ async function run(startedAt) { } analysisKinds = await getAnalysisKinds(logger); const debugMode = getOptionalInput("debug") === "true" || core13.isDebug(); + const repositoryProperties = repositoryPropertiesResult.orElse({}); config = await initConfig2(features, { analysisKinds, languagesInput: getOptionalInput("languages"), @@ -109740,12 +109741,12 @@ async function 
run(startedAt) { githubVersion: gitHubVersion, apiDetails, features, - repositoryProperties: repositoryPropertiesResult.orElse({}), + repositoryProperties, enableFileCoverageInformation: await getFileCoverageInformationEnabled( debugMode, codeql, features, - repositoryPropertiesResult.orElse({}) + repositoryProperties ), logger }); @@ -109761,7 +109762,7 @@ async function run(startedAt) { ) ); } - if (config.enableFileCoverageInformation && isAnalyzingPullRequest() && await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) && repositoryPropertiesResult.orElse({})["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { + if (config.enableFileCoverageInformation && isAnalyzingPullRequest() && await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) && repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { addNoLanguageDiagnostic( config, makeTelemetryDiagnostic( diff --git a/src/init-action.ts b/src/init-action.ts index d64832b73..6a386ed7a 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -348,6 +348,7 @@ async function run(startedAt: Date) { analysisKinds = await getAnalysisKinds(logger); const debugMode = getOptionalInput("debug") === "true" || core.isDebug(); + const repositoryProperties = repositoryPropertiesResult.orElse({}); config = await initConfig(features, { analysisKinds, languagesInput: getOptionalInput("languages"), @@ -377,12 +378,12 @@ async function run(startedAt: Date) { githubVersion: gitHubVersion, apiDetails, features, - repositoryProperties: repositoryPropertiesResult.orElse({}), + repositoryProperties, enableFileCoverageInformation: await getFileCoverageInformationEnabled( debugMode, codeql, features, - repositoryPropertiesResult.orElse({}), + repositoryProperties, ), logger, }); @@ -404,9 +405,7 @@ async function run(startedAt: Date) { config.enableFileCoverageInformation && isAnalyzingPullRequest() && 
(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql)) && - repositoryPropertiesResult.orElse({})[ - RepositoryPropertyName.FILE_COVERAGE_ON_PRS - ] === true + repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === true ) { addNoLanguageDiagnostic( config, From 55ae11793af2e75e1cc9550ee2a16baf5baee055 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 11:42:53 +0000 Subject: [PATCH 35/50] Reduce duplication of `getFileCoverageInformationEnabled` --- lib/init-action.js | 37 +++++++++++++---------- src/init-action.ts | 23 ++++++-------- src/init.test.ts | 74 +++++++++++++++++++++++----------------------- src/init.ts | 42 +++++++++++++++++--------- 4 files changed, 95 insertions(+), 81 deletions(-) diff --git a/lib/init-action.js b/lib/init-action.js index 6b9af9b69..34b9335f5 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -109094,15 +109094,19 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = } } async function getFileCoverageInformationEnabled(debugMode, codeql, features, repositoryProperties) { - return ( - // Always enable file coverage information in debug mode - debugMode || // We're most interested in speeding up PRs, and we want to keep - // submitting file coverage information for the default branch since - // it is used to populate the status page. - !isAnalyzingPullRequest() || // Allow repositories to opt in to file coverage information on PRs - // using a repository property. 
- repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true || !await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) - ); + if (debugMode) { + return { enabled: true, enabledByRepositoryProperty: false }; + } + if (!isAnalyzingPullRequest()) { + return { enabled: true, enabledByRepositoryProperty: false }; + } + if (!await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql)) { + return { enabled: true, enabledByRepositoryProperty: false }; + } + if (repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { + return { enabled: true, enabledByRepositoryProperty: true }; + } + return { enabled: false, enabledByRepositoryProperty: false }; } // src/status-report.ts @@ -109714,6 +109718,12 @@ async function run(startedAt) { analysisKinds = await getAnalysisKinds(logger); const debugMode = getOptionalInput("debug") === "true" || core13.isDebug(); const repositoryProperties = repositoryPropertiesResult.orElse({}); + const fileCoverageResult = await getFileCoverageInformationEnabled( + debugMode, + codeql, + features, + repositoryProperties + ); config = await initConfig2(features, { analysisKinds, languagesInput: getOptionalInput("languages"), @@ -109742,12 +109752,7 @@ async function run(startedAt) { apiDetails, features, repositoryProperties, - enableFileCoverageInformation: await getFileCoverageInformationEnabled( - debugMode, - codeql, - features, - repositoryProperties - ), + enableFileCoverageInformation: fileCoverageResult.enabled, logger }); if (repositoryPropertiesResult.isFailure()) { @@ -109762,7 +109767,7 @@ async function run(startedAt) { ) ); } - if (config.enableFileCoverageInformation && isAnalyzingPullRequest() && await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql) && repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { + if 
(fileCoverageResult.enabledByRepositoryProperty) { addNoLanguageDiagnostic( config, makeTelemetryDiagnostic( diff --git a/src/init-action.ts b/src/init-action.ts index 6a386ed7a..577292ecd 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -14,7 +14,6 @@ import { getOptionalInput, getRequiredInput, getTemporaryDirectory, - isAnalyzingPullRequest, persistInputs, } from "./actions-util"; import { AnalysisKind, getAnalysisKinds } from "./analyses"; @@ -43,7 +42,6 @@ import { Feature, FeatureEnablement, initFeatures } from "./feature-flags"; import { loadPropertiesFromApi, RepositoryProperties, - RepositoryPropertyName, } from "./feature-flags/properties"; import { checkInstallPython311, @@ -349,6 +347,13 @@ async function run(startedAt: Date) { analysisKinds = await getAnalysisKinds(logger); const debugMode = getOptionalInput("debug") === "true" || core.isDebug(); const repositoryProperties = repositoryPropertiesResult.orElse({}); + const fileCoverageResult = await getFileCoverageInformationEnabled( + debugMode, + codeql, + features, + repositoryProperties, + ); + config = await initConfig(features, { analysisKinds, languagesInput: getOptionalInput("languages"), @@ -379,12 +384,7 @@ async function run(startedAt: Date) { apiDetails, features, repositoryProperties, - enableFileCoverageInformation: await getFileCoverageInformationEnabled( - debugMode, - codeql, - features, - repositoryProperties, - ), + enableFileCoverageInformation: fileCoverageResult.enabled, logger, }); @@ -401,12 +401,7 @@ async function run(startedAt: Date) { ); } - if ( - config.enableFileCoverageInformation && - isAnalyzingPullRequest() && - (await features.getValue(Feature.SkipFileCoverageOnPrs, codeql)) && - repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === true - ) { + if (fileCoverageResult.enabledByRepositoryProperty) { addNoLanguageDiagnostic( config, makeTelemetryDiagnostic( diff --git a/src/init.test.ts b/src/init.test.ts index dbc4b4c30..c37d1c1e6 100644 --- 
a/src/init.test.ts +++ b/src/init.test.ts @@ -452,14 +452,14 @@ test( ); test("file coverage information enabled when debugMode is true", async (t) => { - t.true( - await getFileCoverageInformationEnabled( - true, // debugMode - createStubCodeQL({}), - createFeatures([Feature.SkipFileCoverageOnPrs]), - {}, - ), + const result = await getFileCoverageInformationEnabled( + true, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); }); test.serial( @@ -467,14 +467,14 @@ test.serial( async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(false); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - createStubCodeQL({}), - createFeatures([Feature.SkipFileCoverageOnPrs]), - {}, - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); }, ); @@ -483,14 +483,14 @@ test.serial( async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - createStubCodeQL({}), - createFeatures([]), - {}, - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([]), + {}, ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); }, ); @@ -499,16 +499,16 @@ test.serial( async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.true( - await getFileCoverageInformationEnabled( - false, // debugMode - createStubCodeQL({}), - createFeatures([Feature.SkipFileCoverageOnPrs]), - { - "github-codeql-file-coverage-on-prs": true, - }, - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + 
createFeatures([Feature.SkipFileCoverageOnPrs]), + { + "github-codeql-file-coverage-on-prs": true, + }, ); + t.true(result.enabled); + t.true(result.enabledByRepositoryProperty); }, ); @@ -517,13 +517,13 @@ test.serial( async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); - t.false( - await getFileCoverageInformationEnabled( - false, // debugMode - createStubCodeQL({}), - createFeatures([Feature.SkipFileCoverageOnPrs]), - {}, - ), + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, ); + t.false(result.enabled); + t.false(result.enabledByRepositoryProperty); }, ); diff --git a/src/init.ts b/src/init.ts index b5e90b68b..a1c6b2108 100644 --- a/src/init.ts +++ b/src/init.ts @@ -306,18 +306,32 @@ export async function getFileCoverageInformationEnabled( codeql: CodeQL, features: FeatureEnablement, repositoryProperties: RepositoryProperties, -): Promise { - return ( - // Always enable file coverage information in debug mode - debugMode || - // We're most interested in speeding up PRs, and we want to keep - // submitting file coverage information for the default branch since - // it is used to populate the status page. - !isAnalyzingPullRequest() || - // Allow repositories to opt in to file coverage information on PRs - // using a repository property. - repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === - true || - !(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql)) - ); +): Promise<{ + enabled: boolean; + enabledByRepositoryProperty: boolean; +}> { + // Always enable file coverage information in debug mode + if (debugMode) { + return { enabled: true, enabledByRepositoryProperty: false }; + } + // We're most interested in speeding up PRs, and we want to keep + // submitting file coverage information for the default branch since + // it is used to populate the status page. 
+ if (!isAnalyzingPullRequest()) { + return { enabled: true, enabledByRepositoryProperty: false }; + } + // If the feature is disabled, then maintain the previous behavior of + // unconditionally computing file coverage information. + if (!(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql))) { + return { enabled: true, enabledByRepositoryProperty: false }; + } + // Allow repositories to opt in to file coverage information on PRs + // using a repository property. + if ( + repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === true + ) { + return { enabled: true, enabledByRepositoryProperty: true }; + } + // Otherwise, disable file coverage information on PRs to speed up analysis. + return { enabled: false, enabledByRepositoryProperty: false }; } From 13c548978dbeb9acb50fc752558d02721d771406 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 12:14:20 +0000 Subject: [PATCH 36/50] Fix retries when uploading databases --- lib/analyze-action-post.js | 6 +-- lib/analyze-action.js | 81 +++++++++++++++++----------- lib/autobuild-action.js | 6 +-- lib/init-action-post.js | 6 +-- lib/init-action.js | 6 +-- lib/resolve-environment-action.js | 6 +-- lib/setup-codeql-action.js | 6 +-- lib/start-proxy-action-post.js | 6 +-- lib/start-proxy-action.js | 6 +-- lib/upload-lib.js | 6 +-- lib/upload-sarif-action-post.js | 6 +-- lib/upload-sarif-action.js | 6 +-- src/api-client.test.ts | 3 +- src/api-client.ts | 14 +++-- src/database-upload.ts | 89 ++++++++++++++++++++----------- 15 files changed, 141 insertions(+), 112 deletions(-) diff --git a/lib/analyze-action-post.js b/lib/analyze-action-post.js index 454c2d9fb..fb54b6631 100644 --- a/lib/analyze-action-post.js +++ b/lib/analyze-action-post.js @@ -161404,6 +161404,7 @@ retry.VERSION = VERSION7; // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { 
allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -161418,10 +161419,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/analyze-action.js b/lib/analyze-action.js index a65d7175c..cb0bf128f 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -106782,6 +106782,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -106796,10 +106797,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. 
- doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); @@ -110969,40 +110967,59 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai includeDiagnostics: false }); bundledDbSize = fs13.statSync(bundledDb).size; - const bundledDbReadStream = fs13.createReadStream(bundledDb); const commitOid = await getCommitOid( getRequiredInput("checkout_path") ); - try { - const startTime = performance.now(); - await client.request( - `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, - { - baseUrl: uploadsBaseUrl, - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - language, - name: `${language}-database`, - commit_oid: commitOid, - data: bundledDbReadStream, - headers: { - authorization: `token ${apiDetails.auth}`, - "Content-Type": "application/zip", - "Content-Length": bundledDbSize + const maxAttempts = 4; + let uploadDurationMs; + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + const bundledDbReadStream = fs13.createReadStream(bundledDb); + try { + const attemptStartTime = performance.now(); + await client.request( + `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, + { + baseUrl: uploadsBaseUrl, + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + language, + name: `${language}-database`, + commit_oid: commitOid, + data: bundledDbReadStream, + headers: { + authorization: `token ${apiDetails.auth}`, + "Content-Type": "application/zip", + "Content-Length": bundledDbSize + }, + request: { + retries: 0 + } } + ); + uploadDurationMs = performance.now() - attemptStartTime; + break; + } catch (e) { + const httpError = asHTTPError(e); + const isRetryable = !httpError || !DO_NOT_RETRY_STATUSES.includes(httpError.status); + if (!isRetryable || attempt === maxAttempts) { + throw e; } - ); - const endTime = performance.now(); - reports.push({ - language, - zipped_upload_size_bytes: bundledDbSize, - 
is_overlay_base: shouldUploadOverlayBase, - upload_duration_ms: endTime - startTime - }); - logger.debug(`Successfully uploaded database for ${language}`); - } finally { - bundledDbReadStream.close(); + const backoffMs = 15e3 * Math.pow(2, attempt - 1); + logger.debug( + `Database upload attempt ${attempt} of ${maxAttempts} failed for ${language}: ${getErrorMessage(e)}. Retrying in ${backoffMs / 1e3}s...` + ); + await new Promise((resolve8) => setTimeout(resolve8, backoffMs)); + } finally { + bundledDbReadStream.close(); + } } + reports.push({ + language, + zipped_upload_size_bytes: bundledDbSize, + is_overlay_base: shouldUploadOverlayBase, + upload_duration_ms: uploadDurationMs + }); + logger.debug(`Successfully uploaded database for ${language}`); } catch (e) { logger.warning( `Failed to upload database for ${language}: ${getErrorMessage(e)}` diff --git a/lib/autobuild-action.js b/lib/autobuild-action.js index acd1b250e..234da35b3 100644 --- a/lib/autobuild-action.js +++ b/lib/autobuild-action.js @@ -103423,6 +103423,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -103437,10 +103438,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. 
- doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 479129b24..67e8f2689 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -164624,6 +164624,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -164638,10 +164639,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/init-action.js b/lib/init-action.js index 5d5a6fa59..e305ac6f1 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -104131,6 +104131,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -104145,10 +104146,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. 
We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/resolve-environment-action.js b/lib/resolve-environment-action.js index aa3673bd3..11fa8c0c0 100644 --- a/lib/resolve-environment-action.js +++ b/lib/resolve-environment-action.js @@ -103431,6 +103431,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -103445,10 +103446,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. 
- doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index a9eb08eb5..e35c84838 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -103540,6 +103540,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -103554,10 +103555,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/start-proxy-action-post.js b/lib/start-proxy-action-post.js index 6fdfe2d8b..610a35a72 100644 --- a/lib/start-proxy-action-post.js +++ b/lib/start-proxy-action-post.js @@ -161287,6 +161287,7 @@ retry.VERSION = VERSION7; // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -161301,10 +161302,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. 
We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 84519a068..0b3ed7b0c 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -120510,6 +120510,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -120524,10 +120525,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. 
- doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 2f5585bd1..38765bcc6 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -106416,6 +106416,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -106430,10 +106431,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/upload-sarif-action-post.js b/lib/upload-sarif-action-post.js index dab78eb86..f6a2421c2 100644 --- a/lib/upload-sarif-action-post.js +++ b/lib/upload-sarif-action-post.js @@ -161287,6 +161287,7 @@ retry.VERSION = VERSION7; // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -161301,10 +161302,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. 
We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. - doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index 53f385653..fd3672bde 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -106465,6 +106465,7 @@ function parseRepositoryNwo(input) { // src/api-client.ts var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +var DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -106479,10 +106480,7 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) error: core5.error }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. 
- doNotRetry: [400, 410, 422, 451] + doNotRetry: DO_NOT_RETRY_STATUSES } }) ); diff --git a/src/api-client.test.ts b/src/api-client.test.ts index d0311d0dc..f8846e768 100644 --- a/src/api-client.test.ts +++ b/src/api-client.test.ts @@ -5,6 +5,7 @@ import * as sinon from "sinon"; import * as actionsUtil from "./actions-util"; import * as api from "./api-client"; +import { DO_NOT_RETRY_STATUSES } from "./api-client"; import { setupTests } from "./testing-utils"; import * as util from "./util"; @@ -37,7 +38,7 @@ test.serial("getApiClient", async (t) => { log: sinon.match.any, userAgent: `CodeQL-Action/${actionsUtil.getActionVersion()}`, retry: { - doNotRetry: [400, 410, 422, 451], + doNotRetry: DO_NOT_RETRY_STATUSES, }, }), ); diff --git a/src/api-client.ts b/src/api-client.ts index 13babcd38..4b8cb7b34 100644 --- a/src/api-client.ts +++ b/src/api-client.ts @@ -19,6 +19,15 @@ import { const GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; +/** + * HTTP status codes that should not be retried. + * + * The default Octokit list is 400, 401, 403, 404, 410, 422, and 451. We have + * observed transient errors with authentication, so we remove 401, 403, and 404 + * from the default list to ensure that these errors are retried. + */ +export const DO_NOT_RETRY_STATUSES = [400, 410, 422, 451]; + export type GitHubApiCombinedDetails = GitHubApiDetails & GitHubApiExternalRepoDetails; @@ -52,10 +61,7 @@ function createApiClientWithDetails( error: core.error, }, retry: { - // The default is 400, 401, 403, 404, 410, 422, and 451. We have observed transient errors - // with authentication, so we remove 401, 403, and 404 from the default list to ensure that - // these errors are retried. 
- doNotRetry: [400, 410, 422, 451], + doNotRetry: DO_NOT_RETRY_STATUSES, }, }), ); diff --git a/src/database-upload.ts b/src/database-upload.ts index 41546697f..b9e2f5b06 100644 --- a/src/database-upload.ts +++ b/src/database-upload.ts @@ -2,7 +2,11 @@ import * as fs from "fs"; import * as actionsUtil from "./actions-util"; import { AnalysisKind } from "./analyses"; -import { getApiClient, GitHubApiDetails } from "./api-client"; +import { + DO_NOT_RETRY_STATUSES, + getApiClient, + GitHubApiDetails, +} from "./api-client"; import { type CodeQL } from "./codeql"; import { Config } from "./config-utils"; import { Feature, FeatureEnablement } from "./feature-flags"; @@ -11,7 +15,7 @@ import { Logger, withGroupAsync } from "./logging"; import { OverlayDatabaseMode } from "./overlay"; import { RepositoryNwo } from "./repository"; import * as util from "./util"; -import { bundleDb, CleanupLevel, parseGitHubUrl } from "./util"; +import { asHTTPError, bundleDb, CleanupLevel, parseGitHubUrl } from "./util"; /** Information about a database upload. */ export interface DatabaseUploadResult { @@ -105,40 +109,63 @@ export async function cleanupAndUploadDatabases( includeDiagnostics: false, }); bundledDbSize = fs.statSync(bundledDb).size; - const bundledDbReadStream = fs.createReadStream(bundledDb); const commitOid = await gitUtils.getCommitOid( actionsUtil.getRequiredInput("checkout_path"), ); - try { - const startTime = performance.now(); - await client.request( - `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, - { - baseUrl: uploadsBaseUrl, - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - language, - name: `${language}-database`, - commit_oid: commitOid, - data: bundledDbReadStream, - headers: { - authorization: `token ${apiDetails.auth}`, - "Content-Type": "application/zip", - "Content-Length": bundledDbSize, + // Upload with manual retry logic. 
We disable Octokit's built-in retries + // because the request body is a ReadStream, which can only be consumed + // once. + const maxAttempts = 4; // 1 initial attempt + 3 retries, identical to the default retry behavior of Octokit + let uploadDurationMs: number | undefined; + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + const bundledDbReadStream = fs.createReadStream(bundledDb); + try { + const attemptStartTime = performance.now(); + await client.request( + `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, + { + baseUrl: uploadsBaseUrl, + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + language, + name: `${language}-database`, + commit_oid: commitOid, + data: bundledDbReadStream, + headers: { + authorization: `token ${apiDetails.auth}`, + "Content-Type": "application/zip", + "Content-Length": bundledDbSize, + }, + request: { + retries: 0, + }, }, - }, - ); - const endTime = performance.now(); - reports.push({ - language, - zipped_upload_size_bytes: bundledDbSize, - is_overlay_base: shouldUploadOverlayBase, - upload_duration_ms: endTime - startTime, - }); - logger.debug(`Successfully uploaded database for ${language}`); - } finally { - bundledDbReadStream.close(); + ); + uploadDurationMs = performance.now() - attemptStartTime; + break; + } catch (e) { + const httpError = asHTTPError(e); + const isRetryable = + !httpError || !DO_NOT_RETRY_STATUSES.includes(httpError.status); + if (!isRetryable || attempt === maxAttempts) { + throw e; + } + const backoffMs = 15_000 * Math.pow(2, attempt - 1); // 15s, 30s, 60s + logger.debug( + `Database upload attempt ${attempt} of ${maxAttempts} failed for ${language}: ${util.getErrorMessage(e)}. 
Retrying in ${backoffMs / 1000}s...`, + ); + await new Promise((resolve) => setTimeout(resolve, backoffMs)); + } finally { + bundledDbReadStream.close(); + } } + reports.push({ + language, + zipped_upload_size_bytes: bundledDbSize, + is_overlay_base: shouldUploadOverlayBase, + upload_duration_ms: uploadDurationMs, + }); + logger.debug(`Successfully uploaded database for ${language}`); } catch (e) { // Log a warning but don't fail the workflow logger.warning( From ca969a91db22e598a0776e071087dc700e42b199 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 12:34:47 +0000 Subject: [PATCH 37/50] Add changelog note --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d6c8a15e..f7491eb62 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. [#3557](https://github.com/github/codeql-action/pull/3557) - The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. [#3559](https://github.com/github/codeql-action/pull/3559) +- Fixed the retry mechanism for database uploads. Previously this would fail with the error "Response body object should not be disturbed or locked". 
[#3564](https://github.com/github/codeql-action/pull/3564) ## 4.32.6 - 05 Mar 2026 From edfcb0a509722876e11d1d78b8c49259926b4096 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 12:49:58 +0000 Subject: [PATCH 38/50] Update tests --- src/database-upload.test.ts | 100 ++++++++++++++++++++++++++---------- 1 file changed, 72 insertions(+), 28 deletions(-) diff --git a/src/database-upload.test.ts b/src/database-upload.test.ts index 3d8433d8b..8bd22091c 100644 --- a/src/database-upload.test.ts +++ b/src/database-upload.test.ts @@ -214,37 +214,81 @@ test.serial( }, ); -test.serial("Don't crash if uploading a database fails", async (t) => { - await withTmpDir(async (tmpDir) => { - setupActionsVars(tmpDir, tmpDir); - sinon - .stub(actionsUtil, "getRequiredInput") - .withArgs("upload-database") - .returns("true"); - sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); +test.serial( + "Don't crash if uploading a database fails with a non-retryable error", + async (t) => { + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + sinon + .stub(actionsUtil, "getRequiredInput") + .withArgs("upload-database") + .returns("true"); + sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - await mockHttpRequests(500); + await mockHttpRequests(422); - const loggedMessages = [] as LoggedMessage[]; - await cleanupAndUploadDatabases( - testRepoName, - getCodeQL(), - getTestConfig(tmpDir), - testApiDetails, - createFeatures([]), - getRecordingLogger(loggedMessages), - ); + const loggedMessages = [] as LoggedMessage[]; + await cleanupAndUploadDatabases( + testRepoName, + getCodeQL(), + getTestConfig(tmpDir), + testApiDetails, + createFeatures([]), + getRecordingLogger(loggedMessages), + ); - t.assert( - loggedMessages.find( - (v) => - v.type === "warning" && - v.message === - "Failed to upload database for javascript: some error message", - ) !== undefined, - ); - }); -}); + t.assert( + loggedMessages.find( + (v) => + v.type === 
"warning" && + v.message === + "Failed to upload database for javascript: some error message", + ) !== undefined, + ); + }); + }, +); + +test.serial( + "Don't crash if uploading a database fails with a retryable error", + async (t) => { + await withTmpDir(async (tmpDir) => { + setupActionsVars(tmpDir, tmpDir); + sinon + .stub(actionsUtil, "getRequiredInput") + .withArgs("upload-database") + .returns("true"); + sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); + + await mockHttpRequests(500); + + // Stub setTimeout to fire immediately to avoid real delays from retry backoff. + const originalSetTimeout = global.setTimeout; + sinon + .stub(global, "setTimeout") + .callsFake((fn: () => void) => originalSetTimeout(fn, 0)); + + const loggedMessages = [] as LoggedMessage[]; + await cleanupAndUploadDatabases( + testRepoName, + getCodeQL(), + getTestConfig(tmpDir), + testApiDetails, + createFeatures([]), + getRecordingLogger(loggedMessages), + ); + + t.assert( + loggedMessages.find( + (v) => + v.type === "warning" && + v.message === + "Failed to upload database for javascript: some error message", + ) !== undefined, + ); + }); + }, +); test.serial("Successfully uploading a database to github.com", async (t) => { await withTmpDir(async (tmpDir) => { From bef08edf3221a5673c3202722c8817071010ae2a Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 13:11:41 +0000 Subject: [PATCH 39/50] Update to log deprecation warning Move rollout to April --- CHANGELOG.md | 6 ++---- lib/init-action.js | 43 ++++++++++++++++++++++++++++++++++++------- src/environment.ts | 3 +++ src/init-action.ts | 17 +++++++++++++++++ src/init.test.ts | 7 ++++++- src/init.ts | 43 +++++++++++++++++++++++++++++++++---------- 6 files changed, 97 insertions(+), 22 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6a424914..f5b37ac8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,11 +4,9 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th 
## [UNRELEASED] -- Added an experimental change which skips collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. +- Upcoming change: Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Pull request analyses will log a warning about this upcoming change. - Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). - - We expect to roll this change out to everyone in March. [#3562](https://github.com/github/codeql-action/pull/3562) + Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). [#3562](https://github.com/github/codeql-action/pull/3562) - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. 
[#3557](https://github.com/github/codeql-action/pull/3557) - The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. [#3559](https://github.com/github/codeql-action/pull/3559) diff --git a/lib/init-action.js b/lib/init-action.js index f5d4caece..a2a95f193 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -109087,18 +109087,38 @@ function cleanupDatabaseClusterDirectory(config, logger, options = {}, rmSync2 = } async function getFileCoverageInformationEnabled(debugMode, codeql, features, repositoryProperties) { if (debugMode) { - return { enabled: true, enabledByRepositoryProperty: false }; + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; } if (!isAnalyzingPullRequest()) { - return { enabled: true, enabledByRepositoryProperty: false }; - } - if (!await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql)) { - return { enabled: true, enabledByRepositoryProperty: false }; + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; } if (repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { - return { enabled: true, enabledByRepositoryProperty: true }; + return { + enabled: true, + enabledByRepositoryProperty: true, + showDeprecationWarning: false + }; } - return { enabled: false, enabledByRepositoryProperty: false }; + if (!await features.getValue("skip_file_coverage_on_prs" /* SkipFileCoverageOnPrs */, codeql)) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: true + }; + } + return { + enabled: false, + enabledByRepositoryProperty: false, + 
showDeprecationWarning: false + }; } // src/status-report.ts @@ -109767,6 +109787,15 @@ async function run(startedAt) { ) ); } + if (fileCoverageResult.showDeprecationWarning && !process.env["CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */]) { + logger.warning( + 'Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository\'s settings.' + ); + core13.exportVariable( + "CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, + "true" + ); + } await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); diff --git a/src/environment.ts b/src/environment.ts index 75fc3a7de..03ac727f3 100644 --- a/src/environment.ts +++ b/src/environment.ts @@ -47,6 +47,9 @@ export enum EnvVar { /** Whether the init action has been run. */ INIT_ACTION_HAS_RUN = "CODEQL_ACTION_INIT_HAS_RUN", + /** Whether the deprecation warning for file coverage on PRs has been logged. */ + DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION = "CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION", + /** Whether the error for a deprecated version of the CodeQL Action was logged. 
*/ LOG_VERSION_DEPRECATION = "CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION", diff --git a/src/init-action.ts b/src/init-action.ts index 6ca8d0c97..76dd0bdba 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -409,6 +409,23 @@ async function run(startedAt: Date) { ); } + if ( + fileCoverageResult.showDeprecationWarning && + !process.env[EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION] + ) { + logger.warning( + "Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses. " + + "Repositories owned by an organization can opt out of this change by creating a custom repository property " + + 'with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to ' + + "`true` in the repository's settings.", + ); + core.exportVariable( + EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION, + "true", + ); + } + await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error = wrapError(unwrappedError); diff --git a/src/init.test.ts b/src/init.test.ts index c37d1c1e6..0d5a5783c 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -460,6 +460,7 @@ test("file coverage information enabled when debugMode is true", async (t) => { ); t.true(result.enabled); t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }); test.serial( @@ -475,11 +476,12 @@ test.serial( ); t.true(result.enabled); t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }, ); test.serial( - "file coverage information enabled when feature flag is not enabled", + "file coverage information enabled when feature flag is not enabled, with deprecation warning", async (t) => { sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); @@ -491,6 +493,7 @@ test.serial( ); t.true(result.enabled); 
t.false(result.enabledByRepositoryProperty); + t.true(result.showDeprecationWarning); }, ); @@ -509,6 +512,7 @@ test.serial( ); t.true(result.enabled); t.true(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }, ); @@ -525,5 +529,6 @@ test.serial( ); t.false(result.enabled); t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); }, ); diff --git a/src/init.ts b/src/init.ts index a1c6b2108..c4b219afc 100644 --- a/src/init.ts +++ b/src/init.ts @@ -309,29 +309,52 @@ export async function getFileCoverageInformationEnabled( ): Promise<{ enabled: boolean; enabledByRepositoryProperty: boolean; + showDeprecationWarning: boolean; }> { // Always enable file coverage information in debug mode if (debugMode) { - return { enabled: true, enabledByRepositoryProperty: false }; + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; } // We're most interested in speeding up PRs, and we want to keep // submitting file coverage information for the default branch since // it is used to populate the status page. if (!isAnalyzingPullRequest()) { - return { enabled: true, enabledByRepositoryProperty: false }; - } - // If the feature is disabled, then maintain the previous behavior of - // unconditionally computing file coverage information. - if (!(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql))) { - return { enabled: true, enabledByRepositoryProperty: false }; + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; } // Allow repositories to opt in to file coverage information on PRs - // using a repository property. + // using a repository property. In this case, don't show the deprecation + // warning since the repository has explicitly opted in. 
if ( repositoryProperties[RepositoryPropertyName.FILE_COVERAGE_ON_PRS] === true ) { - return { enabled: true, enabledByRepositoryProperty: true }; + return { + enabled: true, + enabledByRepositoryProperty: true, + showDeprecationWarning: false, + }; + } + // If the feature is disabled, then maintain the previous behavior of + // unconditionally computing file coverage information, but warn that + // file coverage on PRs will be disabled in a future release. + if (!(await features.getValue(Feature.SkipFileCoverageOnPrs, codeql))) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: true, + }; } // Otherwise, disable file coverage information on PRs to speed up analysis. - return { enabled: false, enabledByRepositoryProperty: false }; + return { + enabled: false, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; } From 55a0f2b2aac8ca1d0d3ea9a17896ea2591e6cb40 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 15:41:40 +0000 Subject: [PATCH 40/50] Add environment variable override --- CHANGELOG.md | 7 +- lib/analyze-action.js | 480 +++++++++++----------- lib/init-action-post.js | 820 +++++++++++++++++++------------------ lib/init-action.js | 585 +++++++++++++------------- lib/setup-codeql-action.js | 488 +++++++++++----------- lib/upload-lib.js | 460 ++++++++++----------- lib/upload-sarif-action.js | 472 ++++++++++----------- src/environment.ts | 6 + src/init-action.ts | 18 +- src/init.test.ts | 181 ++++++++ src/init.ts | 58 +++ 11 files changed, 1928 insertions(+), 1647 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f5b37ac8b..7b53a6674 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,12 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th - Upcoming change: Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. 
File coverage information will still be computed on non-PR analyses. Pull request analyses will log a warning about this upcoming change. - Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). [#3562](https://github.com/github/codeql-action/pull/3562) + To opt out of this change: + - **Repositories owned by an organization:** Create a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). Alternatively, if you are using an advanced setup workflow, you can set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. + - **User-owned repositories using default setup:** Switch to an advanced setup workflow and set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. + - **User-owned repositories using advanced setup:** Set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. + + [#3562](https://github.com/github/codeql-action/pull/3562) - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. 
[#3557](https://github.com/github/codeql-action/pull/3557) - The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. [#3559](https://github.com/github/codeql-action/pull/3559) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index bc2af128c..1fd5bf621 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable8; + exports2.exportVariable = exportVariable9; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable8(name, val) { + function exportVariable9(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48747,7 +48747,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48759,23 +48759,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core16.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core15.debug(`implicitDescendants '${result.implicitDescendants}'`); + core16.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core15.debug(`matchDirectories '${result.matchDirectories}'`); + core16.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core16.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core16.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50403,7 +50403,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var fs18 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path16 = __importStar2(require("path")); @@ -50456,7 +50456,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core15.debug(`Search path '${searchPath}'`); + core16.debug(`Search path '${searchPath}'`); try { yield __await2(fs18.promises.lstat(searchPath)); } catch (err) { @@ -50531,7 +50531,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core15.debug(`Broken symlink '${item.path}'`); + core16.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50547,7 +50547,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core16.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50650,7 +50650,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles2; var crypto3 = __importStar2(require("crypto")); - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var fs18 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50659,7 +50659,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core15.info : core15.debug; + const writeDelegate = verbose ? core16.info : core16.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto3.createHash("sha256"); @@ -52050,7 +52050,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); @@ -52101,7 +52101,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path16.relative(workspace, file).replace(new RegExp(`\\${path16.sep}`, "g"), "/"); - core15.debug(`Matched: ${relativeFile}`); + core16.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52129,7 +52129,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core16.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52140,10 +52140,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core15.debug(err.message); + core16.debug(err.message); } versionOutput = versionOutput.trim(); - core15.debug(versionOutput); + core16.debug(versionOutput); return versionOutput; }); } @@ -52151,7 +52151,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core15.debug(`zstd version: ${version}`); + core16.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52276,14 +52276,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53010,19 +53010,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -57268,19 +57268,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -58190,14 +58190,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58335,8 +58335,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91807,7 +91807,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -91849,7 +91849,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core16.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91906,14 +91906,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core16.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core16.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -91998,7 +91998,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92056,9 +92056,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core16.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core15.debug(`${name} - Error is not retryable`); + core16.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92317,7 +92317,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92355,7 +92355,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core16.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92389,7 +92389,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core16.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92439,7 +92439,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core16.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92450,7 +92450,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core15.debug("Unable to validate download, no Content-Length header"); + core16.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92568,7 +92568,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core15.debug("Unable to determine content length, downloading file with http-client..."); + core16.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92658,7 +92658,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92678,9 +92678,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core16.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core16.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92716,12 +92716,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core15.debug(`Download concurrency: ${result.downloadConcurrency}`); - core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core15.debug(`Lookup only: ${result.lookupOnly}`); + core16.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core16.debug(`Download concurrency: ${result.downloadConcurrency}`); + core16.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core16.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core16.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core16.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -92915,7 +92915,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache5; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs18 = __importStar2(require("fs")); @@ -92933,7 +92933,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core15.debug(`Resource Url: ${url2}`); + core16.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -92961,7 +92961,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core15.isDebug()) { + if (core16.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -92974,9 +92974,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core15.setSecret(cacheDownloadUrl); - core15.debug(`Cache Result:`); - core15.debug(JSON.stringify(cacheResult)); + core16.setSecret(cacheDownloadUrl); + core16.debug(`Cache Result:`); + core16.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -92990,10 +92990,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core16.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core16.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93036,7 +93036,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core16.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93058,7 +93058,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core15.debug("Awaiting all uploads"); + core16.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93101,16 +93101,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core15.debug("Upload cache"); + core16.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core15.debug("Commiting cache"); + core16.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core16.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core15.info("Cache saved successfully"); + core16.info("Cache saved successfully"); } }); } @@ -98593,7 +98593,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var path16 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98652,7 +98652,7 @@ var require_cache5 = __commonJS({ function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core16.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98667,8 +98667,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - core15.debug(JSON.stringify(keys)); + core16.debug("Resolved Keys:"); + core16.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98686,19 +98686,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core16.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core15.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core15.info("Cache restored successfully"); + core16.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98706,16 +98706,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error3.message}`); + core16.error(`Failed to restore: ${error3.message}`); } else { - core15.warning(`Failed to restore: ${error3.message}`); + core16.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98726,8 +98726,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core15.debug("Resolved Keys:"); - 
core15.debug(JSON.stringify(keys)); + core16.debug("Resolved Keys:"); + core16.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98745,30 +98745,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core15.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core16.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core15.info(`Cache hit for restore-key: ${response.matchedKey}`); + core16.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core15.info(`Cache hit for: ${response.matchedKey}`); + core16.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core15.info("Lookup only - skipping download"); + core16.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path16.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive path: ${archivePath}`); - core15.debug(`Starting download of archive to: ${archivePath}`); + core16.debug(`Archive path: ${archivePath}`); + core16.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core15.isDebug()) { + core16.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core15.info("Cache restored successfully"); + core16.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98776,9 +98776,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to restore: ${error3.message}`); + core16.error(`Failed to restore: ${error3.message}`); } else { - core15.warning(`Failed to restore: ${error3.message}`); + core16.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98787,7 +98787,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98796,7 +98796,7 @@ var require_cache5 = __commonJS({ function saveCache5(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core15.debug(`Cache service version: ${cacheServiceVersion}`); + core16.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98814,26 +98814,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core16.debug("Cache Paths:"); + core16.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core16.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core16.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core15.debug("Reserving Cache"); + core16.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98846,26 +98846,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core15.debug(`Saving Cache (ID: ${cacheId})`); + core16.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - core15.info(`Failed to save: ${typedError.message}`); + core16.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core16.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core16.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -98878,23 +98878,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core15.debug("Cache Paths:"); - core15.debug(`${JSON.stringify(cachePaths)}`); + core16.debug("Cache Paths:"); + core16.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path16.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core15.debug(`Archive Path: ${archivePath}`); + core16.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core15.isDebug()) { + if (core16.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core15.debug(`File Size: ${archiveFileSize}`); + core16.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core15.debug("Reserving Cache"); + core16.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -98905,16 +98905,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core15.warning(`Cache reservation failed: ${response.message}`); + core16.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core15.debug(`Failed to reserve cache: ${error3}`); + core16.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core15.debug(`Attempting to upload cache located at: ${archivePath}`); + core16.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -98922,7 +98922,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core16.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -98935,21 +98935,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - 
core15.info(`Failed to save: ${typedError.message}`); + core16.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core15.warning(typedError.message); + core16.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core15.error(`Failed to save: ${typedError.message}`); + core16.error(`Failed to save: ${typedError.message}`); } else { - core15.warning(`Failed to save: ${typedError.message}`); + core16.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core15.debug(`Failed to delete archive: ${error3}`); + core16.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core15.info(err.message); + core16.info(err.message); } const seconds = this.getSleepAmount(); - core15.info(`Waiting ${seconds} seconds before trying again`); + core16.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core15 = __importStar2(require_core()); + var core16 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); var fs18 = __importStar2(require("fs")); 
@@ -99334,8 +99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path16.join(_getTempDirectory(), crypto3.randomUUID()); yield io7.mkdirP(path16.dirname(dest)); - core15.debug(`Downloading ${url2}`); - core15.debug(`Destination ${dest}`); + core16.debug(`Downloading ${url2}`); + core16.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core15.debug("set auth"); + core16.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core15.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core16.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs18.createWriteStream(dest)); - core15.debug("download complete"); + core16.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core15.debug("download failed"); + core16.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core15.debug(`Failed to delete '${dest}'. ${err.message}`); + core16.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core15.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core16.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core15.debug("Checking tar --version"); + core16.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core15.debug(versionOutput.trim()); + core16.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core15.isDebug() && !flags.includes("v")) { + if (core16.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core15.isDebug()) { + if (core16.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core15.debug(`Using pwsh at path: ${pwshPath}`); + core16.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core15.debug(`Using powershell at path: ${powershellPath}`); + core16.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const 
args = [file]; - if (!core15.isDebug()) { + if (!core16.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source dir: ${sourceDir}`); + core16.debug(`Caching tool ${tool} ${version} ${arch2}`); + core16.debug(`source dir: ${sourceDir}`); if (!fs18.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os5.arch(); - core15.debug(`Caching tool ${tool} ${version} ${arch2}`); - core15.debug(`source file: ${sourceFile}`); + core16.debug(`Caching tool ${tool} ${version} ${arch2}`); + core16.debug(`source file: ${sourceFile}`); if (!fs18.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path16.join(destFolder, targetFile); - core15.debug(`destination file ${destPath}`); + core16.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path16.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core15.debug(`checking cache: ${cachePath}`); + core16.debug(`checking cache: ${cachePath}`); if (fs18.existsSync(cachePath) && fs18.existsSync(`${cachePath}.complete`)) { - core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core16.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core15.debug("not found"); + core16.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core15.debug("set auth"); + core16.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core15.debug("Invalid json"); + core16.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core15.debug(`destination ${folderPath}`); + core16.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path16.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs18.writeFileSync(markerPath, ""); - core15.debug("finished caching tool"); + core16.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core15.debug(`isExplicit: ${c}`); + core16.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core15.debug(`explicit? ${valid3}`); + core16.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core15.debug(`evaluating ${versions.length} versions`); + core16.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core15.debug(`matched: ${version}`); + core16.debug(`matched: ${version}`); } else { - core15.debug("match not found"); + core16.debug("match not found"); } return version; } @@ -103236,7 +103236,7 @@ module.exports = __toCommonJS(analyze_action_exports); var fs17 = __toESM(require("fs")); var import_path4 = __toESM(require("path")); var import_perf_hooks3 = require("perf_hooks"); -var core14 = __toESM(require_core()); +var core15 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -111227,7 +111227,7 @@ var fs16 = __toESM(require("fs")); var path14 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -112353,7 +112353,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core13 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io6 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -112495,7 +112497,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -112594,13 +112596,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core13.warning(httpError.message || GENERIC_403_MSG); + core14.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core13.warning(httpError.message || GENERIC_404_MSG); + core14.warning(httpError.message || GENERIC_404_MSG); break; default: - core13.warning(httpError.message); + core14.warning(httpError.message); break; } } @@ -112987,7 +112989,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? 
tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -113204,7 +113206,7 @@ async function run(startedAt2) { } const apiDetails = getApiDetails(); const outputDir = getRequiredInput("output"); - core14.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); + core15.exportVariable("CODEQL_ACTION_SARIF_RESULTS_OUTPUT_DIR" /* SARIF_RESULTS_OUTPUT_DIR */, outputDir); const threads = getThreadsFlag( getOptionalInput("threads") || process.env["CODEQL_THREADS"], logger @@ -113261,8 +113263,8 @@ async function run(startedAt2) { for (const language of config.languages) { dbLocations[language] = getCodeQLDatabasePath(config, language); } - core14.setOutput("db-locations", dbLocations); - core14.setOutput("sarif-output", import_path4.default.resolve(outputDir)); + core15.setOutput("db-locations", dbLocations); + core15.setOutput("sarif-output", import_path4.default.resolve(outputDir)); const uploadKind = getUploadValue( getOptionalInput("upload") ); @@ -113279,13 +113281,13 @@ async function run(startedAt2) { getOptionalInput("post-processed-sarif-path") ); if (uploadResults["code-scanning" /* CodeScanning */] !== void 0) { - core14.setOutput( + core15.setOutput( "sarif-id", uploadResults["code-scanning" /* CodeScanning */].sarifID ); } if (uploadResults["code-quality" /* CodeQuality */] !== void 0) { - core14.setOutput( + core15.setOutput( "quality-sarif-id", uploadResults["code-quality" /* CodeQuality */].sarifID ); @@ -113328,15 +113330,15 @@ async function run(startedAt2) { ); } if (getOptionalInput("expect-error") === "true") { - core14.setFailed( + core15.setFailed( `expect-error input was set to true but no error was thrown.` ); } - core14.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); + 
core15.exportVariable("CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */, "true"); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); if (getOptionalInput("expect-error") !== "true" || hasBadExpectErrorInput()) { - core14.setFailed(error3.message); + core15.setFailed(error3.message); } await sendStatusReport2( startedAt2, @@ -113407,7 +113409,7 @@ async function runWrapper() { try { await runPromise; } catch (error3) { - core14.setFailed(`analyze action failed: ${getErrorMessage(error3)}`); + core15.setFailed(`analyze action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "finish" /* Analyze */, startedAt, diff --git a/lib/init-action-post.js b/lib/init-action-post.js index 5b2a57e83..a60cfb06a 100644 --- a/lib/init-action-post.js +++ b/lib/init-action-post.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? 
"" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable9; + exports2.exportVariable = exportVariable10; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable9(name, val) { + function exportVariable10(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? 
"" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context3, operator, key, modifier) 
{ - var value = context3[key], result = []; +function getValues(context4, operator, key, modifier) { + var value = context4[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context3) { +function expand(template, context4) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context3) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context4, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48747,7 +48747,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48759,23 +48759,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core17.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core18.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core17.debug(`implicitDescendants '${result.implicitDescendants}'`); + core18.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core17.debug(`matchDirectories '${result.matchDirectories}'`); + core18.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core17.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core18.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core17.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core18.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50403,7 +50403,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var fs20 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path18 = __importStar2(require("path")); @@ -50456,7 +50456,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core17.debug(`Search path '${searchPath}'`); + core18.debug(`Search path '${searchPath}'`); try { yield __await2(fs20.promises.lstat(searchPath)); } catch (err) { @@ -50531,7 +50531,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core17.debug(`Broken symlink '${item.path}'`); + core18.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50547,7 +50547,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core17.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core18.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50650,7 +50650,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles2; var crypto2 = __importStar2(require("crypto")); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var fs20 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50659,7 +50659,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core17.info : core17.debug; + const writeDelegate = verbose ? core18.info : core18.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -52050,7 +52050,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var exec3 = __importStar2(require_exec()); var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); @@ -52101,7 +52101,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path18.relative(workspace, file).replace(new RegExp(`\\${path18.sep}`, "g"), "/"); - core17.debug(`Matched: ${relativeFile}`); + core18.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52129,7 +52129,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core17.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core18.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec3.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52140,10 +52140,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core17.debug(err.message); + core18.debug(err.message); } versionOutput = versionOutput.trim(); - core17.debug(versionOutput); + core18.debug(versionOutput); return versionOutput; }); } @@ -52151,7 +52151,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core17.debug(`zstd version: ${version}`); + core18.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52276,14 +52276,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor 
= descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _2, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context3 = {}; - for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; - context3.addInitializer = function(f) { + var context4 = {}; + for (var p in contextIn) context4[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context4.access[p] = contextIn.access[p]; + context4.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context4); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53010,19 +53010,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context3 = createLoggerContext({ + var context4 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context3.logger; + exports2.TypeSpecRuntimeLogger = context4.logger; function setLogLevel(logLevel) { - context3.setLogLevel(logLevel); + context4.setLogLevel(logLevel); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -57268,19 +57268,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 
= require_internal(); - var context3 = (0, logger_1.createLoggerContext)({ + var context4 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context3.logger; + exports2.AzureLogger = context4.logger; function setLogLevel(level) { - context3.setLogLevel(level); + context4.setLogLevel(level); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -58190,14 +58190,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context3 = new TracingContextImpl(options.parentContext); + let context4 = new TracingContextImpl(options.parentContext); if (options.span) { - context3 = context3.setValue(exports2.knownContextKeys.span, options.span); + context4 = context4.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); + context4 = context4.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context3; + return context4; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58335,8 +58335,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context3, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); + function withContext(context4, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context4, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91807,7 +91807,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -91849,7 +91849,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core18.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91906,14 +91906,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core17.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core18.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core17.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core18.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -91998,7 +91998,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92056,9 +92056,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core17.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core17.debug(`${name} - Error is not retryable`); + core18.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92317,7 +92317,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92355,7 +92355,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core17.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core18.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92389,7 +92389,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core17.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core18.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92439,7 +92439,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core17.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core18.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92450,7 +92450,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core17.debug("Unable to validate download, no Content-Length header"); + core18.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92568,7 +92568,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core17.debug("Unable to determine content length, downloading file with http-client..."); + core18.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92658,7 +92658,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92678,9 +92678,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core17.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core18.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92716,12 +92716,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core17.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core17.debug(`Download concurrency: ${result.downloadConcurrency}`); - core17.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core17.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core17.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core17.debug(`Lookup only: ${result.lookupOnly}`); + core18.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core18.debug(`Download concurrency: ${result.downloadConcurrency}`); + core18.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core18.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core18.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core18.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -92915,7 +92915,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache5; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs20 = __importStar2(require("fs")); @@ -92933,7 +92933,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core17.debug(`Resource Url: ${url2}`); + core18.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -92961,7 +92961,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core17.isDebug()) { + if (core18.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -92974,9 +92974,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core17.setSecret(cacheDownloadUrl); - core17.debug(`Cache Result:`); - core17.debug(JSON.stringify(cacheResult)); + core18.setSecret(cacheDownloadUrl); + core18.debug(`Cache Result:`); + core18.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -92990,10 +92990,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core17.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core18.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core17.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core18.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93036,7 +93036,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core17.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core18.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93058,7 +93058,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core17.debug("Awaiting all uploads"); + core18.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93101,16 +93101,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core17.debug("Upload cache"); + core18.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core17.debug("Commiting cache"); + core18.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core18.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core17.info("Cache saved successfully"); + core18.info("Cache saved successfully"); } }); } @@ -98593,7 +98593,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var path18 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98652,7 +98652,7 @@ var require_cache5 = __commonJS({ function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98667,8 +98667,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - core17.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98686,19 +98686,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path18.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core17.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98706,16 +98706,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error3.message}`); + core18.error(`Failed to restore: ${error3.message}`); } else { - core17.warning(`Failed to restore: ${error3.message}`); + core18.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98726,8 +98726,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core17.debug("Resolved Keys:"); - 
core17.debug(JSON.stringify(keys)); + core18.debug("Resolved Keys:"); + core18.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98745,30 +98745,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core17.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core18.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core17.info(`Cache hit for restore-key: ${response.matchedKey}`); + core18.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core17.info(`Cache hit for: ${response.matchedKey}`); + core18.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core17.info("Lookup only - skipping download"); + core18.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path18.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive path: ${archivePath}`); - core17.debug(`Starting download of archive to: ${archivePath}`); + core18.debug(`Archive path: ${archivePath}`); + core18.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core17.isDebug()) { + core18.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core17.info("Cache restored successfully"); + core18.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98776,9 +98776,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to restore: ${error3.message}`); + core18.error(`Failed to restore: ${error3.message}`); } else { - core17.warning(`Failed to restore: ${error3.message}`); + core18.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98787,7 +98787,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98796,7 +98796,7 @@ var require_cache5 = __commonJS({ function saveCache5(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core17.debug(`Cache service version: ${cacheServiceVersion}`); + core18.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98814,26 +98814,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path18.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core17.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98846,26 +98846,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core17.debug(`Saving Cache (ID: ${cacheId})`); + core18.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - core17.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -98878,23 +98878,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core17.debug("Cache Paths:"); - core17.debug(`${JSON.stringify(cachePaths)}`); + core18.debug("Cache Paths:"); + core18.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path18.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core17.debug(`Archive Path: ${archivePath}`); + core18.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core17.isDebug()) { + if (core18.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core17.debug(`File Size: ${archiveFileSize}`); + core18.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core17.debug("Reserving Cache"); + core18.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -98905,16 +98905,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core17.warning(`Cache reservation failed: ${response.message}`); + core18.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core17.debug(`Failed to reserve cache: ${error3}`); + core18.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core17.debug(`Attempting to upload cache located at: ${archivePath}`); + core18.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -98922,7 +98922,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core17.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core18.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -98935,21 +98935,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - 
core17.info(`Failed to save: ${typedError.message}`); + core18.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core17.warning(typedError.message); + core18.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core17.error(`Failed to save: ${typedError.message}`); + core18.error(`Failed to save: ${typedError.message}`); } else { - core17.warning(`Failed to save: ${typedError.message}`); + core18.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core17.debug(`Failed to delete archive: ${error3}`); + core18.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core17.info(err.message); + core18.info(err.message); } const seconds = this.getSleepAmount(); - core17.info(`Waiting ${seconds} seconds before trying again`); + core18.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs20 = __importStar2(require("fs")); 
@@ -99334,8 +99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path18.join(_getTempDirectory(), crypto2.randomUUID()); yield io7.mkdirP(path18.dirname(dest)); - core17.debug(`Downloading ${url2}`); - core17.debug(`Destination ${dest}`); + core18.debug(`Downloading ${url2}`); + core18.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core17.debug("set auth"); + core18.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core17.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core18.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs20.createWriteStream(dest)); - core17.debug("download complete"); + core18.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core17.debug("download failed"); + core18.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core17.debug(`Failed to delete '${dest}'. ${err.message}`); + core18.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core17.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core18.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core17.debug("Checking tar --version"); + core18.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core17.debug(versionOutput.trim()); + core18.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core17.isDebug() && !flags.includes("v")) { + if (core18.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core17.isDebug()) { + if (core18.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core17.debug(`Using pwsh at path: ${pwshPath}`); + core18.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core17.debug(`Using powershell at path: ${powershellPath}`); + core18.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const 
args = [file]; - if (!core17.isDebug()) { + if (!core18.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os4.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source dir: ${sourceDir}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source dir: ${sourceDir}`); if (!fs20.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os4.arch(); - core17.debug(`Caching tool ${tool} ${version} ${arch2}`); - core17.debug(`source file: ${sourceFile}`); + core18.debug(`Caching tool ${tool} ${version} ${arch2}`); + core18.debug(`source file: ${sourceFile}`); if (!fs20.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path18.join(destFolder, targetFile); - core17.debug(`destination file ${destPath}`); + core18.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path18.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core17.debug(`checking cache: ${cachePath}`); + core18.debug(`checking cache: ${cachePath}`); if (fs20.existsSync(cachePath) && fs20.existsSync(`${cachePath}.complete`)) { - core17.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core18.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core17.debug("not found"); + core18.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core17.debug("set auth"); + core18.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core17.debug("Invalid json"); + core18.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path18.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core17.debug(`destination ${folderPath}`); + core18.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path18.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs20.writeFileSync(markerPath, ""); - core17.debug("finished caching tool"); + core18.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core17.debug(`isExplicit: ${c}`); + core18.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core17.debug(`explicit? ${valid3}`); + core18.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core17.debug(`evaluating ${versions.length} versions`); + core18.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core17.debug(`matched: ${version}`); + core18.debug(`matched: ${version}`); } else { - core17.debug("match not found"); + core18.debug("match not found"); } return version; } @@ -102438,14 +102438,14 @@ var require_retention = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getExpiration = void 0; var generated_1 = require_generated(); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); function getExpiration(retentionDays) { if (!retentionDays) { return void 0; } const maxRetentionDays = getRetentionDays(); if (maxRetentionDays && maxRetentionDays < retentionDays) { - core17.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. Using ${maxRetentionDays} instead.`); + core18.warning(`Retention days cannot be greater than the maximum allowed retention set within the repository. 
Using ${maxRetentionDays} instead.`); retentionDays = maxRetentionDays; } const expirationDate = /* @__PURE__ */ new Date(); @@ -102783,7 +102783,7 @@ var require_util19 = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.maskSecretUrls = exports2.maskSigUrl = exports2.getBackendIdsFromToken = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var config_1 = require_config2(); var jwt_decode_1 = __importDefault2(require_jwt_decode_cjs()); var core_1 = require_core(); @@ -102810,8 +102810,8 @@ var require_util19 = __commonJS({ workflowRunBackendId: scopeParts[1], workflowJobRunBackendId: scopeParts[2] }; - core17.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); - core17.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); + core18.debug(`Workflow Run Backend ID: ${ids.workflowRunBackendId}`); + core18.debug(`Workflow Job Run Backend ID: ${ids.workflowJobRunBackendId}`); return ids; } throw InvalidJwtError; @@ -103171,7 +103171,7 @@ var require_blob_upload = __commonJS({ exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_commonjs15(); var config_1 = require_config2(); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var crypto2 = __importStar2(require("crypto")); var stream2 = __importStar2(require("stream")); var errors_1 = require_errors4(); @@ -103197,9 +103197,9 @@ var require_blob_upload = __commonJS({ const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); - core17.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); + core18.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { - 
core17.info(`Uploaded bytes ${progress.loadedBytes}`); + core18.info(`Uploaded bytes ${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; lastProgressTime = Date.now(); }; @@ -103213,7 +103213,7 @@ var require_blob_upload = __commonJS({ const hashStream = crypto2.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); - core17.info("Beginning upload of artifact content to blob storage"); + core18.info("Beginning upload of artifact content to blob storage"); try { yield Promise.race([ blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options), @@ -103227,12 +103227,12 @@ var require_blob_upload = __commonJS({ } finally { abortController.abort(); } - core17.info("Finished uploading artifact content to blob storage!"); + core18.info("Finished uploading artifact content to blob storage!"); hashStream.end(); sha256Hash = hashStream.read(); - core17.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`); + core18.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`); if (uploadByteCount === 0) { - core17.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`); + core18.warning(`No data was uploaded to blob storage. 
Reported upload byte count is 0.`); } return { uploadSize: uploadByteCount, @@ -106563,8 +106563,8 @@ var require_graceful_fs = __commonJS({ } function noop3() { } - function publishQueue(context3, queue2) { - Object.defineProperty(context3, gracefulQueue, { + function publishQueue(context4, queue2) { + Object.defineProperty(context4, gracefulQueue, { get: function() { return queue2; } @@ -118730,7 +118730,7 @@ var require_commonjs21 = __commonJS({ free: c.#free, // methods isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context3) => c.#backgroundFetch(k, index, options, context3), + backgroundFetch: (k, index, options, context4) => c.#backgroundFetch(k, index, options, context4), moveToTail: (index) => c.#moveToTail(index), indexes: (options) => c.#indexes(options), rindexes: (options) => c.#rindexes(options), @@ -119533,7 +119533,7 @@ var require_commonjs21 = __commonJS({ const v = this.#valList[index]; return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; } - #backgroundFetch(k, index, options, context3) { + #backgroundFetch(k, index, options, context4) { const v = index === void 0 ? 
void 0 : this.#valList[index]; if (this.#isBackgroundFetch(v)) { return v; @@ -119546,7 +119546,7 @@ var require_commonjs21 = __commonJS({ const fetchOpts = { signal: ac.signal, options, - context: context3 + context: context4 }; const cb = (v2, updateCache = false) => { const { aborted } = ac.signal; @@ -119663,7 +119663,7 @@ var require_commonjs21 = __commonJS({ allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - context: context3, + context: context4, forceRefresh = false, status, signal @@ -119698,7 +119698,7 @@ var require_commonjs21 = __commonJS({ if (index === void 0) { if (status) status.fetch = "miss"; - const p = this.#backgroundFetch(k, index, options, context3); + const p = this.#backgroundFetch(k, index, options, context4); return p.__returned = p; } else { const v = this.#valList[index]; @@ -119723,7 +119723,7 @@ var require_commonjs21 = __commonJS({ this.#statusTTL(status, index); return v; } - const p = this.#backgroundFetch(k, index, options, context3); + const p = this.#backgroundFetch(k, index, options, context4); const hasStale = p.__staleWhileFetching !== void 0; const staleVal = hasStale && allowStale; if (status) { @@ -119745,13 +119745,13 @@ var require_commonjs21 = __commonJS({ if (!memoMethod) { throw new Error("no memoMethod provided to constructor"); } - const { context: context3, forceRefresh, ...options } = memoOptions; + const { context: context4, forceRefresh, ...options } = memoOptions; const v = this.get(k, options); if (!forceRefresh && v !== void 0) return v; const vv = memoMethod(k, v, { options, - context: context3 + context: context4 }); this.set(k, vv, options); return vv; @@ -128573,7 +128573,7 @@ var require_zip2 = __commonJS({ var stream2 = __importStar2(require("stream")); var promises_1 = require("fs/promises"); var archiver2 = __importStar2(require_archiver()); - var core17 = __importStar2(require_core()); + var 
core18 = __importStar2(require_core()); var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream2.Transform { @@ -128590,7 +128590,7 @@ var require_zip2 = __commonJS({ exports2.ZipUploadStream = ZipUploadStream; function createZipUploadStream(uploadSpecification_1) { return __awaiter2(this, arguments, void 0, function* (uploadSpecification, compressionLevel = exports2.DEFAULT_COMPRESSION_LEVEL) { - core17.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); + core18.debug(`Creating Artifact archive with compressionLevel: ${compressionLevel}`); const zip = archiver2.create("zip", { highWaterMark: (0, config_1.getUploadChunkSize)(), zlib: { level: compressionLevel } @@ -128614,8 +128614,8 @@ var require_zip2 = __commonJS({ } const bufferSize = (0, config_1.getUploadChunkSize)(); const zipUploadStream = new ZipUploadStream(bufferSize); - core17.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); - core17.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); + core18.debug(`Zip write high watermark value ${zipUploadStream.writableHighWaterMark}`); + core18.debug(`Zip read high watermark value ${zipUploadStream.readableHighWaterMark}`); zip.pipe(zipUploadStream); zip.finalize(); return zipUploadStream; @@ -128623,24 +128623,24 @@ var require_zip2 = __commonJS({ } exports2.createZipUploadStream = createZipUploadStream; var zipErrorCallback = (error3) => { - core17.error("An error has occurred while creating the zip file for upload"); - core17.info(error3); + core18.error("An error has occurred while creating the zip file for upload"); + core18.info(error3); throw new Error("An error has occurred during zip creation for the artifact"); }; var zipWarningCallback = (error3) => { if (error3.code === "ENOENT") { - core17.warning("ENOENT warning during artifact zip creation. 
No such file or directory"); - core17.info(error3); + core18.warning("ENOENT warning during artifact zip creation. No such file or directory"); + core18.info(error3); } else { - core17.warning(`A non-blocking warning has occurred during artifact zip creation: ${error3.code}`); - core17.info(error3); + core18.warning(`A non-blocking warning has occurred during artifact zip creation: ${error3.code}`); + core18.info(error3); } }; var zipFinishCallback = () => { - core17.debug("Zip stream for upload has finished."); + core18.debug("Zip stream for upload has finished."); }; var zipEndCallback = () => { - core17.debug("Zip stream for upload has ended."); + core18.debug("Zip stream for upload has ended."); }; } }); @@ -128705,7 +128705,7 @@ var require_upload_artifact = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadArtifact = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var retention_1 = require_retention(); var path_and_artifact_name_validation_1 = require_path_and_artifact_name_validation(); var artifact_twirp_client_1 = require_artifact_twirp_client2(); @@ -128752,13 +128752,13 @@ var require_upload_artifact = __commonJS({ value: `sha256:${uploadResult.sha256Hash}` }); } - core17.info(`Finalizing artifact upload`); + core18.info(`Finalizing artifact upload`); const finalizeArtifactResp = yield artifactClient.FinalizeArtifact(finalizeArtifactReq); if (!finalizeArtifactResp.ok) { throw new errors_1.InvalidResponseError("FinalizeArtifact: response from backend was not ok"); } const artifactId = BigInt(finalizeArtifactResp.artifactId); - core17.info(`Artifact ${name}.zip successfully finalized. Artifact ID ${artifactId}`); + core18.info(`Artifact ${name}.zip successfully finalized. 
Artifact ID ${artifactId}`); return { size: uploadResult.uploadSize, digest: uploadResult.sha256Hash, @@ -132645,18 +132645,18 @@ var require_webidl3 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? "" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts = void 0) { @@ -137982,15 +137982,15 @@ var require_api_request3 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -138017,7 +138017,7 @@ var require_api_request3 = __commonJS({ trailers: this.trailers, opaque, body, - context: context3 + context: context4 }); } } @@ -138137,15 +138137,15 @@ var require_api_stream3 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -138173,7 +138173,7 @@ var require_api_stream3 = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -138365,17 +138365,17 @@ var require_api_pipeline3 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; assert(!res, "pipeline cannot be retried"); if (ret.destroyed) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -138393,7 +138393,7 @@ var require_api_pipeline3 = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -138477,7 +138477,7 @@ var require_api_upgrade3 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } @@ -138488,7 +138488,7 @@ var require_api_upgrade3 = __commonJS({ throw new SocketError("bad upgrade", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; assert.strictEqual(statusCode, 101); removeSignal(this); this.callback = null; @@ -138497,7 +138497,7 @@ var require_api_upgrade3 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -138565,18 +138565,18 @@ var require_api_connect3 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (!this.callback) { throw new RequestAbortedError(); } this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -138588,7 +138588,7 @@ var require_api_connect3 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -146707,8 +146707,8 @@ var require_dist_node2 = __commonJS({ function isKeyOperator2(operator) { return operator === ";" || operator === "&" || operator === "?"; } - function getValues2(context3, operator, key, modifier) { - var value = context3[key], result = 
[]; + function getValues2(context4, operator, key, modifier) { + var value = context4[key], result = []; if (isDefined3(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -146772,7 +146772,7 @@ var require_dist_node2 = __commonJS({ expand: expand2.bind(null, template) }; } - function expand2(template, context3) { + function expand2(template, context4) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -146786,7 +146786,7 @@ var require_dist_node2 = __commonJS({ } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues2(context3, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues2(context4, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -152238,8 +152238,8 @@ var require_download_artifact = __commonJS({ var promises_1 = __importDefault2(require("fs/promises")); var crypto2 = __importStar2(require("crypto")); var stream2 = __importStar2(require("stream")); - var github3 = __importStar2(require_github2()); - var core17 = __importStar2(require_core()); + var github4 = __importStar2(require_github2()); + var core18 = __importStar2(require_core()); var httpClient = __importStar2(require_lib()); var unzip_stream_1 = __importDefault2(require_unzip()); var user_agent_1 = require_user_agent2(); @@ -152275,7 +152275,7 @@ var require_download_artifact = __commonJS({ return yield streamExtractExternal(url2, directory); } catch (error3) { retryCount++; - core17.debug(`Failed to download artifact after ${retryCount} retries due to ${error3.message}. Retrying in 5 seconds...`); + core18.debug(`Failed to download artifact after ${retryCount} retries due to ${error3.message}. 
Retrying in 5 seconds...`); yield new Promise((resolve8) => setTimeout(resolve8, 5e3)); } } @@ -152305,7 +152305,7 @@ var require_download_artifact = __commonJS({ extractStream.on("data", () => { timer.refresh(); }).on("error", (error3) => { - core17.debug(`response.message: Artifact download failed: ${error3.message}`); + core18.debug(`response.message: Artifact download failed: ${error3.message}`); clearTimeout(timer); reject(error3); }).pipe(unzip_stream_1.default.Extract({ path: directory })).on("close", () => { @@ -152313,7 +152313,7 @@ var require_download_artifact = __commonJS({ if (hashStream) { hashStream.end(); sha256Digest = hashStream.read(); - core17.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); + core18.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`); } resolve8({ sha256Digest: `sha256:${sha256Digest}` }); }).on("error", (error3) => { @@ -152326,9 +152326,9 @@ var require_download_artifact = __commonJS({ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, token, options) { return __awaiter2(this, void 0, void 0, function* () { const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? 
void 0 : options.path); - const api = github3.getOctokit(token); + const api = github4.getOctokit(token); let digestMismatch = false; - core17.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); + core18.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, repo: repositoryName, @@ -152345,16 +152345,16 @@ var require_download_artifact = __commonJS({ if (!location) { throw new Error(`Unable to redirect to artifact download url`); } - core17.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`); try { - core17.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(location, downloadPath); - core17.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core17.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core17.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error3) { @@ -152381,7 +152381,7 @@ var require_download_artifact = __commonJS({ Are you trying to download from a different run? 
Try specifying a github-token with \`actions:read\` scope.`); } if (artifacts.length > 1) { - core17.warning("Multiple artifacts found, defaulting to first."); + core18.warning("Multiple artifacts found, defaulting to first."); } const signedReq = { workflowRunBackendId: artifacts[0].workflowRunBackendId, @@ -152389,16 +152389,16 @@ Are you trying to download from a different run? Try specifying a github-token w name: artifacts[0].name }; const { signedUrl } = yield artifactClient.GetSignedArtifactURL(signedReq); - core17.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); + core18.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`); try { - core17.info(`Starting download of artifact to: ${downloadPath}`); + core18.info(`Starting download of artifact to: ${downloadPath}`); const extractResponse = yield streamExtract(signedUrl, downloadPath); - core17.info(`Artifact download completed successfully.`); + core18.info(`Artifact download completed successfully.`); if (options === null || options === void 0 ? void 0 : options.expectedHash) { if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) { digestMismatch = true; - core17.debug(`Computed digest: ${extractResponse.sha256Digest}`); - core17.debug(`Expected digest: ${options.expectedHash}`); + core18.debug(`Computed digest: ${extractResponse.sha256Digest}`); + core18.debug(`Expected digest: ${options.expectedHash}`); } } } catch (error3) { @@ -152411,10 +152411,10 @@ Are you trying to download from a different run? 
Try specifying a github-token w function resolveOrCreateDirectory() { return __awaiter2(this, arguments, void 0, function* (downloadPath = (0, config_1.getGitHubWorkspaceDir)()) { if (!(yield exists(downloadPath))) { - core17.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); + core18.debug(`Artifact destination folder does not exist, creating: ${downloadPath}`); yield promises_1.default.mkdir(downloadPath, { recursive: true }); } else { - core17.debug(`Artifact destination folder already exists: ${downloadPath}`); + core18.debug(`Artifact destination folder already exists: ${downloadPath}`); } return downloadPath; }); @@ -152455,7 +152455,7 @@ var require_retry_options = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getRetryOptions = void 0; - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var defaultMaxRetryNumber = 5; var defaultExemptStatusCodes = [400, 401, 403, 404, 422]; function getRetryOptions(defaultOptions, retries = defaultMaxRetryNumber, exemptStatusCodes = defaultExemptStatusCodes) { @@ -152470,7 +152470,7 @@ var require_retry_options = __commonJS({ retryOptions.doNotRetry = exemptStatusCodes; } const requestOptions = Object.assign(Object.assign({}, defaultOptions.request), { retries }); - core17.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? _a : "octokit default: [400, 401, 403, 404, 422]"})`); + core18.debug(`GitHub client configured with: (retries: ${requestOptions.retries}, retry-exempt-status-code: ${(_a = retryOptions.doNotRetry) !== null && _a !== void 0 ? 
_a : "octokit default: [400, 401, 403, 404, 422]"})`); return [retryOptions, requestOptions]; } exports2.getRetryOptions = getRetryOptions; @@ -152627,7 +152627,7 @@ var require_get_artifact = __commonJS({ exports2.getArtifactInternal = exports2.getArtifactPublic = void 0; var github_1 = require_github2(); var plugin_retry_1 = require_dist_node12(); - var core17 = __importStar2(require_core()); + var core18 = __importStar2(require_core()); var utils_1 = require_utils11(); var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node11(); @@ -152647,8 +152647,8 @@ var require_get_artifact = __commonJS({ retry: retryOpts, request: requestOpts }; - const github3 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); - const getArtifactResp = yield github3.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}", { + const github4 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const getArtifactResp = yield github4.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -152665,7 +152665,7 @@ var require_get_artifact = __commonJS({ let artifact2 = getArtifactResp.data.artifacts[0]; if (getArtifactResp.data.artifacts.length > 1) { artifact2 = getArtifactResp.data.artifacts.sort((a, b) => b.id - a.id)[0]; - core17.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.id})`); } return { artifact: { @@ -152698,7 +152698,7 @@ var require_get_artifact = __commonJS({ let artifact2 = res.artifacts[0]; if (res.artifacts.length > 1) { artifact2 = res.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0]; - core17.debug(`More than one artifact found for a single name, returning newest (id: 
${artifact2.databaseId})`); + core18.debug(`More than one artifact found for a single name, returning newest (id: ${artifact2.databaseId})`); } return { artifact: { @@ -152771,9 +152771,9 @@ var require_delete_artifact = __commonJS({ retry: retryOpts, request: requestOpts }; - const github3 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const github4 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); const getArtifactResp = yield (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token); - const deleteArtifactResp = yield github3.rest.actions.deleteArtifact({ + const deleteArtifactResp = yield github4.rest.actions.deleteArtifact({ owner: repositoryOwner, repo: repositoryName, artifact_id: getArtifactResp.artifact.id @@ -152880,9 +152880,9 @@ var require_list_artifacts = __commonJS({ retry: retryOpts, request: requestOpts }; - const github3 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const github4 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); let currentPageNumber = 1; - const { data: listArtifactResponse } = yield github3.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { + const { data: listArtifactResponse } = yield github4.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -152907,7 +152907,7 @@ var require_list_artifacts = __commonJS({ currentPageNumber++; for (currentPageNumber; currentPageNumber <= numberOfPages; currentPageNumber++) { (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`); - const { data: listArtifactResponse2 } = yield github3.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { + const { data: listArtifactResponse2 } = yield github4.request("GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -155806,7 +155806,7 @@ var require_core3 = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable9(name, val) { + function exportVariable10(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -155815,7 +155815,7 @@ var require_core3 = __commonJS({ } (0, command_1.issueCommand)("set-env", { name }, convertedVal); } - exports2.exportVariable = exportVariable9; + exports2.exportVariable = exportVariable10; function setSecret(secret) { (0, command_1.issueCommand)("add-mask", {}, secret); } @@ -157436,7 +157436,7 @@ var require_requestUtils2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.retryHttpClientRequest = exports2.retry = void 0; var utils_1 = require_utils13(); - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var config_variables_1 = require_config_variables(); function retry2(name, operation, customErrorMessages, maxAttempts) { return __awaiter2(this, void 0, void 0, function* () { @@ -157463,13 +157463,13 @@ var require_requestUtils2 = __commonJS({ errorMessage = error3.message; } if (!isRetryable) { - core17.info(`${name} - Error is not retryable`); + core18.info(`${name} - Error is not retryable`); if (response) { (0, utils_1.displayHttpDiagnostics)(response); } break; } - core17.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core18.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } @@ -157553,7 +157553,7 @@ var 
require_upload_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadHttpClient = void 0; var fs20 = __importStar2(require("fs")); - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var tmp = __importStar2(require_tmp_promise()); var stream2 = __importStar2(require("stream")); var utils_1 = require_utils13(); @@ -157618,7 +157618,7 @@ var require_upload_http_client = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); - core17.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + core18.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; let continueOnError = true; if (options) { @@ -157655,15 +157655,15 @@ var require_upload_http_client = __commonJS({ } const startTime = perf_hooks_1.performance.now(); const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core17.isDebug()) { - core17.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + if (core18.isDebug()) { + core18.debug(`File: ${++completedFiles}/${filesToUpload.length}. 
${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); } uploadFileSize += uploadFileResult.successfulUploadSize; totalFileSize += uploadFileResult.totalSize; if (uploadFileResult.isSuccess === false) { failedItemsToReport.push(currentFileParameters.file); if (!continueOnError) { - core17.error(`aborting artifact upload`); + core18.error(`aborting artifact upload`); abortPendingFileUploads = true; } } @@ -157672,7 +157672,7 @@ var require_upload_http_client = __commonJS({ }))); this.statusReporter.stop(); this.uploadHttpManager.disposeAndReplaceAllClients(); - core17.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + core18.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); return { uploadSize: uploadFileSize, totalSize: totalFileSize, @@ -157698,16 +157698,16 @@ var require_upload_http_client = __commonJS({ let uploadFileSize = 0; let isGzip = true; if (!isFIFO && totalFileSize < 65536) { - core17.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + core18.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); let openUploadStream; if (totalFileSize < buffer.byteLength) { - core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs20.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { - core17.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. 
The file will be uploaded using gzip.`); + core18.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream2.PassThrough(); passThrough.end(buffer); @@ -157719,7 +157719,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += uploadFileSize; - core17.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); } return { isSuccess: isUploadSuccessful, @@ -157728,16 +157728,16 @@ var require_upload_http_client = __commonJS({ }; } else { const tempFile = yield tmp.file(); - core17.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + core18.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; if (!isFIFO && totalFileSize < uploadFileSize) { - core17.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + core18.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } else { - core17.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + core18.debug(`The gzip file created for ${parameters.file} is smaller than the original file. 
The file will be uploaded using gzip.`); } let abortFileUpload = false; while (offset < uploadFileSize) { @@ -157757,7 +157757,7 @@ var require_upload_http_client = __commonJS({ if (!result) { isUploadSuccessful = false; failedChunkSizes += chunkSize; - core17.warning(`Aborting upload for ${parameters.file} due to failure`); + core18.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } else { if (uploadFileSize > 8388608) { @@ -157765,7 +157765,7 @@ var require_upload_http_client = __commonJS({ } } } - core17.debug(`deleting temporary gzip file ${tempFile.path}`); + core18.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -157804,7 +157804,7 @@ var require_upload_http_client = __commonJS({ if (response) { (0, utils_1.displayHttpDiagnostics)(response); } - core17.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + core18.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } return false; @@ -157812,14 +157812,14 @@ var require_upload_http_client = __commonJS({ const backOff = (retryAfterValue) => __awaiter2(this, void 0, void 0, function* () { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + core18.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); yield (0, utils_1.sleep)(backoffTime); } - core17.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; }); while (retryCount <= retryLimit) { @@ -157827,7 +157827,7 @@ var require_upload_http_client = __commonJS({ try { response = yield uploadChunkRequest(); } catch (error3) { - core17.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + core18.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); console.log(error3); if (incrementAndCheckRetryLimit()) { return false; @@ -157839,13 +157839,13 @@ var require_upload_http_client = __commonJS({ if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core17.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + core18.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { - core17.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + core18.error(`Unexpected response. 
Unable to upload chunk to ${resourceUrl}`); (0, utils_1.displayHttpDiagnostics)(response); return false; } @@ -157863,7 +157863,7 @@ var require_upload_http_client = __commonJS({ resourceUrl.searchParams.append("artifactName", artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); - core17.debug(`URL is ${resourceUrl.toString()}`); + core18.debug(`URL is ${resourceUrl.toString()}`); const client = this.uploadHttpManager.getClient(0); const headers = (0, utils_1.getUploadHeaders)("application/json", false); const customErrorMessages = /* @__PURE__ */ new Map([ @@ -157876,7 +157876,7 @@ var require_upload_http_client = __commonJS({ return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); - core17.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + core18.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } }; @@ -157945,7 +157945,7 @@ var require_download_http_client = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DownloadHttpClient = void 0; var fs20 = __importStar2(require("fs")); - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var zlib3 = __importStar2(require("zlib")); var utils_1 = require_utils13(); var url_1 = require("url"); @@ -157999,11 +157999,11 @@ var require_download_http_client = __commonJS({ downloadSingleArtifact(downloadItems) { return __awaiter2(this, void 0, void 0, function* () { const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); - core17.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + core18.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; let currentFile = 0; let downloadedFiles = 0; - core17.info(`Total number of files 
that will be downloaded: ${downloadItems.length}`); + core18.info(`Total number of files that will be downloaded: ${downloadItems.length}`); this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); this.statusReporter.start(); yield Promise.all(parallelDownloads.map((index) => __awaiter2(this, void 0, void 0, function* () { @@ -158012,8 +158012,8 @@ var require_download_http_client = __commonJS({ currentFile += 1; const startTime = perf_hooks_1.performance.now(); yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core17.isDebug()) { - core17.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + if (core18.isDebug()) { + core18.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } this.statusReporter.incrementProcessedCount(); } @@ -158051,19 +158051,19 @@ var require_download_http_client = __commonJS({ } else { this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { - core17.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + core18.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); yield (0, utils_1.sleep)(retryAfterValue); } else { const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); - core17.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + core18.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the download`); yield (0, utils_1.sleep)(backoffTime); } - core17.info(`Finished backoff for retry #${retryCount}, continuing with download`); + core18.info(`Finished backoff for retry #${retryCount}, continuing with download`); } }); const isAllBytesReceived = (expected, received) => { if (!expected || !received || process.env["ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION"]) { - core17.info("Skipping download validation."); + core18.info("Skipping download validation."); return true; } return parseInt(expected) === received; @@ -158084,7 +158084,7 @@ var require_download_http_client = __commonJS({ try { response = yield makeDownloadRequest(); } catch (error3) { - core17.info("An error occurred while attempting to download a file"); + core18.info("An error occurred while attempting to download a file"); console.log(error3); yield backOff(); continue; @@ -158104,7 +158104,7 @@ var require_download_http_client = __commonJS({ } } if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { - core17.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + core18.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); (0, utils_1.isThrottledStatusCode)(response.message.statusCode) ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { @@ -158126,29 +158126,29 @@ var require_download_http_client = __commonJS({ if (isGzip) { const gunzip = zlib3.createGunzip(); response.message.on("error", (error3) => { - core17.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error3); }).pipe(gunzip).on("error", (error3) => { - core17.info(`An error occurred while attempting to decompress the response stream`); + core18.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error3); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error3) => { - core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error3); }); } else { response.message.on("error", (error3) => { - core17.info(`An error occurred while attempting to read the response stream`); + core18.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error3); }).pipe(destinationStream).on("close", () => { resolve8(); }).on("error", (error3) => { - core17.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core18.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error3); }); } @@ -158287,7 +158287,7 @@ var require_artifact_client = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; - var core17 = __importStar2(require_core3()); + var core18 = __importStar2(require_core3()); var upload_specification_1 = require_upload_specification(); var upload_http_client_1 = 
require_upload_http_client(); var utils_1 = require_utils13(); @@ -158308,7 +158308,7 @@ var require_artifact_client = __commonJS({ */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter2(this, void 0, void 0, function* () { - core17.info(`Starting artifact upload + core18.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); (0, path_and_artifact_name_validation_1.checkArtifactName)(name); const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); @@ -158320,24 +158320,24 @@ For more detailed logs during the artifact upload process, enable step-debugging }; const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); if (uploadSpecification.length === 0) { - core17.warning(`No files found that can be uploaded`); + core18.warning(`No files found that can be uploaded`); } else { const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); if (!response.fileContainerResourceUrl) { - core17.debug(response.toString()); + core18.debug(response.toString()); throw new Error("No URL provided by the Artifact Service to upload an artifact to"); } - core17.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core17.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); + core18.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core18.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - core17.info(`File upload process has finished. Finalizing the artifact upload`); + core18.info(`File upload process has finished. 
Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); if (uploadResult.failedItems.length > 0) { - core17.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + core18.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); } else { - core17.info(`Artifact has been finalized. All files have been successfully uploaded!`); + core18.info(`Artifact has been finalized. All files have been successfully uploaded!`); } - core17.info(` + core18.info(` The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage @@ -158371,10 +158371,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz path18 = (0, path_1.resolve)(path18); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path18, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { - core17.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + core18.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); - core17.info("Directory structure has been set up for the artifact"); + core18.info("Directory structure has been set up for the artifact"); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } @@ -158390,7 +158390,7 @@ Note: The size of downloaded zips can differ significantly from the reported siz const response = []; const artifacts = yield downloadHttpClient.listArtifacts(); if (artifacts.count === 0) { - core17.info("Unable to find any artifacts for the associated workflow"); + core18.info("Unable to find any artifacts for the associated workflow"); return response; } if (!path18) { @@ -158402,11 +158402,11 @@ Note: The size of downloaded zips can differ significantly from the reported siz while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; - core17.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + core18.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path18, true); if (downloadSpecification.filesToDownload.length === 0) { - core17.info(`No 
downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + core18.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); @@ -161336,7 +161336,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/init-action-post.ts -var core16 = __toESM(require_core()); +var core17 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -168310,14 +168310,14 @@ async function createDatabaseBundleCli(codeql, config, language) { // src/init-action-post-helper.ts var fs19 = __toESM(require("fs")); -var github2 = __toESM(require_github()); +var github3 = __toESM(require_github()); // src/upload-lib.ts var fs17 = __toESM(require("fs")); var path16 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/fingerprints.ts @@ -169443,7 +169443,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core13 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io6 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -169585,7 +169587,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core14.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -169684,13 +169686,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core13.warning(httpError.message || GENERIC_403_MSG); + core14.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core13.warning(httpError.message || GENERIC_404_MSG); + core14.warning(httpError.message || GENERIC_404_MSG); break; default: - core13.warning(httpError.message); + core14.warning(httpError.message); break; } } @@ -170048,7 +170050,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? 
tool : "none"})` ); } - core13.exportVariable(sentinelEnvVar, sentinelEnvVar); + core14.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -170091,7 +170093,7 @@ function filterAlertsByDiffRange(logger, sarifLog) { var fs18 = __toESM(require("fs")); var path17 = __toESM(require("path")); var import_zlib2 = __toESM(require("zlib")); -var core14 = __toESM(require_core()); +var core15 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -170312,7 +170314,7 @@ async function run(uploadAllAvailableDebugArtifacts, printDebugLogs2, codeql, co ); } if (process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true") { - if (!github2.context.payload.pull_request?.head.repo.fork) { + if (!github3.context.payload.pull_request?.head.repo.fork) { await removeUploadedSarif(uploadFailedSarifResult, logger); } else { logger.info( @@ -170455,7 +170457,7 @@ async function removeUploadedSarif(uploadFailedSarifResult, logger) { // src/status-report.ts var os3 = __toESM(require("os")); -var core15 = __toESM(require_core()); +var core16 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -170492,12 +170494,12 @@ function getJobStatusDisplayName(status) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core15.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core15.exportVariable( + core16.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -170516,14 +170518,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core15.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core16.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core15.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core16.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -170606,9 +170608,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core15.debug(`Sending status report: ${statusReportJSON}`); + core16.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core15.debug("In test mode. Status reports are not uploaded."); + core16.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -170628,28 +170630,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core15.warning( + core16.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core15.warning( + core16.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core15.warning(httpError.message); + core16.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core15.debug(INCOMPATIBLE_MSG); + core16.debug(INCOMPATIBLE_MSG); } else { - core15.debug(OUT_OF_DATE_MSG); + core16.debug(OUT_OF_DATE_MSG); } return; } } - core15.warning( + core16.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -170720,7 +170722,7 @@ async function run2(startedAt) { } } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core16.setFailed(error3.message); + core17.setFailed(error3.message); const statusReportBase2 = await createStatusReportBase( "init-post" /* InitPost */, getActionsStatus(error3), @@ -170765,14 +170767,14 @@ function getFinalJobStatus(config) { } let jobStatus; if (process.env["CODEQL_ACTION_ANALYZE_DID_COMPLETE_SUCCESSFULLY" /* ANALYZE_DID_COMPLETE_SUCCESSFULLY */] === "true") { - core16.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, "JOB_STATUS_SUCCESS" /* SuccessStatus */); + core17.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, "JOB_STATUS_SUCCESS" /* SuccessStatus */); jobStatus = "JOB_STATUS_SUCCESS" /* SuccessStatus */; } else if (config !== void 0) { jobStatus = "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */; } else { jobStatus = "JOB_STATUS_UNKNOWN" /* UnknownStatus */; } - core16.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, jobStatus); + core17.exportVariable("CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, jobStatus); return jobStatus; } function getJobStatusFromEnvironment() { @@ -170791,7 +170793,7 @@ async function runWrapper() { try { await run2(startedAt); } catch (error3) { - core16.setFailed(`init post action failed: ${wrapError(error3).message}`); + core17.setFailed(`init post action failed: ${wrapError(error3).message}`); await sendUnhandledErrorStatusReport( "init-post" /* InitPost 
*/, startedAt, diff --git a/lib/init-action.js b/lib/init-action.js index a2a95f193..b177651be 100644 --- a/lib/init-action.js +++ b/lib/init-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? "" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable6; + exports2.exportVariable = exportVariable7; exports2.setSecret = setSecret; exports2.addPath = addPath2; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable6(name, val) { + function exportVariable7(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context3) { - const plural = context3.types.length === 1 ? "" : " one of"; - const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context4) { + const plural = context4.types.length === 1 ? 
"" : " one of"; + const message = `${context4.argument} could not be converted to${plural}: ${context4.types.join(", ")}.`; return webidl.errors.exception({ - header: context3.prefix, + header: context4.prefix, message }); }; - webidl.errors.invalidArgument = function(context3) { + webidl.errors.invalidArgument = function(context4) { return webidl.errors.exception({ - header: context3.prefix, - message: `"${context3.value}" is an invalid ${context3.type}.` + header: context4.prefix, + message: `"${context4.value}" is an invalid ${context4.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context4, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context3 + context: context4 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context3, callback, responseHeaders } = this; + const { factory, opaque, context: context4, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context3 + context: context4 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context3 } = this; + const { opaque, handler: handler2, context: context4 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context3 + context: context4 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context3) { + onConnect(abort, context4) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context3; + this.context = context4; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context3 } = this; + const { callback, opaque, context: context4 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context3 + context: context4 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context3, operator, key, modifier) 
{ - var value = context3[key], result = []; +function getValues(context4, operator, key, modifier) { + var value = context4[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context3) { +function expand(template, context4) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context3) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context4, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48898,7 +48898,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48910,23 +48910,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50554,7 +50554,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs16 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path17 = __importStar2(require("path")); @@ -50607,7 +50607,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await2(fs16.promises.lstat(searchPath)); } catch (err) { @@ -50682,7 +50682,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50698,7 +50698,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50801,7 +50801,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles2; var crypto3 = __importStar2(require("crypto")); - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs16 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50810,7 +50810,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core14.info : core14.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto3.createHash("sha256"); @@ -52201,7 +52201,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob2 = __importStar2(require_glob()); var io7 = __importStar2(require_io()); @@ -52252,7 +52252,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path17.relative(workspace, file).replace(new RegExp(`\\${path17.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52280,7 +52280,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52291,10 +52291,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -52302,7 +52302,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver10.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52427,14 +52427,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context3 = {}; - for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; - context3.addInitializer = function(f) { + var context4 = {}; + for (var p in contextIn) context4[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context4.access[p] = contextIn.access[p]; + context4.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context4); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53161,19 +53161,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context3 = createLoggerContext({ + var context4 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context3.logger; + exports2.TypeSpecRuntimeLogger = context4.logger; function setLogLevel(logLevel) { - context3.setLogLevel(logLevel); + context4.setLogLevel(logLevel); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -57419,19 +57419,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context3 = (0, logger_1.createLoggerContext)({ + var context4 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context3.logger; + exports2.AzureLogger = context4.logger; function setLogLevel(level) { - context3.setLogLevel(level); + context4.setLogLevel(level); } function getLogLevel() { - return context3.getLogLevel(); + return context4.getLogLevel(); } function createClientLogger(namespace) { - return context3.createClientLogger(namespace); + return context4.createClientLogger(namespace); } } }); @@ -58341,14 +58341,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context3 = new TracingContextImpl(options.parentContext); + let context4 = new TracingContextImpl(options.parentContext); if (options.span) { - context3 = context3.setValue(exports2.knownContextKeys.span, options.span); + context4 = context4.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); + context4 = context4.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context3; + return context4; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58486,8 +58486,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context3, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); + function withContext(context4, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context4, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91958,7 +91958,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -92000,7 +92000,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92057,14 +92057,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -92149,7 +92149,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92207,9 +92207,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92468,7 +92468,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92506,7 +92506,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92540,7 +92540,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92590,7 +92590,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92601,7 +92601,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92719,7 +92719,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92809,7 +92809,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92829,9 +92829,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92867,12 +92867,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -93066,7 +93066,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache5; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs16 = __importStar2(require("fs")); @@ -93084,7 +93084,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url}`); + core15.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -93112,7 +93112,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -93125,9 +93125,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -93141,10 +93141,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93187,7 +93187,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93209,7 +93209,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93252,16 +93252,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core14.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -98744,7 +98744,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache5; exports2.saveCache = saveCache5; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var path17 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98803,7 +98803,7 @@ var require_cache5 = __commonJS({ function restoreCache5(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98818,8 +98818,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98837,19 +98837,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path17.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98857,16 +98857,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98877,8 +98877,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - 
core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98896,30 +98896,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path17.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98927,9 +98927,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98938,7 +98938,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98947,7 +98947,7 @@ var require_cache5 = __commonJS({ function saveCache5(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98965,26 +98965,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path17.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98997,26 +98997,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99029,23 +99029,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path17.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -99056,16 +99056,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core14.debug(`Failed to reserve cache: ${error3}`); + core15.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError2(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -99073,7 +99073,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -99086,21 +99086,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError2.name) { - 
core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99327,7 +99327,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99350,10 +99350,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var io7 = __importStar2(require_io()); var crypto3 = __importStar2(require("crypto")); var fs16 = __importStar2(require("fs")); 
@@ -99485,8 +99485,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path17.join(_getTempDirectory(), crypto3.randomUUID()); yield io7.mkdirP(path17.dirname(dest)); - core14.debug(`Downloading ${url}`); - core14.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99512,7 +99512,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99521,7 +99521,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99530,16 +99530,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs16.createWriteStream(dest)); - core14.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core15.debug("download failed"); try { yield io7.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99554,7 +99554,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99607,7 +99607,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99617,7 +99617,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99625,7 +99625,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99656,7 +99656,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io7.which("xar", true); @@ -99699,7 +99699,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99719,7 +99719,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io7.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99728,7 +99728,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io7.which("unzip", true); const 
args = [file]; - if (!core14.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99739,8 +99739,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver10.clean(version) || version; arch2 = arch2 || os6.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs16.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99757,14 +99757,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver10.clean(version) || version; arch2 = arch2 || os6.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs16.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path17.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io7.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99787,12 +99787,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver10.clean(versionSpec) || ""; const cachePath = path17.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core14.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs16.existsSync(cachePath) && fs16.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core14.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -99821,7 +99821,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99842,7 +99842,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -99866,7 +99866,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path17.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io7.rmRF(folderPath); yield io7.rmRF(markerPath); @@ -99878,18 +99878,18 @@ var require_tool_cache = __commonJS({ const folderPath = path17.join(_getCacheDirectory(), tool, semver10.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs16.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver10.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver10.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core15.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver10.gt(a, b)) { return 1; @@ -99905,9 +99905,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -100490,8 +100490,8 @@ __export(init_action_exports, { module.exports = __toCommonJS(init_action_exports); var fs15 = __toESM(require("fs")); var path16 = __toESM(require("path")); -var core13 = __toESM(require_core()); -var github2 = __toESM(require_github()); +var core14 = __toESM(require_core()); +var github3 = __toESM(require_github()); var io6 = __toESM(require_io()); var semver9 = __toESM(require_semver2()); @@ -107141,7 +107141,9 @@ var internal = { // src/init.ts var fs13 = __toESM(require("fs")); var path14 = __toESM(require("path")); +var core11 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); // src/codeql.ts @@ -109100,6 +109102,13 @@ async function getFileCoverageInformationEnabled(debugMode, codeql, features, re showDeprecationWarning: false }; } + if ((process.env["CODEQL_ACTION_FILE_COVERAGE_ON_PRS" /* FILE_COVERAGE_ON_PRS */] || "").toLocaleLowerCase() === "true") { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false + }; + } if (repositoryProperties["github-codeql-file-coverage-on-prs" /* FILE_COVERAGE_ON_PRS */] === true) { return { enabled: true, @@ -109120,10 +109129,40 @@ async function getFileCoverageInformationEnabled(debugMode, codeql, features, re showDeprecationWarning: false }; } +function logFileCoverageOnPrsDeprecationWarning(logger) { + if 
(process.env["CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */]) { + return; + } + const repositoryOwnerType = github2.context.payload.repository?.owner.type; + let message = "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses."; + const envVarOptOut = "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`."; + const repoPropertyOptOut = 'create a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to `true` in the repository\'s settings.'; + if (repositoryOwnerType === "Organization") { + if (isDefaultSetup()) { + message += ` + +To opt out of this change, ${repoPropertyOptOut}`; + } else { + message += ` + +To opt out of this change, ${envVarOptOut} Alternatively, ${repoPropertyOptOut}`; + } + } else if (isDefaultSetup()) { + message += ` + +To opt out of this change, switch to an advanced setup workflow and ${envVarOptOut}`; + } else { + message += ` + +To opt out of this change, ${envVarOptOut}`; + } + logger.warning(message); + core11.exportVariable("CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, "true"); +} // src/status-report.ts var os5 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -109139,12 +109178,12 @@ function getActionsStatus(error3, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -109163,14 +109202,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -109253,9 +109292,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core12.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); + core12.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -109275,28 +109314,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core12.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning( + core12.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core11.warning(httpError.message); + core12.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core12.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core12.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core12.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -109378,7 +109417,7 @@ async function sendUnhandledErrorStatusReport(actionName, actionStartedAt, error var fs14 = __toESM(require("fs")); var path15 = __toESM(require("path")); var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); function toCodedErrors(errors) { return Object.entries(errors).reduce( (acc, [code, message]) => { @@ -109501,7 +109540,7 @@ async function validateWorkflow(codeql, logger) { } catch (e) { return `error: formatWorkflowErrors() failed: ${String(e)}`; } - core12.warning(message); + core13.warning(message); } return formatWorkflowCause(workflowErrors); } @@ -109547,7 +109586,7 @@ async function getWorkflowAbsolutePath(logger) { } async function checkWorkflow(logger, codeql) { if (!isDynamicWorkflow() && process.env["CODEQL_ACTION_SKIP_WORKFLOW_VALIDATION" /* SKIP_WORKFLOW_VALIDATION */] !== "true") { - core12.startGroup("Validating workflow"); + core13.startGroup("Validating workflow"); const validateWorkflowResult = await internal2.validateWorkflow( codeql, logger @@ -109559,7 +109598,7 @@ async function checkWorkflow(logger, codeql) { `Unable to validate code scanning workflow: ${validateWorkflowResult}` ); } - core12.endGroup(); + core13.endGroup(); } } var internal2 = { @@ -109667,8 +109706,8 @@ async function run(startedAt) { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); - 
core13.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); + core14.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core14.exportVariable("CODEQL_ACTION_INIT_HAS_RUN" /* INIT_ACTION_HAS_RUN */, "true"); configFile = getOptionalInput("config-file"); sourceRoot = path16.resolve( getRequiredEnvParam("GITHUB_WORKSPACE"), @@ -109721,12 +109760,12 @@ async function run(startedAt) { ); } if (semver9.lt(actualVer, publicPreview)) { - core13.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); + core14.exportVariable("CODEQL_ENABLE_EXPERIMENTAL_FEATURES" /* EXPERIMENTAL_FEATURES */, "true"); logger.info("Experimental Rust analysis enabled"); } } analysisKinds = await getAnalysisKinds(logger); - const debugMode = getOptionalInput("debug") === "true" || core13.isDebug(); + const debugMode = getOptionalInput("debug") === "true" || core14.isDebug(); const repositoryProperties = repositoryPropertiesResult.orElse({}); const fileCoverageResult = await getFileCoverageInformationEnabled( debugMode, @@ -109787,19 +109826,13 @@ async function run(startedAt) { ) ); } - if (fileCoverageResult.showDeprecationWarning && !process.env["CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */]) { - logger.warning( - 'Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Repositories owned by an organization can opt out of this change by creating a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to `true` in the repository\'s settings.' 
- ); - core13.exportVariable( - "CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION" /* DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION */, - "true" - ); + if (fileCoverageResult.showDeprecationWarning) { + logFileCoverageOnPrsDeprecationWarning(logger); } await checkInstallPython311(config.languages, codeql); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core13.setFailed(error3.message); + core14.setFailed(error3.message); const statusReportBase = await createStatusReportBase( "init" /* Init */, error3 instanceof ConfigurationError ? "user-error" : "aborted", @@ -109849,8 +109882,8 @@ async function run(startedAt) { } const goFlags = process.env["GOFLAGS"]; if (goFlags) { - core13.exportVariable("GOFLAGS", goFlags); - core13.warning( + core14.exportVariable("GOFLAGS", goFlags); + core14.warning( "Passing the GOFLAGS env parameter to the init action is deprecated. Please move this to the analyze action." ); } @@ -109874,7 +109907,7 @@ async function run(startedAt) { "bin" ); fs15.mkdirSync(tempBinPath, { recursive: true }); - core13.addPath(tempBinPath); + core14.addPath(tempBinPath); const goWrapperPath = path16.resolve(tempBinPath, "go"); fs15.writeFileSync( goWrapperPath, @@ -109883,14 +109916,14 @@ async function run(startedAt) { exec ${goBinaryPath} "$@"` ); fs15.chmodSync(goWrapperPath, "755"); - core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); + core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goWrapperPath); } catch (e) { logger.warning( `Analyzing Go on Linux, but failed to install wrapper script. 
Tracing custom builds may fail: ${e}` ); } } else { - core13.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); + core14.exportVariable("CODEQL_ACTION_GO_BINARY" /* GO_BINARY_LOCATION */, goBinaryPath); } } catch (e) { logger.warning( @@ -109917,20 +109950,20 @@ exec ${goBinaryPath} "$@"` } } } - core13.exportVariable( + core14.exportVariable( "CODEQL_RAM", process.env["CODEQL_RAM"] || getCodeQLMemoryLimit(getOptionalInput("ram"), logger).toString() ); - core13.exportVariable( + core14.exportVariable( "CODEQL_THREADS", process.env["CODEQL_THREADS"] || getThreadsFlagValue(getOptionalInput("threads"), logger).toString() ); if (await features.getValue("disable_kotlin_analysis_enabled" /* DisableKotlinAnalysisEnabled */)) { - core13.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); + core14.exportVariable("CODEQL_EXTRACTOR_JAVA_AGENT_DISABLE_KOTLIN", "true"); } const kotlinLimitVar = "CODEQL_EXTRACTOR_KOTLIN_OVERRIDE_MAXIMUM_VERSION_LIMIT"; if (await codeQlVersionAtLeast(codeql, "2.20.3") && !await codeQlVersionAtLeast(codeql, "2.20.4")) { - core13.exportVariable(kotlinLimitVar, "2.1.20"); + core14.exportVariable(kotlinLimitVar, "2.1.20"); } if (config.languages.includes("cpp" /* cpp */)) { const envVar = "CODEQL_EXTRACTOR_CPP_TRAP_CACHING"; @@ -109940,10 +109973,10 @@ exec ${goBinaryPath} "$@"` ); } else if (getTrapCachingEnabled() && await codeQlVersionAtLeast(codeql, "2.17.5")) { logger.info("Enabling CodeQL C++ TRAP caching support"); - core13.exportVariable(envVar, "true"); + core14.exportVariable(envVar, "true"); } else { logger.info("Disabling CodeQL C++ TRAP caching support"); - core13.exportVariable(envVar, "false"); + core14.exportVariable(envVar, "false"); } } if (shouldRestoreCache(config.dependencyCachingEnabled)) { @@ -109958,7 +109991,7 @@ exec ${goBinaryPath} "$@"` } if (await codeQlVersionAtLeast(codeql, "2.17.1")) { } else { - core13.exportVariable( + core14.exportVariable( 
"CODEQL_EXTRACTOR_PYTHON_DISABLE_LIBRARY_EXTRACTION", "true" ); @@ -109984,7 +110017,7 @@ exec ${goBinaryPath} "$@"` "python_default_is_to_not_extract_stdlib" /* PythonDefaultIsToNotExtractStdlib */, codeql )) { - core13.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); + core14.exportVariable("CODEQL_EXTRACTOR_PYTHON_EXTRACT_STDLIB", "true"); } } if (process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]) { @@ -109992,7 +110025,7 @@ exec ${goBinaryPath} "$@"` `${"CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */} is already set to '${process.env["CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */]}', so the Action will not override it.` ); } else if (await codeQlVersionAtLeast(codeql, CODEQL_VERSION_JAR_MINIMIZATION) && config.dependencyCachingEnabled && config.buildMode === "none" /* None */ && config.languages.includes("java" /* java */)) { - core13.exportVariable( + core14.exportVariable( "CODEQL_EXTRACTOR_JAVA_OPTION_MINIMIZE_DEPENDENCY_JARS" /* JAVA_EXTRACTOR_MINIMIZE_DEPENDENCY_JARS */, "true" ); @@ -110036,23 +110069,23 @@ exec ${goBinaryPath} "$@"` const tracerConfig = await getCombinedTracerConfig(codeql, config); if (tracerConfig !== void 0) { for (const [key, value] of Object.entries(tracerConfig.env)) { - core13.exportVariable(key, value); + core14.exportVariable(key, value); } } if (await features.getValue("java_network_debugging" /* JavaNetworkDebugging */)) { const existingJavaToolOptions = getOptionalEnvVar("JAVA_TOOL_OPTIONS" /* JAVA_TOOL_OPTIONS */) || ""; - core13.exportVariable( + core14.exportVariable( "JAVA_TOOL_OPTIONS" /* JAVA_TOOL_OPTIONS */, `${existingJavaToolOptions} -Djavax.net.debug=all` ); } flushDiagnostics(config); await saveConfig(config, logger); - core13.setOutput("codeql-path", config.codeQLCmd); - core13.setOutput("codeql-version", (await 
codeql.getVersion()).version); + core14.setOutput("codeql-path", config.codeQLCmd); + core14.setOutput("codeql-version", (await codeql.getVersion()).version); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core13.setFailed(error3.message); + core14.setFailed(error3.message); await sendCompletedStatusReport( startedAt, config, @@ -110085,7 +110118,7 @@ exec ${goBinaryPath} "$@"` ); } async function loadRepositoryProperties(repositoryNwo, logger) { - const repositoryOwnerType = github2.context.payload.repository?.owner.type; + const repositoryOwnerType = github3.context.payload.repository?.owner.type; logger.debug( `Repository owner type is '${repositoryOwnerType ?? "unknown"}'.` ); @@ -110126,7 +110159,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core13.setFailed(`init action failed: ${getErrorMessage(error3)}`); + core14.setFailed(`init action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "init" /* Init */, startedAt, diff --git a/lib/setup-codeql-action.js b/lib/setup-codeql-action.js index a4d9f713d..1a1a597e0 100644 --- a/lib/setup-codeql-action.js +++ b/lib/setup-codeql-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable6; + exports2.exportVariable = exportVariable7; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable6(name, val) { + function exportVariable7(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -47450,7 +47450,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -47462,23 +47462,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -49106,7 +49106,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var fs9 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path9 = __importStar2(require("path")); @@ -49159,7 +49159,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await2(fs9.promises.lstat(searchPath)); } catch (err) { @@ -49234,7 +49234,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -49250,7 +49250,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -49353,7 +49353,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var fs9 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -49362,7 +49362,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core13.info : core13.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -50753,7 +50753,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); @@ -50804,7 +50804,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path9.relative(workspace, file).replace(new RegExp(`\\${path9.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -50832,7 +50832,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -50843,10 +50843,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -50854,7 +50854,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -50979,14 +50979,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -51713,19 +51713,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -55971,19 +55971,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -56893,14 +56893,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -57038,8 +57038,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -90510,7 +90510,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -90552,7 +90552,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -90609,14 +90609,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -90701,7 +90701,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -90759,9 +90759,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -91020,7 +91020,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -91058,7 +91058,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -91092,7 +91092,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91142,7 +91142,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -91153,7 +91153,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -91271,7 +91271,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -91361,7 +91361,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -91381,9 +91381,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -91419,12 +91419,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -91618,7 +91618,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache4; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs9 = __importStar2(require("fs")); @@ -91636,7 +91636,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -91664,7 +91664,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -91677,9 +91677,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -91693,10 +91693,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -91739,7 +91739,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -91761,7 +91761,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -91804,16 +91804,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -97296,7 +97296,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var path9 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -97355,7 +97355,7 @@ var require_cache5 = __commonJS({ function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -97370,8 +97370,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97389,19 +97389,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path9.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -97409,16 +97409,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error3.message}`); + core14.error(`Failed to restore: ${error3.message}`); } else { - core13.warning(`Failed to restore: ${error3.message}`); + core14.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97429,8 +97429,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - 
core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97448,30 +97448,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core13.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path9.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -97479,9 +97479,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error3.message}`); + core14.error(`Failed to restore: ${error3.message}`); } else { - core13.warning(`Failed to restore: ${error3.message}`); + core14.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -97490,7 +97490,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97499,7 +97499,7 @@ var require_cache5 = __commonJS({ function saveCache4(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -97517,26 +97517,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path9.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -97549,26 +97549,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -97581,23 +97581,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path9.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -97608,16 +97608,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core13.warning(`Cache reservation failed: ${response.message}`); + core14.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core13.debug(`Failed to reserve cache: ${error3}`); + core14.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -97625,7 +97625,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -97638,21 +97638,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed 
to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core13.warning(typedError.message); + core14.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs9 = __importStar2(require("fs")); @@ -99334,8 +99334,8 
@@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path9.join(_getTempDirectory(), crypto2.randomUUID()); yield io6.mkdirP(path9.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs9.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. ${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if 
(!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs9.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch2}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch2}`); + core14.debug(`source file: ${sourceFile}`); if (!fs9.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path9.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path9.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs9.existsSync(cachePath) && fs9.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return 
toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path9.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs9.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core13.debug(`explicit? ${valid3}`); + core14.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -100332,7 +100332,7 @@ var require_follow_redirects = __commonJS({ }); // src/setup-codeql-action.ts -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // node_modules/uuid/dist-node/stringify.js var byteToHex = []; @@ -104508,7 +104508,9 @@ function initFeatures(gitHubVersion, repositoryNwo, tempDir, logger) { } // src/init.ts +var core11 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); // src/codeql.ts @@ -106453,7 +106455,7 @@ async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVe // src/status-report.ts var os2 = __toESM(require("os")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); function isFirstPartyAnalysis(actionName) { if (actionName !== "upload-sarif" /* UploadSarif */) { return true; @@ -106469,12 +106471,12 @@ function getActionsStatus(error3, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? 
"JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core11.exportVariable( + core12.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -106493,14 +106495,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core12.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core12.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -106583,9 +106585,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core11.debug(`Sending status report: ${statusReportJSON}`); + core12.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core11.debug("In test mode. Status reports are not uploaded."); + core12.debug("In test mode. 
Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -106605,28 +106607,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core11.warning( + core12.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core11.warning( + core12.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. 
Details: ${httpError.message}` ); } return; case 404: - core11.warning(httpError.message); + core12.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core11.debug(INCOMPATIBLE_MSG); + core12.debug(INCOMPATIBLE_MSG); } else { - core11.debug(OUT_OF_DATE_MSG); + core12.debug(OUT_OF_DATE_MSG); } return; } } - core11.warning( + core12.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -106716,7 +106718,7 @@ async function run(startedAt) { ); const jobRunUuid = v4_default(); logger.info(`Job run UUID is ${jobRunUuid}.`); - core12.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); + core13.exportVariable("JOB_RUN_UUID" /* JOB_RUN_UUID */, jobRunUuid); const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, "starting", @@ -106745,12 +106747,12 @@ async function run(startedAt) { toolsDownloadStatusReport = initCodeQLResult.toolsDownloadStatusReport; toolsVersion = initCodeQLResult.toolsVersion; toolsSource = initCodeQLResult.toolsSource; - core12.setOutput("codeql-path", codeql.getPath()); - core12.setOutput("codeql-version", (await codeql.getVersion()).version); - core12.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); + core13.setOutput("codeql-path", codeql.getPath()); + core13.setOutput("codeql-version", (await codeql.getVersion()).version); + core13.exportVariable("CODEQL_ACTION_SETUP_CODEQL_HAS_RUN" /* SETUP_CODEQL_ACTION_HAS_RUN */, "true"); } catch (unwrappedError) { const error3 = wrapError(unwrappedError); - core12.setFailed(error3.message); + core13.setFailed(error3.message); const statusReportBase = await createStatusReportBase( "setup-codeql" /* SetupCodeQL */, error3 instanceof ConfigurationError ? 
"user-error" : "failure", @@ -106781,7 +106783,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core12.setFailed(`setup-codeql action failed: ${getErrorMessage(error3)}`); + core13.setFailed(`setup-codeql action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "setup-codeql" /* SetupCodeQL */, startedAt, diff --git a/lib/upload-lib.js b/lib/upload-lib.js index 026d90478..d3c6ef7a5 100644 --- a/lib/upload-lib.js +++ b/lib/upload-lib.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? "" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, 
responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable5; + exports2.exportVariable = exportVariable6; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable5(name, val) { + function exportVariable6(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -26137,18 +26137,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new 
TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? "" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -32144,17 +32144,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -32191,7 +32191,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -32360,17 +32360,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -32398,7 +32398,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -32590,7 +32590,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -32599,10 +32599,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -32620,7 +32620,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -32704,7 +32704,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -32718,7 +32718,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -32726,7 +32726,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -32795,20 +32795,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -32820,7 +32820,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -41475,8 +41475,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -41540,7 +41540,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -41554,7 +41554,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -48747,7 +48747,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -48759,23 +48759,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core12.debug(`implicitDescendants '${result.implicitDescendants}'`); + core13.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core12.debug(`matchDirectories '${result.matchDirectories}'`); + core13.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core12.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -50403,7 +50403,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var fs13 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path12 = __importStar2(require("path")); @@ -50456,7 +50456,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core12.debug(`Search path '${searchPath}'`); + core13.debug(`Search path '${searchPath}'`); try { yield __await2(fs13.promises.lstat(searchPath)); } catch (err) { @@ -50531,7 +50531,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core12.debug(`Broken symlink '${item.path}'`); + core13.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -50547,7 +50547,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -50650,7 +50650,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var fs13 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -50659,7 +50659,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core12.info : core12.debug; + const writeDelegate = verbose ? core13.info : core13.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -52050,7 +52050,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); @@ -52101,7 +52101,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path12.relative(workspace, file).replace(new RegExp(`\\${path12.sep}`, "g"), "/"); - core12.debug(`Matched: ${relativeFile}`); + core13.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52129,7 +52129,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52140,10 +52140,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core12.debug(err.message); + core13.debug(err.message); } versionOutput = versionOutput.trim(); - core12.debug(versionOutput); + core13.debug(versionOutput); return versionOutput; }); } @@ -52151,7 +52151,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core12.debug(`zstd version: ${version}`); + core13.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -52276,14 +52276,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -53010,19 +53010,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -57268,19 +57268,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -58190,14 +58190,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -58335,8 +58335,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -91807,7 +91807,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -91849,7 +91849,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91906,14 +91906,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -91998,7 +91998,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -92056,9 +92056,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core12.debug(`${name} - Error is not retryable`); + core13.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -92317,7 +92317,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -92355,7 +92355,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -92389,7 +92389,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -92439,7 +92439,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -92450,7 +92450,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core12.debug("Unable to validate download, no Content-Length header"); + core13.debug("Unable to validate download, no Content-Length header"); } }); } @@ -92568,7 +92568,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core12.debug("Unable to determine content length, downloading file with http-client..."); + core13.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -92658,7 +92658,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -92678,9 +92678,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -92716,12 +92716,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Download concurrency: ${result.downloadConcurrency}`); - core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core12.debug(`Lookup only: ${result.lookupOnly}`); + core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core13.debug(`Download concurrency: ${result.downloadConcurrency}`); + core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core13.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -92915,7 +92915,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache4; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs13 = __importStar2(require("fs")); @@ -92933,7 +92933,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core12.debug(`Resource Url: ${url2}`); + core13.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -92961,7 +92961,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core12.isDebug()) { + if (core13.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -92974,9 +92974,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core12.setSecret(cacheDownloadUrl); - core12.debug(`Cache Result:`); - core12.debug(JSON.stringify(cacheResult)); + core13.setSecret(cacheDownloadUrl); + core13.debug(`Cache Result:`); + core13.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -92990,10 +92990,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -93036,7 +93036,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -93058,7 +93058,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core12.debug("Awaiting all uploads"); + core13.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -93101,16 +93101,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core12.debug("Upload cache"); + core13.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core12.debug("Commiting cache"); + core13.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core12.info("Cache saved successfully"); + core13.info("Cache saved successfully"); } }); } @@ -98593,7 +98593,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var path12 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -98652,7 +98652,7 @@ var require_cache5 = __commonJS({ function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -98667,8 +98667,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98686,19 +98686,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -98706,16 +98706,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error3.message}`); + core13.error(`Failed to restore: ${error3.message}`); } else { - core12.warning(`Failed to restore: ${error3.message}`); + core13.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98726,8 +98726,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - 
core12.debug(JSON.stringify(keys)); + core13.debug("Resolved Keys:"); + core13.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -98745,30 +98745,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core12.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core13.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core12.info(`Cache hit for restore-key: ${response.matchedKey}`); + core13.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core12.info(`Cache hit for: ${response.matchedKey}`); + core13.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); + core13.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path12.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive path: ${archivePath}`); - core12.debug(`Starting download of archive to: ${archivePath}`); + core13.debug(`Archive path: ${archivePath}`); + core13.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core12.isDebug()) { + core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core12.info("Cache restored successfully"); + core13.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -98776,9 +98776,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error3.message}`); + core13.error(`Failed to restore: ${error3.message}`); } else { - core12.warning(`Failed to restore: ${error3.message}`); + core13.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -98787,7 +98787,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -98796,7 +98796,7 @@ var require_cache5 = __commonJS({ function saveCache4(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); + core13.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -98814,26 +98814,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core12.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -98846,26 +98846,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core12.debug(`Saving Cache (ID: ${cacheId})`); + core13.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -98878,23 +98878,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); + core13.debug("Cache Paths:"); + core13.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path12.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); + core13.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { + if (core13.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); + core13.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core12.debug("Reserving Cache"); + core13.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -98905,16 +98905,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core12.warning(`Cache reservation failed: ${response.message}`); + core13.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core12.debug(`Failed to reserve cache: ${error3}`); + core13.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core12.debug(`Attempting to upload cache located at: ${archivePath}`); + core13.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -98922,7 +98922,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -98935,21 +98935,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed 
to save: ${typedError.message}`); + core13.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core12.warning(typedError.message); + core13.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); + core13.error(`Failed to save: ${typedError.message}`); } else { - core12.warning(`Failed to save: ${typedError.message}`); + core13.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + core13.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core12.info(err.message); + core13.info(err.message); } const seconds = this.getSleepAmount(); - core12.info(`Waiting ${seconds} seconds before trying again`); + core13.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core12 = __importStar2(require_core()); + var core13 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs13 = __importStar2(require("fs")); @@ -99334,8 
+99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path12.join(_getTempDirectory(), crypto2.randomUUID()); yield io6.mkdirP(path12.dirname(dest)); - core12.debug(`Downloading ${url2}`); - core12.debug(`Destination ${dest}`); + core13.debug(`Downloading ${url2}`); + core13.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core12.debug("set auth"); + core13.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core12.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core13.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs13.createWriteStream(dest)); - core12.debug("download complete"); + core13.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core12.debug("download failed"); + core13.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core12.debug(`Failed to delete '${dest}'. ${err.message}`); + core13.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core12.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core12.debug("Checking tar --version"); + core13.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core12.debug(versionOutput.trim()); + core13.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core12.isDebug() && !flags.includes("v")) { + if (core13.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core12.isDebug()) { + if (core13.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core12.debug(`Using pwsh at path: ${pwshPath}`); + core13.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core12.debug(`Using powershell at path: ${powershellPath}`); + core13.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const 
args = [file]; - if (!core12.isDebug()) { + if (!core13.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch2}`); - core12.debug(`source dir: ${sourceDir}`); + core13.debug(`Caching tool ${tool} ${version} ${arch2}`); + core13.debug(`source dir: ${sourceDir}`); if (!fs13.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch2}`); - core12.debug(`source file: ${sourceFile}`); + core13.debug(`Caching tool ${tool} ${version} ${arch2}`); + core13.debug(`source file: ${sourceFile}`); if (!fs13.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path12.join(destFolder, targetFile); - core12.debug(`destination file ${destPath}`); + core13.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path12.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core12.debug(`checking cache: ${cachePath}`); + core13.debug(`checking cache: ${cachePath}`); if (fs13.existsSync(cachePath) && fs13.existsSync(`${cachePath}.complete`)) { - core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core12.debug("not found"); + core13.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core12.debug("set auth"); + core13.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core12.debug("Invalid json"); + core13.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core12.debug(`destination ${folderPath}`); + core13.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path12.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs13.writeFileSync(markerPath, ""); - core12.debug("finished caching tool"); + core13.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core12.debug(`isExplicit: ${c}`); + core13.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core12.debug(`explicit? ${valid3}`); + core13.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core12.debug(`evaluating ${versions.length} versions`); + core13.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core12.debug(`matched: ${version}`); + core13.debug(`matched: ${version}`); } else { - core12.debug("match not found"); + core13.debug("match not found"); } return version; } @@ -103253,7 +103253,7 @@ var fs12 = __toESM(require("fs")); var path11 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/actions-util.ts @@ -110240,7 +110240,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core11 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -110382,7 +110384,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core11.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -110481,13 +110483,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core11.warning(httpError.message || GENERIC_403_MSG); + core12.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core11.warning(httpError.message || GENERIC_404_MSG); + core12.warning(httpError.message || GENERIC_404_MSG); break; default: - core11.warning(httpError.message); + core12.warning(httpError.message); break; } } @@ -110921,7 +110923,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core11.exportVariable(sentinelEnvVar, sentinelEnvVar); + core12.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { diff --git a/lib/upload-sarif-action.js b/lib/upload-sarif-action.js index af286f4dd..28befb518 100644 --- a/lib/upload-sarif-action.js +++ b/lib/upload-sarif-action.js @@ -3842,18 +3842,18 @@ var require_webidl = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? 
"" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? "" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -9849,17 +9849,17 @@ var require_api_request = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -9896,7 +9896,7 @@ var require_api_request = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -10065,17 +10065,17 @@ var require_api_stream = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -10103,7 +10103,7 @@ var require_api_stream = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -10295,7 +10295,7 @@ var require_api_pipeline = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -10304,10 +10304,10 @@ var require_api_pipeline = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10325,7 +10325,7 @@ var require_api_pipeline = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -10409,7 +10409,7 @@ var require_api_upgrade = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -10423,7 +10423,7 @@ var require_api_upgrade = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -10431,7 +10431,7 @@ var require_api_upgrade = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -10500,20 +10500,20 @@ var require_api_connect = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -10525,7 +10525,7 @@ var require_api_connect = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -21321,7 +21321,7 @@ var require_core = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = 
exports2.markdownSummary = exports2.summary = exports2.ExitCode = void 0; - exports2.exportVariable = exportVariable6; + exports2.exportVariable = exportVariable7; exports2.setSecret = setSecret; exports2.addPath = addPath; exports2.getInput = getInput2; @@ -21353,7 +21353,7 @@ var require_core = __commonJS({ ExitCode2[ExitCode2["Success"] = 0] = "Success"; ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; })(ExitCode || (exports2.ExitCode = ExitCode = {})); - function exportVariable6(name, val) { + function exportVariable7(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env["GITHUB_ENV"] || ""; @@ -24840,18 +24840,18 @@ var require_webidl2 = __commonJS({ webidl.errors.exception = function(message) { return new TypeError(`${message.header}: ${message.message}`); }; - webidl.errors.conversionFailed = function(context2) { - const plural = context2.types.length === 1 ? "" : " one of"; - const message = `${context2.argument} could not be converted to${plural}: ${context2.types.join(", ")}.`; + webidl.errors.conversionFailed = function(context3) { + const plural = context3.types.length === 1 ? 
"" : " one of"; + const message = `${context3.argument} could not be converted to${plural}: ${context3.types.join(", ")}.`; return webidl.errors.exception({ - header: context2.prefix, + header: context3.prefix, message }); }; - webidl.errors.invalidArgument = function(context2) { + webidl.errors.invalidArgument = function(context3) { return webidl.errors.exception({ - header: context2.prefix, - message: `"${context2.value}" is an invalid ${context2.type}.` + header: context3.prefix, + message: `"${context3.value}" is an invalid ${context3.type}.` }); }; webidl.brandCheck = function(V, I, opts) { @@ -30847,17 +30847,17 @@ var require_api_request2 = __commonJS({ } } } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { callback, opaque, abort, context: context2, responseHeaders, highWaterMark } = this; + const { callback, opaque, abort, context: context3, responseHeaders, highWaterMark } = this; const headers = responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -30894,7 +30894,7 @@ var require_api_request2 = __commonJS({ trailers: this.trailers, opaque, body: res, - context: context2 + context: context3 }); } } @@ -31063,17 +31063,17 @@ var require_api_stream2 = __commonJS({ } addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume, statusMessage) { - const { factory, opaque, context: context2, callback, responseHeaders } = this; + const { factory, opaque, context: context3, callback, responseHeaders } = this; const headers = responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); if (statusCode < 200) { if (this.onInfo) { @@ -31101,7 +31101,7 @@ var require_api_stream2 = __commonJS({ statusCode, headers, opaque, - context: context2 + context: context3 }); if (!res || typeof res.write !== "function" || typeof res.end !== "function" || typeof res.on !== "function") { throw new InvalidReturnValueError("expected Writable"); @@ -31293,7 +31293,7 @@ var require_api_pipeline2 = __commonJS({ this.res = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { const { ret, res } = this; if (this.reason) { abort(this.reason); @@ -31302,10 +31302,10 @@ var require_api_pipeline2 = __commonJS({ assert(!res, "pipeline cannot be retried"); assert(!ret.destroyed); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler2, context: context3 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31323,7 +31323,7 @@ var require_api_pipeline2 = __commonJS({ headers, opaque, body: this.res, - context: context2 + context: context3 }); } catch (err) { this.res.on("error", util.nop); @@ -31407,7 +31407,7 @@ var require_api_upgrade2 = __commonJS({ this.context = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; @@ -31421,7 +31421,7 @@ var require_api_upgrade2 = __commonJS({ } onUpgrade(statusCode, rawHeaders, socket) { assert(statusCode === 101); - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -31429,7 +31429,7 @@ var require_api_upgrade2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -31498,20 +31498,20 @@ var require_api_connect2 = __commonJS({ this.abort = null; addSignal(this, signal); } - onConnect(abort, context2) { + onConnect(abort, context3) { if (this.reason) { abort(this.reason); return; } assert(this.callback); this.abort = abort; - this.context = context2; + this.context = context3; } onHeaders() { throw new SocketError("bad connect", null); } onUpgrade(statusCode, rawHeaders, socket) { - const { callback, opaque, context: context2 } = this; + const { callback, opaque, context: context3 } = this; removeSignal(this); this.callback = null; let headers = rawHeaders; @@ -31523,7 +31523,7 @@ var require_api_connect2 = __commonJS({ headers, socket, opaque, - context: context2 + context: context3 }); } onError(err) { @@ -40178,8 +40178,8 @@ function isDefined(value) { function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } -function getValues(context2, operator, key, modifier) 
{ - var value = context2[key], result = []; +function getValues(context3, operator, key, modifier) { + var value = context3[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); @@ -40243,7 +40243,7 @@ function parseUrl(template) { expand: expand.bind(null, template) }; } -function expand(template, context2) { +function expand(template, context3) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, @@ -40257,7 +40257,7 @@ function expand(template, context2) { } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + values.push(getValues(context3, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; @@ -47450,7 +47450,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -47462,23 +47462,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core15.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core14.debug(`implicitDescendants '${result.implicitDescendants}'`); + core15.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = 
copy.matchDirectories; - core14.debug(`matchDirectories '${result.matchDirectories}'`); + core15.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core15.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core15.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -49106,7 +49106,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs14 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path13 = __importStar2(require("path")); @@ -49159,7 +49159,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core14.debug(`Search path '${searchPath}'`); + core15.debug(`Search path '${searchPath}'`); try { yield __await2(fs14.promises.lstat(searchPath)); } catch (err) { @@ -49234,7 +49234,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core14.debug(`Broken symlink '${item.path}'`); + core15.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -49250,7 +49250,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core15.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -49353,7 +49353,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var fs14 = __importStar2(require("fs")); var stream2 = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -49362,7 +49362,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core14.info : core14.debug; + const writeDelegate = verbose ? core15.info : core15.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? 
_d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -50753,7 +50753,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var exec = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io6 = __importStar2(require_io()); @@ -50804,7 +50804,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path13.relative(workspace, file).replace(new RegExp(`\\${path13.sep}`, "g"), "/"); - core14.debug(`Matched: ${relativeFile}`); + core15.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -50832,7 +50832,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core15.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -50843,10 +50843,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core14.debug(err.message); + core15.debug(err.message); } versionOutput = versionOutput.trim(); - core14.debug(versionOutput); + core15.debug(versionOutput); return versionOutput; }); } @@ -50854,7 +50854,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver9.clean(versionOutput); - core14.debug(`zstd version: ${version}`); + core15.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -50979,14 +50979,14 @@ function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, e var descriptor = 
descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { + var context3 = {}; + for (var p in contextIn) context3[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context3.access[p] = contextIn.access[p]; + context3.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; - var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context3); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); @@ -51713,19 +51713,19 @@ var require_logger = __commonJS({ logger: clientLogger }; } - var context2 = createLoggerContext({ + var context3 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); - exports2.TypeSpecRuntimeLogger = context2.logger; + exports2.TypeSpecRuntimeLogger = context3.logger; function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); + context3.setLogLevel(logLevel); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -55971,19 +55971,19 @@ var require_commonjs2 = __commonJS({ exports2.getLogLevel = getLogLevel; exports2.createClientLogger = createClientLogger; var logger_1 = 
require_internal(); - var context2 = (0, logger_1.createLoggerContext)({ + var context3 = (0, logger_1.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); - exports2.AzureLogger = context2.logger; + exports2.AzureLogger = context3.logger; function setLogLevel(level) { - context2.setLogLevel(level); + context3.setLogLevel(level); } function getLogLevel() { - return context2.getLogLevel(); + return context3.getLogLevel(); } function createClientLogger(namespace) { - return context2.createClientLogger(namespace); + return context3.createClientLogger(namespace); } } }); @@ -56893,14 +56893,14 @@ var require_tracingContext = __commonJS({ namespace: /* @__PURE__ */ Symbol.for("@azure/core-tracing namespace") }; function createTracingContext(options = {}) { - let context2 = new TracingContextImpl(options.parentContext); + let context3 = new TracingContextImpl(options.parentContext); if (options.span) { - context2 = context2.setValue(exports2.knownContextKeys.span, options.span); + context3 = context3.setValue(exports2.knownContextKeys.span, options.span); } if (options.namespace) { - context2 = context2.setValue(exports2.knownContextKeys.namespace, options.namespace); + context3 = context3.setValue(exports2.knownContextKeys.namespace, options.namespace); } - return context2; + return context3; } var TracingContextImpl = class _TracingContextImpl { _contextMap; @@ -57038,8 +57038,8 @@ var require_tracingClient = __commonJS({ span.end(); } } - function withContext(context2, callback, ...callbackArgs) { - return (0, instrumenter_js_1.getInstrumenter)().withContext(context2, callback, ...callbackArgs); + function withContext(context3, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context3, callback, ...callbackArgs); } function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); @@ -90510,7 +90510,7 @@ var 
require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var storage_blob_1 = require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -90552,7 +90552,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core15.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -90609,14 +90609,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core15.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core15.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -90701,7 +90701,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = 
retryHttpClientResponse; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -90759,9 +90759,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core15.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core14.debug(`${name} - Error is not retryable`); + core15.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -91020,7 +91020,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -91058,7 +91058,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core15.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -91092,7 +91092,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core15.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -91142,7 +91142,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core15.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -91153,7 +91153,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core14.debug("Unable to validate download, no Content-Length header"); + core15.debug("Unable to validate download, no Content-Length header"); } }); } @@ -91271,7 +91271,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; if (contentLength < 0) { - core14.debug("Unable to determine content length, downloading file with http-client..."); + core15.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -91361,7 +91361,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -91381,9 +91381,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core15.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -91419,12 +91419,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core14.debug(`Download concurrency: ${result.downloadConcurrency}`); - core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core14.debug(`Lookup only: ${result.lookupOnly}`); + core15.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core15.debug(`Download concurrency: ${result.downloadConcurrency}`); + core15.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core15.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core15.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core15.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -91618,7 +91618,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache4; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var http_client_1 = 
require_lib(); var auth_1 = require_auth(); var fs14 = __importStar2(require("fs")); @@ -91636,7 +91636,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url2 = `${baseUrl}_apis/artifactcache/${resource}`; - core14.debug(`Resource Url: ${url2}`); + core15.debug(`Resource Url: ${url2}`); return url2; } function createAcceptHeader(type2, apiVersion) { @@ -91664,7 +91664,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core14.isDebug()) { + if (core15.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -91677,9 +91677,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core14.setSecret(cacheDownloadUrl); - core14.debug(`Cache Result:`); - core14.debug(JSON.stringify(cacheResult)); + core15.setSecret(cacheDownloadUrl); + core15.debug(`Cache Result:`); + core15.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -91693,10 +91693,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core15.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. 
See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core15.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); } } } @@ -91739,7 +91739,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core15.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -91761,7 +91761,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core14.debug("Awaiting all uploads"); + core15.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -91804,16 +91804,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core14.debug("Upload cache"); + core15.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core14.debug("Commiting cache"); + core15.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core15.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} 
during commit cache.`); } - core14.info("Cache saved successfully"); + core15.info("Cache saved successfully"); } }); } @@ -97296,7 +97296,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache4; exports2.saveCache = saveCache4; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var path13 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -97355,7 +97355,7 @@ var require_cache5 = __commonJS({ function restoreCache4(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -97370,8 +97370,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97389,19 +97389,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -97409,16 +97409,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97429,8 +97429,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core14.debug("Resolved Keys:"); - 
core14.debug(JSON.stringify(keys)); + core15.debug("Resolved Keys:"); + core15.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -97448,30 +97448,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request2); if (!response.ok) { - core14.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + core15.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request2.key !== response.matchedKey; if (isRestoreKeyMatch) { - core14.info(`Cache hit for restore-key: ${response.matchedKey}`); + core15.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core14.info(`Cache hit for: ${response.matchedKey}`); + core15.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core14.info("Lookup only - skipping download"); + core15.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path13.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive path: ${archivePath}`); - core14.debug(`Starting download of archive to: ${archivePath}`); + core15.debug(`Archive path: ${archivePath}`); + core15.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core14.isDebug()) { + core15.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, 
compressionMethod); - core14.info("Cache restored successfully"); + core15.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -97479,9 +97479,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to restore: ${error3.message}`); + core15.error(`Failed to restore: ${error3.message}`); } else { - core14.warning(`Failed to restore: ${error3.message}`); + core15.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -97490,7 +97490,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -97499,7 +97499,7 @@ var require_cache5 = __commonJS({ function saveCache4(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core14.debug(`Cache service version: ${cacheServiceVersion}`); + core15.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -97517,26 +97517,26 @@ var require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = 
path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -97549,26 +97549,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core14.debug(`Saving Cache (ID: ${cacheId})`); + core15.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -97581,23 +97581,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core14.debug("Cache Paths:"); - core14.debug(`${JSON.stringify(cachePaths)}`); + core15.debug("Cache Paths:"); + core15.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path13.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core14.debug(`Archive Path: ${archivePath}`); + core15.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core14.isDebug()) { + if (core15.isDebug()) { yield (0, 
tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core14.debug(`File Size: ${archiveFileSize}`); + core15.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core14.debug("Reserving Cache"); + core15.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request2 = { key, @@ -97608,16 +97608,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request2); if (!response.ok) { if (response.message) { - core14.warning(`Cache reservation failed: ${response.message}`); + core15.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core14.debug(`Failed to reserve cache: ${error3}`); + core15.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core14.debug(`Attempting to upload cache located at: ${archivePath}`); + core15.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -97625,7 +97625,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core15.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -97638,21 +97638,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core14.info(`Failed 
to save: ${typedError.message}`); + core15.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core14.warning(typedError.message); + core15.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core14.error(`Failed to save: ${typedError.message}`); + core15.error(`Failed to save: ${typedError.message}`); } else { - core14.warning(`Failed to save: ${typedError.message}`); + core15.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core14.debug(`Failed to delete archive: ${error3}`); + core15.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -99176,7 +99176,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -99199,10 +99199,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core14.info(err.message); + core15.info(err.message); } const seconds = this.getSleepAmount(); - core14.info(`Waiting ${seconds} seconds before trying again`); + core15.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -99305,7 +99305,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core14 = __importStar2(require_core()); + var core15 = __importStar2(require_core()); var io6 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs14 = __importStar2(require("fs")); @@ -99334,8 
+99334,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path13.join(_getTempDirectory(), crypto2.randomUUID()); yield io6.mkdirP(path13.dirname(dest)); - core14.debug(`Downloading ${url2}`); - core14.debug(`Destination ${dest}`); + core15.debug(`Downloading ${url2}`); + core15.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -99361,7 +99361,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -99370,7 +99370,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url2, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core14.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core15.debug(`Failed to download from "${url2}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream2.pipeline); @@ -99379,16 +99379,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs14.createWriteStream(dest)); - core14.debug("download complete"); + core15.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core14.debug("download failed"); + core15.debug("download failed"); try { yield io6.rmRF(dest); } catch (err) { - core14.debug(`Failed to delete '${dest}'. ${err.message}`); + core15.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -99403,7 +99403,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core14.isDebug() ? 
"-bb1" : "-bb0"; + const logLevel = core15.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -99456,7 +99456,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core14.debug("Checking tar --version"); + core15.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -99466,7 +99466,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core14.debug(versionOutput.trim()); + core15.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -99474,7 +99474,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core14.isDebug() && !flags.includes("v")) { + if (core15.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -99505,7 +99505,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core14.isDebug()) { + if (core15.isDebug()) { args.push("-v"); } const xarPath = yield io6.which("xar", true); @@ -99548,7 +99548,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core14.debug(`Using pwsh at path: ${pwshPath}`); + core15.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -99568,7 +99568,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io6.which("powershell", true); - core14.debug(`Using powershell at path: ${powershellPath}`); + core15.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -99577,7 +99577,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io6.which("unzip", true); const 
args = [file]; - if (!core14.isDebug()) { + if (!core15.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -99588,8 +99588,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source dir: ${sourceDir}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source dir: ${sourceDir}`); if (!fs14.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -99606,14 +99606,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver9.clean(version) || version; arch2 = arch2 || os3.arch(); - core14.debug(`Caching tool ${tool} ${version} ${arch2}`); - core14.debug(`source file: ${sourceFile}`); + core15.debug(`Caching tool ${tool} ${version} ${arch2}`); + core15.debug(`source file: ${sourceFile}`); if (!fs14.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch2); const destPath = path13.join(destFolder, targetFile); - core14.debug(`destination file ${destPath}`); + core15.debug(`destination file ${destPath}`); yield io6.cp(sourceFile, destPath); _completeToolPath(tool, version, arch2); return destFolder; @@ -99636,12 +99636,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver9.clean(versionSpec) || ""; const cachePath = path13.join(_getCacheDirectory(), toolName, versionSpec, arch2); - core14.debug(`checking cache: ${cachePath}`); + core15.debug(`checking cache: ${cachePath}`); if (fs14.existsSync(cachePath) && fs14.existsSync(`${cachePath}.complete`)) { - core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); + core15.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch2}`); toolPath = cachePath; } else { - 
core14.debug("not found"); + core15.debug("not found"); } } return toolPath; @@ -99670,7 +99670,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core14.debug("set auth"); + core15.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -99691,7 +99691,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core14.debug("Invalid json"); + core15.debug("Invalid json"); } } return releases; @@ -99715,7 +99715,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch2) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); - core14.debug(`destination ${folderPath}`); + core15.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io6.rmRF(folderPath); yield io6.rmRF(markerPath); @@ -99727,18 +99727,18 @@ var require_tool_cache = __commonJS({ const folderPath = path13.join(_getCacheDirectory(), tool, semver9.clean(version) || version, arch2 || ""); const markerPath = `${folderPath}.complete`; fs14.writeFileSync(markerPath, ""); - core14.debug("finished caching tool"); + core15.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver9.clean(versionSpec) || ""; - core14.debug(`isExplicit: ${c}`); + core15.debug(`isExplicit: ${c}`); const valid3 = semver9.valid(c) != null; - core14.debug(`explicit? ${valid3}`); + core15.debug(`explicit? 
${valid3}`); return valid3; } function evaluateVersions(versions, versionSpec) { let version = ""; - core14.debug(`evaluating ${versions.length} versions`); + core15.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver9.gt(a, b)) { return 1; @@ -99754,9 +99754,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core14.debug(`matched: ${version}`); + core15.debug(`matched: ${version}`); } else { - core14.debug("match not found"); + core15.debug("match not found"); } return version; } @@ -103228,7 +103228,7 @@ var require_sarif_schema_2_1_0 = __commonJS({ }); // src/upload-sarif-action.ts -var core13 = __toESM(require_core()); +var core14 = __toESM(require_core()); // src/actions-util.ts var fs2 = __toESM(require("fs")); @@ -107953,7 +107953,7 @@ var fs13 = __toESM(require("fs")); var path12 = __toESM(require("path")); var url = __toESM(require("url")); var import_zlib = __toESM(require("zlib")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); var jsonschema2 = __toESM(require_lib2()); // src/codeql.ts @@ -110903,7 +110903,9 @@ async function addFingerprints(sarifLog, sourceRoot, logger) { } // src/init.ts +var core12 = __toESM(require_core()); var toolrunner4 = __toESM(require_toolrunner()); +var github2 = __toESM(require_github()); var io5 = __toESM(require_io()); async function initCodeQL(toolsInput, apiDetails, tempDir, variant, defaultCliVersion, features, logger) { logger.startGroup("Setup CodeQL tools"); @@ -110974,7 +110976,7 @@ async function combineSarifFilesUsingCLI(sarifFiles, gitHubVersion, features, lo logger.warning( `Uploading multiple SARIF runs with the same category is deprecated ${deprecationWarningMessage}. Please update your workflow to upload a single run per category. 
${deprecationMoreInformationMessage}` ); - core12.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); + core13.exportVariable("CODEQL_MERGE_SARIF_DEPRECATION_WARNING", "true"); } return combineSarifFiles(sarifFiles, logger); } @@ -111073,13 +111075,13 @@ async function uploadPayload(payload, repositoryNwo, logger, analysis) { if (httpError !== void 0) { switch (httpError.status) { case 403: - core12.warning(httpError.message || GENERIC_403_MSG); + core13.warning(httpError.message || GENERIC_403_MSG); break; case 404: - core12.warning(httpError.message || GENERIC_404_MSG); + core13.warning(httpError.message || GENERIC_404_MSG); break; default: - core12.warning(httpError.message); + core13.warning(httpError.message); break; } } @@ -111466,7 +111468,7 @@ function validateUniqueCategory(sarifLog, sentinelPrefix) { `Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job per tool/category. The easiest fix is to specify a unique value for the \`category\` input. If .runs[].automationDetails.id is specified in the sarif file, that will take precedence over your configured \`category\`. Category: (${id ? id : "none"}) Tool: (${tool ? tool : "none"})` ); } - core12.exportVariable(sentinelEnvVar, sentinelEnvVar); + core13.exportVariable(sentinelEnvVar, sentinelEnvVar); } } function sanitize(str2) { @@ -111603,11 +111605,11 @@ async function run(startedAt) { } const codeScanningResult = uploadResults["code-scanning" /* CodeScanning */]; if (codeScanningResult !== void 0) { - core13.setOutput("sarif-id", codeScanningResult.sarifID); + core14.setOutput("sarif-id", codeScanningResult.sarifID); } - core13.setOutput("sarif-ids", JSON.stringify(uploadResults)); + core14.setOutput("sarif-ids", JSON.stringify(uploadResults)); if (shouldSkipSarifUpload()) { - core13.debug( + core14.debug( "SARIF upload disabled by an environment variable. Waiting for processing is disabled." 
); } else if (getRequiredInput("wait-for-processing") === "true") { @@ -111627,7 +111629,7 @@ async function run(startedAt) { } catch (unwrappedError) { const error3 = isThirdPartyAnalysis("upload-sarif" /* UploadSarif */) && unwrappedError instanceof InvalidSarifUploadError ? new ConfigurationError(unwrappedError.message) : wrapError(unwrappedError); const message = error3.message; - core13.setFailed(message); + core14.setFailed(message); const errorStatusReportBase = await createStatusReportBase( "upload-sarif" /* UploadSarif */, getActionsStatus(error3), @@ -111650,7 +111652,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core13.setFailed( + core14.setFailed( `codeql/upload-sarif action failed: ${getErrorMessage(error3)}` ); await sendUnhandledErrorStatusReport( diff --git a/src/environment.ts b/src/environment.ts index 03ac727f3..ed44ddcff 100644 --- a/src/environment.ts +++ b/src/environment.ts @@ -50,6 +50,12 @@ export enum EnvVar { /** Whether the deprecation warning for file coverage on PRs has been logged. */ DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION = "CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION", + /** + * Set to `true` to opt out of the upcoming change that skips file coverage + * information on pull requests. + */ + FILE_COVERAGE_ON_PRS = "CODEQL_ACTION_FILE_COVERAGE_ON_PRS", + /** Whether the error for a deprecated version of the CodeQL Action was logged. 
*/ LOG_VERSION_DEPRECATION = "CODEQL_ACTION_DID_LOG_VERSION_DEPRECATION", diff --git a/src/init-action.ts b/src/init-action.ts index 76dd0bdba..56902a437 100644 --- a/src/init-action.ts +++ b/src/init-action.ts @@ -48,6 +48,7 @@ import { checkPacksForOverlayCompatibility, cleanupDatabaseClusterDirectory, getFileCoverageInformationEnabled, + logFileCoverageOnPrsDeprecationWarning, initCodeQL, initConfig, runDatabaseInitCluster, @@ -409,21 +410,8 @@ async function run(startedAt: Date) { ); } - if ( - fileCoverageResult.showDeprecationWarning && - !process.env[EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION] - ) { - logger.warning( - "Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests " + - "to improve analysis performance. File coverage information will still be computed on non-PR analyses. " + - "Repositories owned by an organization can opt out of this change by creating a custom repository property " + - 'with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then setting this property to ' + - "`true` in the repository's settings.", - ); - core.exportVariable( - EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION, - "true", - ); + if (fileCoverageResult.showDeprecationWarning) { + logFileCoverageOnPrsDeprecationWarning(logger); } await checkInstallPython311(config.languages, codeql); diff --git a/src/init.test.ts b/src/init.test.ts index 0d5a5783c..1f1f0de8d 100644 --- a/src/init.test.ts +++ b/src/init.test.ts @@ -1,6 +1,8 @@ import * as fs from "fs"; import path from "path"; +import * as core from "@actions/core"; +import * as github from "@actions/github"; import test, { ExecutionContext } from "ava"; import * as sinon from "sinon"; @@ -11,6 +13,7 @@ import { checkPacksForOverlayCompatibility, cleanupDatabaseClusterDirectory, getFileCoverageInformationEnabled, + logFileCoverageOnPrsDeprecationWarning, } from "./init"; import { KnownLanguage } from "./languages"; import { @@ -516,6 +519,24 @@ 
test.serial( }, ); +test.serial( + "file coverage information enabled when env var opt-out is set", + async (t) => { + sinon.stub(actionsUtil, "isAnalyzingPullRequest").returns(true); + process.env["CODEQL_ACTION_FILE_COVERAGE_ON_PRS"] = "true"; + + const result = await getFileCoverageInformationEnabled( + false, // debugMode + createStubCodeQL({}), + createFeatures([Feature.SkipFileCoverageOnPrs]), + {}, + ); + t.true(result.enabled); + t.false(result.enabledByRepositoryProperty); + t.false(result.showDeprecationWarning); + }, +); + test.serial( "file coverage information disabled when all conditions for skipping are met", async (t) => { @@ -532,3 +553,163 @@ test.serial( t.false(result.showDeprecationWarning); }, ); + +test.serial( + "file coverage deprecation warning for org-owned repo with default setup recommends repo property", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(true); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-org", type: "Organization" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. 
File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, create a custom repository property " + + 'with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to ' + + "`true` in the repository's settings.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for org-owned repo with advanced setup recommends env var and repo property", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(false); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-org", type: "Organization" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`. 
" + + "Alternatively, create a custom repository property " + + 'with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to ' + + "`true` in the repository's settings.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for user-owned repo with default setup recommends advanced setup", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(true); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-user", type: "User" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, switch to an advanced setup workflow and " + + "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for user-owned repo with advanced setup recommends env var", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(false); + github.context.payload = { + repository: { + name: "test-repo", + owner: { login: "test-user", type: "User" }, + }, + }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve 
analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for unknown owner type with default setup recommends advanced setup", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(true); + github.context.payload = { repository: undefined }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, switch to an advanced setup workflow and " + + "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "file coverage deprecation warning for unknown owner type with advanced setup recommends env var", + (t) => { + const exportVariableStub = sinon.stub(core, "exportVariable"); + sinon.stub(actionsUtil, "isDefaultSetup").returns(false); + github.context.payload = { repository: undefined }; + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 1); + t.is(messages[0].type, "warning"); + t.is( + messages[0].message, + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. 
File coverage information will still be computed on non-PR analyses.\n\n" + + "To opt out of this change, set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`.", + ); + t.true(exportVariableStub.calledOnce); + }, +); + +test.serial( + "logFileCoverageOnPrsDeprecationWarning does not log if already logged", + (t) => { + process.env["CODEQL_ACTION_DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION"] = + "true"; + const exportVariableStub = sinon.stub(core, "exportVariable"); + const messages: LoggedMessage[] = []; + logFileCoverageOnPrsDeprecationWarning(getRecordingLogger(messages)); + t.is(messages.length, 0); + t.true(exportVariableStub.notCalled); + }, +); diff --git a/src/init.ts b/src/init.ts index c4b219afc..8e3fa21a4 100644 --- a/src/init.ts +++ b/src/init.ts @@ -1,18 +1,22 @@ import * as fs from "fs"; import * as path from "path"; +import * as core from "@actions/core"; import * as toolrunner from "@actions/exec/lib/toolrunner"; +import * as github from "@actions/github"; import * as io from "@actions/io"; import * as yaml from "js-yaml"; import { getOptionalInput, isAnalyzingPullRequest, + isDefaultSetup, isSelfHostedRunner, } from "./actions-util"; import { GitHubApiDetails } from "./api-client"; import { CodeQL, setupCodeQL } from "./codeql"; import * as configUtils from "./config-utils"; +import { EnvVar } from "./environment"; import { CodeQLDefaultVersionInfo, Feature, @@ -329,6 +333,18 @@ export async function getFileCoverageInformationEnabled( showDeprecationWarning: false, }; } + // If the user has explicitly opted out via an environment variable, don't + // show the deprecation warning. + if ( + (process.env[EnvVar.FILE_COVERAGE_ON_PRS] || "").toLocaleLowerCase() === + "true" + ) { + return { + enabled: true, + enabledByRepositoryProperty: false, + showDeprecationWarning: false, + }; + } // Allow repositories to opt in to file coverage information on PRs // using a repository property. 
In this case, don't show the deprecation // warning since the repository has explicitly opted in. @@ -358,3 +374,45 @@ export async function getFileCoverageInformationEnabled( showDeprecationWarning: false, }; } + +/** + * Log a warning about the deprecation of file coverage information on PRs, including how to opt + * back in via an environment variable or repository property. + */ +export function logFileCoverageOnPrsDeprecationWarning(logger: Logger): void { + if (process.env[EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION]) { + return; + } + + const repositoryOwnerType: string | undefined = + github.context.payload.repository?.owner.type; + + let message = + "Starting April 2026, the CodeQL Action will skip computing file coverage information on pull requests " + + "to improve analysis performance. File coverage information will still be computed on non-PR analyses."; + const envVarOptOut = + "set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true`."; + const repoPropertyOptOut = + "create a custom repository property with the name " + + '`github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to ' + + "`true` in the repository's settings."; + + if (repositoryOwnerType === "Organization") { + // Org-owned repo: can use the repository property + if (isDefaultSetup()) { + message += `\n\nTo opt out of this change, ${repoPropertyOptOut}`; + } else { + message += `\n\nTo opt out of this change, ${envVarOptOut} Alternatively, ${repoPropertyOptOut}`; + } + } else if (isDefaultSetup()) { + // User-owned repo on default setup: no repo property available and + // no way to set env vars, so need to switch to advanced setup. 
+ message += `\n\nTo opt out of this change, switch to an advanced setup workflow and ${envVarOptOut}`; + } else { + // User-owned repo on advanced setup: can set the env var + message += `\n\nTo opt out of this change, ${envVarOptOut}`; + } + + logger.warning(message); + core.exportVariable(EnvVar.DID_LOG_FILE_COVERAGE_ON_PRS_DEPRECATION, "true"); +} From e07c3055d7ce4ff908b2217e7efe01532309c5c9 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 15:43:28 +0000 Subject: [PATCH 41/50] Tweak changelog formatting --- CHANGELOG.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b53a6674..c91166805 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,14 +4,12 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th ## [UNRELEASED] -- Upcoming change: Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Pull request analyses will log a warning about this upcoming change. +- Upcoming change: Starting April 2026, the CodeQL Action will skip collecting file coverage information on pull requests to improve analysis performance. File coverage information will still be computed on non-PR analyses. Pull request analyses will log a warning about this upcoming change. [#3562](https://github.com/github/codeql-action/pull/3562) To opt out of this change: - **Repositories owned by an organization:** Create a custom repository property with the name `github-codeql-file-coverage-on-prs` and the type "True/false", then set this property to `true` in the repository's settings. For more information, see [Managing custom properties for repositories in your organization](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization). 
Alternatively, if you are using an advanced setup workflow, you can set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. - **User-owned repositories using default setup:** Switch to an advanced setup workflow and set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. - **User-owned repositories using advanced setup:** Set the `CODEQL_ACTION_FILE_COVERAGE_ON_PRS` environment variable to `true` in your workflow. - - [#3562](https://github.com/github/codeql-action/pull/3562) - Fixed [a bug](https://github.com/github/codeql-action/issues/3555) which caused the CodeQL Action to fail loading repository properties if a "Multi select" repository property was configured for the repository. [#3557](https://github.com/github/codeql-action/pull/3557) - The CodeQL Action now loads [custom repository properties](https://docs.github.com/en/organizations/managing-organization-settings/managing-custom-properties-for-repositories-in-your-organization) on GitHub Enterprise Server, enabling the customization of features such as `github-codeql-disable-overlay` that was previously only available on GitHub.com. 
[#3559](https://github.com/github/codeql-action/pull/3559) From ee5ede79f765f2577bb55472e38588e585182710 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 15:51:28 +0000 Subject: [PATCH 42/50] Address review comments --- lib/analyze-action.js | 7 ++++++- src/database-upload.test.ts | 18 +++++++++++++++--- src/database-upload.ts | 7 ++++++- 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index cb0bf128f..da459df0f 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -111001,7 +111001,12 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai } catch (e) { const httpError = asHTTPError(e); const isRetryable = !httpError || !DO_NOT_RETRY_STATUSES.includes(httpError.status); - if (!isRetryable || attempt === maxAttempts) { + if (!isRetryable) { + throw e; + } else if (attempt === maxAttempts) { + logger.error( + `Maximum retry attempts exhausted (${attempt}), aborting database upload` + ); throw e; } const backoffMs = 15e3 * Math.pow(2, attempt - 1); diff --git a/src/database-upload.test.ts b/src/database-upload.test.ts index 8bd22091c..ecd0eb8c6 100644 --- a/src/database-upload.test.ts +++ b/src/database-upload.test.ts @@ -225,7 +225,7 @@ test.serial( .returns("true"); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - await mockHttpRequests(422); + const databaseUploadSpy = await mockHttpRequests(422); const loggedMessages = [] as LoggedMessage[]; await cleanupAndUploadDatabases( @@ -245,6 +245,9 @@ test.serial( "Failed to upload database for javascript: some error message", ) !== undefined, ); + + // Non-retryable errors should not be retried. 
+ t.is(databaseUploadSpy.callCount, 1); }); }, ); @@ -260,11 +263,11 @@ test.serial( .returns("true"); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - await mockHttpRequests(500); + const databaseUploadSpy = await mockHttpRequests(500); // Stub setTimeout to fire immediately to avoid real delays from retry backoff. const originalSetTimeout = global.setTimeout; - sinon + const setTimeoutStub = sinon .stub(global, "setTimeout") .callsFake((fn: () => void) => originalSetTimeout(fn, 0)); @@ -286,6 +289,15 @@ test.serial( "Failed to upload database for javascript: some error message", ) !== undefined, ); + + // Retryable errors should be retried the expected number of times. + t.is(databaseUploadSpy.callCount, 4); + + // setTimeout should have been called with the expected backoff delays. + const setTimeoutDelays = setTimeoutStub.args.map( + (args) => args[1] as number, + ); + t.deepEqual(setTimeoutDelays, [15_000, 30_000, 60_000]); }); }, ); diff --git a/src/database-upload.ts b/src/database-upload.ts index b9e2f5b06..8d7d49551 100644 --- a/src/database-upload.ts +++ b/src/database-upload.ts @@ -147,7 +147,12 @@ export async function cleanupAndUploadDatabases( const httpError = asHTTPError(e); const isRetryable = !httpError || !DO_NOT_RETRY_STATUSES.includes(httpError.status); - if (!isRetryable || attempt === maxAttempts) { + if (!isRetryable) { + throw e; + } else if (attempt === maxAttempts) { + logger.error( + `Maximum retry attempts exhausted (${attempt}), aborting database upload`, + ); throw e; } const backoffMs = 15_000 * Math.pow(2, attempt - 1); // 15s, 30s, 60s From cf972cde0e8c119c3d1898bf01c1dee40c129932 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 15:52:14 +0000 Subject: [PATCH 43/50] Update database upload tests to use `checkExpectedLogMessages` --- src/database-upload.test.ts | 102 ++++++++++++------------------------ 1 file changed, 33 insertions(+), 69 deletions(-) diff --git a/src/database-upload.test.ts 
b/src/database-upload.test.ts index ecd0eb8c6..c4ac59e76 100644 --- a/src/database-upload.test.ts +++ b/src/database-upload.test.ts @@ -15,6 +15,7 @@ import * as gitUtils from "./git-utils"; import { KnownLanguage } from "./languages"; import { RepositoryNwo } from "./repository"; import { + checkExpectedLogMessages, createFeatures, createTestConfig, getRecordingLogger, @@ -93,7 +94,7 @@ test.serial( .returns("false"); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(true); - const loggedMessages = []; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -102,14 +103,9 @@ test.serial( createFeatures([]), getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "debug" && - v.message === - "Database upload disabled in workflow. Skipping upload.", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Database upload disabled in workflow. Skipping upload.", + ]); }); }, ); @@ -127,7 +123,7 @@ test.serial( await mockHttpRequests(201); - const loggedMessages = []; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -139,14 +135,9 @@ test.serial( createFeatures([]), getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "debug" && - v.message === - "Not uploading database because 'analysis-kinds: code-scanning' is not enabled.", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Not uploading database because 'analysis-kinds: code-scanning' is not enabled.", + ]); }); }, ); @@ -163,7 +154,7 @@ test.serial("Abort database upload if running against GHES", async (t) => { const config = getTestConfig(tmpDir); config.gitHubVersion = { type: GitHubVariant.GHES, version: "3.0" }; - const loggedMessages = []; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, 
getCodeQL(), @@ -172,14 +163,9 @@ test.serial("Abort database upload if running against GHES", async (t) => { createFeatures([]), getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "debug" && - v.message === - "Not running against github.com or GHEC-DR. Skipping upload.", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Not running against github.com or GHEC-DR. Skipping upload.", + ]); }); }); @@ -194,7 +180,7 @@ test.serial( .returns("true"); sinon.stub(gitUtils, "isAnalyzingDefaultBranch").resolves(false); - const loggedMessages = []; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -203,13 +189,9 @@ test.serial( createFeatures([]), getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v: LoggedMessage) => - v.type === "debug" && - v.message === "Not analyzing default branch. Skipping upload.", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Not analyzing default branch. Skipping upload.", + ]); }); }, ); @@ -227,7 +209,7 @@ test.serial( const databaseUploadSpy = await mockHttpRequests(422); - const loggedMessages = [] as LoggedMessage[]; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -237,14 +219,9 @@ test.serial( getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v) => - v.type === "warning" && - v.message === - "Failed to upload database for javascript: some error message", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Failed to upload database for javascript: some error message", + ]); // Non-retryable errors should not be retried. 
t.is(databaseUploadSpy.callCount, 1); @@ -271,7 +248,7 @@ test.serial( .stub(global, "setTimeout") .callsFake((fn: () => void) => originalSetTimeout(fn, 0)); - const loggedMessages = [] as LoggedMessage[]; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -281,14 +258,9 @@ test.serial( getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v) => - v.type === "warning" && - v.message === - "Failed to upload database for javascript: some error message", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Failed to upload database for javascript: some error message", + ]); // Retryable errors should be retried the expected number of times. t.is(databaseUploadSpy.callCount, 4); @@ -313,7 +285,7 @@ test.serial("Successfully uploading a database to github.com", async (t) => { await mockHttpRequests(201); - const loggedMessages = [] as LoggedMessage[]; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -322,13 +294,9 @@ test.serial("Successfully uploading a database to github.com", async (t) => { createFeatures([]), getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v) => - v.type === "debug" && - v.message === "Successfully uploaded database for javascript", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Successfully uploaded database for javascript", + ]); }); }); @@ -343,7 +311,7 @@ test.serial("Successfully uploading a database to GHEC-DR", async (t) => { const databaseUploadSpy = await mockHttpRequests(201); - const loggedMessages = [] as LoggedMessage[]; + const loggedMessages: LoggedMessage[] = []; await cleanupAndUploadDatabases( testRepoName, getCodeQL(), @@ -356,13 +324,9 @@ test.serial("Successfully uploading a database to GHEC-DR", async (t) => { createFeatures([]), getRecordingLogger(loggedMessages), ); - t.assert( - loggedMessages.find( - (v) => 
- v.type === "debug" && - v.message === "Successfully uploaded database for javascript", - ) !== undefined, - ); + checkExpectedLogMessages(t, loggedMessages, [ + "Successfully uploaded database for javascript", + ]); t.assert( databaseUploadSpy.calledOnceWith( sinon.match.string, From a63886bff563ff37363fa5140c38d3d121b5e1a6 Mon Sep 17 00:00:00 2001 From: Henry Mercer Date: Tue, 10 Mar 2026 16:36:02 +0000 Subject: [PATCH 44/50] Refactor: Extract separate function for `uploadBundledDatabase` --- lib/analyze-action.js | 75 +++++++++++++++++++------------- src/database-upload.ts | 98 ++++++++++++++++++++++++++---------------- 2 files changed, 106 insertions(+), 67 deletions(-) diff --git a/lib/analyze-action.js b/lib/analyze-action.js index da459df0f..c84eec3a5 100644 --- a/lib/analyze-action.js +++ b/lib/analyze-action.js @@ -110952,13 +110952,6 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai await withGroupAsync("Cleaning up databases", async () => { await codeql.databaseCleanupCluster(config, cleanupLevel); }); - const client = getApiClient(); - const uploadsUrl = new URL(parseGitHubUrl(apiDetails.url)); - uploadsUrl.hostname = `uploads.${uploadsUrl.hostname}`; - let uploadsBaseUrl = uploadsUrl.toString(); - if (uploadsBaseUrl.endsWith("/")) { - uploadsBaseUrl = uploadsBaseUrl.slice(0, -1); - } const reports = []; for (const language of config.languages) { let bundledDbSize = void 0; @@ -110973,30 +110966,15 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai const maxAttempts = 4; let uploadDurationMs; for (let attempt = 1; attempt <= maxAttempts; attempt++) { - const bundledDbReadStream = fs13.createReadStream(bundledDb); try { - const attemptStartTime = performance.now(); - await client.request( - `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, - { - baseUrl: uploadsBaseUrl, - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - 
language, - name: `${language}-database`, - commit_oid: commitOid, - data: bundledDbReadStream, - headers: { - authorization: `token ${apiDetails.auth}`, - "Content-Type": "application/zip", - "Content-Length": bundledDbSize - }, - request: { - retries: 0 - } - } + uploadDurationMs = await uploadBundledDatabase( + repositoryNwo, + language, + commitOid, + bundledDb, + bundledDbSize, + apiDetails ); - uploadDurationMs = performance.now() - attemptStartTime; break; } catch (e) { const httpError = asHTTPError(e); @@ -111014,8 +110992,6 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai `Database upload attempt ${attempt} of ${maxAttempts} failed for ${language}: ${getErrorMessage(e)}. Retrying in ${backoffMs / 1e3}s...` ); await new Promise((resolve8) => setTimeout(resolve8, backoffMs)); - } finally { - bundledDbReadStream.close(); } } reports.push({ @@ -111038,6 +111014,43 @@ async function cleanupAndUploadDatabases(repositoryNwo, codeql, config, apiDetai } return reports; } +async function uploadBundledDatabase(repositoryNwo, language, commitOid, bundledDb, bundledDbSize, apiDetails) { + const client = getApiClient(); + const uploadsUrl = new URL(parseGitHubUrl(apiDetails.url)); + uploadsUrl.hostname = `uploads.${uploadsUrl.hostname}`; + let uploadsBaseUrl = uploadsUrl.toString(); + if (uploadsBaseUrl.endsWith("/")) { + uploadsBaseUrl = uploadsBaseUrl.slice(0, -1); + } + const bundledDbReadStream = fs13.createReadStream(bundledDb); + try { + const startTime = performance.now(); + await client.request( + `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, + { + baseUrl: uploadsBaseUrl, + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + language, + name: `${language}-database`, + commit_oid: commitOid, + data: bundledDbReadStream, + headers: { + authorization: `token ${apiDetails.auth}`, + "Content-Type": "application/zip", + "Content-Length": bundledDbSize + }, + // Disable 
`octokit/plugin-retry.js`, since the request body is a ReadStream which can only be consumed once. + request: { + retries: 0 + } + } + ); + return performance.now() - startTime; + } finally { + bundledDbReadStream.close(); + } +} // src/status-report.ts var os4 = __toESM(require("os")); diff --git a/src/database-upload.ts b/src/database-upload.ts index 8d7d49551..c7db68fc3 100644 --- a/src/database-upload.ts +++ b/src/database-upload.ts @@ -85,18 +85,6 @@ export async function cleanupAndUploadDatabases( await codeql.databaseCleanupCluster(config, cleanupLevel); }); - const client = getApiClient(); - - const uploadsUrl = new URL(parseGitHubUrl(apiDetails.url)); - uploadsUrl.hostname = `uploads.${uploadsUrl.hostname}`; - - // Octokit expects the baseUrl to not have a trailing slash, - // but it is included by default in a URL. - let uploadsBaseUrl = uploadsUrl.toString(); - if (uploadsBaseUrl.endsWith("/")) { - uploadsBaseUrl = uploadsBaseUrl.slice(0, -1); - } - const reports: DatabaseUploadResult[] = []; for (const language of config.languages) { let bundledDbSize: number | undefined = undefined; @@ -118,30 +106,15 @@ export async function cleanupAndUploadDatabases( const maxAttempts = 4; // 1 initial attempt + 3 retries, identical to the default retry behavior of Octokit let uploadDurationMs: number | undefined; for (let attempt = 1; attempt <= maxAttempts; attempt++) { - const bundledDbReadStream = fs.createReadStream(bundledDb); try { - const attemptStartTime = performance.now(); - await client.request( - `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, - { - baseUrl: uploadsBaseUrl, - owner: repositoryNwo.owner, - repo: repositoryNwo.repo, - language, - name: `${language}-database`, - commit_oid: commitOid, - data: bundledDbReadStream, - headers: { - authorization: `token ${apiDetails.auth}`, - "Content-Type": "application/zip", - "Content-Length": bundledDbSize, - }, - request: { - retries: 0, - }, - }, + 
uploadDurationMs = await uploadBundledDatabase( + repositoryNwo, + language, + commitOid, + bundledDb, + bundledDbSize, + apiDetails, ); - uploadDurationMs = performance.now() - attemptStartTime; break; } catch (e) { const httpError = asHTTPError(e); @@ -160,8 +133,6 @@ export async function cleanupAndUploadDatabases( `Database upload attempt ${attempt} of ${maxAttempts} failed for ${language}: ${util.getErrorMessage(e)}. Retrying in ${backoffMs / 1000}s...`, ); await new Promise((resolve) => setTimeout(resolve, backoffMs)); - } finally { - bundledDbReadStream.close(); } } reports.push({ @@ -187,3 +158,58 @@ export async function cleanupAndUploadDatabases( } return reports; } + +/** + * Uploads a bundled database to the GitHub API. + * + * @returns the duration of the upload in milliseconds + */ +async function uploadBundledDatabase( + repositoryNwo: RepositoryNwo, + language: string, + commitOid: string, + bundledDb: string, + bundledDbSize: number, + apiDetails: GitHubApiDetails, +): Promise { + const client = getApiClient(); + + const uploadsUrl = new URL(parseGitHubUrl(apiDetails.url)); + uploadsUrl.hostname = `uploads.${uploadsUrl.hostname}`; + + // Octokit expects the baseUrl to not have a trailing slash, + // but it is included by default in a URL. 
+ let uploadsBaseUrl = uploadsUrl.toString(); + if (uploadsBaseUrl.endsWith("/")) { + uploadsBaseUrl = uploadsBaseUrl.slice(0, -1); + } + + const bundledDbReadStream = fs.createReadStream(bundledDb); + try { + const startTime = performance.now(); + await client.request( + `POST /repos/:owner/:repo/code-scanning/codeql/databases/:language?name=:name&commit_oid=:commit_oid`, + { + baseUrl: uploadsBaseUrl, + owner: repositoryNwo.owner, + repo: repositoryNwo.repo, + language, + name: `${language}-database`, + commit_oid: commitOid, + data: bundledDbReadStream, + headers: { + authorization: `token ${apiDetails.auth}`, + "Content-Type": "application/zip", + "Content-Length": bundledDbSize, + }, + // Disable `octokit/plugin-retry.js`, since the request body is a ReadStream which can only be consumed once. + request: { + retries: 0, + }, + }, + ); + return performance.now() - startTime; + } finally { + bundledDbReadStream.close(); + } +} From b9b42bed94c61b0b7e3a9217d9d403ff9be6f23d Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Wed, 11 Mar 2026 11:55:16 +0000 Subject: [PATCH 45/50] Remove last use of `installPython` - Add explicit `setup-python` step with condition to the workflow that was still using it - This allows simplifying the logic in `sync.ts` --- .../workflows/__multi-language-autodetect.yml | 25 +++++++------------ .../checks/multi-language-autodetect.yml | 9 ++++++- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/__multi-language-autodetect.yml b/.github/workflows/__multi-language-autodetect.yml index afd322785..e1fea295e 100644 --- a/.github/workflows/__multi-language-autodetect.yml +++ b/.github/workflows/__multi-language-autodetect.yml @@ -35,11 +35,6 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' workflow_call: inputs: dotnet-version: @@ -52,17 +47,12 @@ on: description: The version of Go to install required: false default: '>=1.21.0' - python-version: - type: string - description: The version of Python to install - required: false - default: '3.13' defaults: run: shell: bash concurrency: cancel-in-progress: ${{ github.event_name == 'pull_request' || false }} - group: multi-language-autodetect-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}}-${{inputs.python-version}} + group: multi-language-autodetect-${{github.ref}}-${{inputs.dotnet-version}}-${{inputs.go-version}} jobs: multi-language-autodetect: strategy: @@ -124,11 +114,6 @@ jobs: with: go-version: ${{ inputs.go-version || '>=1.21.0' }} cache: false - - name: Install Python - if: matrix.version != 'nightly-latest' || !matrix.version - uses: actions/setup-python@v6 - with: - python-version: ${{ inputs.python-version || '3.13' }} - name: Prepare test id: prepare-test uses: ./.github/actions/prepare-test @@ -136,6 +121,14 @@ jobs: version: ${{ matrix.version }} use-all-platform-bundle: 'false' setup-kotlin: 
'true' + - name: Install Python 3.13 for older CLI versions + # We need Python 3.13 for older CLI versions because they are not compatible with Python 3.14 or newer. + # See https://github.com/github/codeql-action/pull/3212 + if: matrix.version != 'nightly-latest' && matrix.version != 'linked' + uses: actions/setup-python@v6 + with: + python-version: '3.13' + - name: Use Xcode 16 if: runner.os == 'macOS' && matrix.version != 'nightly-latest' run: sudo xcode-select -s "/Applications/Xcode_16.app" diff --git a/pr-checks/checks/multi-language-autodetect.yml b/pr-checks/checks/multi-language-autodetect.yml index 4892bcc31..90b342c1a 100644 --- a/pr-checks/checks/multi-language-autodetect.yml +++ b/pr-checks/checks/multi-language-autodetect.yml @@ -4,9 +4,16 @@ operatingSystems: ["macos", "ubuntu"] env: CODEQL_ACTION_RESOLVE_SUPPORTED_LANGUAGES_USING_CLI: true installGo: true -installPython: true installDotNet: true steps: + - name: Install Python 3.13 for older CLI versions + # We need Python 3.13 for older CLI versions because they are not compatible with Python 3.14 or newer. + # See https://github.com/github/codeql-action/pull/3212 + if: matrix.version != 'nightly-latest' && matrix.version != 'linked' + uses: actions/setup-python@v6 + with: + python-version: "3.13" + - name: Use Xcode 16 if: runner.os == 'macOS' && matrix.version != 'nightly-latest' run: sudo xcode-select -s "/Applications/Xcode_16.app" From 2e1f08fe70ac5f965f027f606607759df238ca57 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Wed, 11 Mar 2026 11:55:59 +0000 Subject: [PATCH 46/50] Remove `installPython` condition in `sync.ts` The behaviour of `installPython` now mirrors other `install*` options --- pr-checks/sync.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index f5d23d72d..4d9911130 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -198,7 +198,6 @@ const languageSetups: LanguageSetups = { steps: [ { name: "Install Python", - if: "matrix.version != 'nightly-latest' || !matrix.version", uses: "actions/setup-python@v6", with: { "python-version": From be7fe2bca68fc0d426462a6c2833e01166d8df3e Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Wed, 11 Mar 2026 12:14:41 +0000 Subject: [PATCH 47/50] Make it more explicit by construction that known inputs always have the same specifications --- pr-checks/sync.ts | 114 ++++++++++++++++++++++++---------------- pr-checks/tsconfig.json | 2 +- 2 files changed, 71 insertions(+), 45 deletions(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 4d9911130..75db57b90 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -78,7 +78,8 @@ interface JobSpecification { /** Describes language/framework-specific steps and inputs. */ interface LanguageSetup { specProperty: keyof JobSpecification; - inputs?: WorkflowInputs; + /** The names of the known inputs which are required for this setup step. */ + inputs?: KnownInputName[]; steps: any[]; } @@ -118,6 +119,46 @@ const defaultLanguageVersions = { csharp: "9.x", } as const satisfies Partial>; +/** A mapping from known input names to their specifications. 
*/ +const inputSpecs: WorkflowInputs = { + [KnownInputName.GoVersion]: { + type: "string", + description: "The version of Go to install", + required: false, + default: defaultLanguageVersions.go, + }, + [KnownInputName.JavaVersion]: { + type: "string", + description: "The version of Java to install", + required: false, + default: defaultLanguageVersions.java, + }, + [KnownInputName.PythonVersion]: { + type: "string", + description: "The version of Python to install", + required: false, + default: defaultLanguageVersions.python, + }, + [KnownInputName.DotnetVersion]: { + type: "string", + description: "The version of .NET to install", + required: false, + default: defaultLanguageVersions.csharp, + }, +}; + +/** Obtains a `WorkflowInputs` object for all the inputs given by `requiredInputs`. */ +function getSetupInputs(requiredInputs: Set): WorkflowInputs { + const inputs: WorkflowInputs = {}; + + // Copy the input specifications for the requested inputs into the output. + for (const requiredInput of requiredInputs) { + inputs[requiredInput] = inputSpecs[requiredInput]; + } + + return inputs; +} + /** A partial mapping from known languages to their specific setup information. 
*/ const languageSetups: LanguageSetups = { javascript: { @@ -139,14 +180,7 @@ const languageSetups: LanguageSetups = { }, go: { specProperty: "installGo", - inputs: { - [KnownInputName.GoVersion]: { - type: "string", - description: "The version of Go to install", - required: false, - default: defaultLanguageVersions.go, - }, - }, + inputs: [KnownInputName.GoVersion], steps: [ { name: "Install Go", @@ -163,14 +197,7 @@ const languageSetups: LanguageSetups = { }, java: { specProperty: "installJava", - inputs: { - [KnownInputName.JavaVersion]: { - type: "string", - description: "The version of Java to install", - required: false, - default: defaultLanguageVersions.java, - }, - }, + inputs: [KnownInputName.JavaVersion], steps: [ { name: "Install Java", @@ -187,14 +214,7 @@ const languageSetups: LanguageSetups = { }, python: { specProperty: "installPython", - inputs: { - [KnownInputName.PythonVersion]: { - type: "string", - description: "The version of Python to install", - required: false, - default: defaultLanguageVersions.python, - }, - }, + inputs: [KnownInputName.PythonVersion], steps: [ { name: "Install Python", @@ -210,14 +230,7 @@ const languageSetups: LanguageSetups = { }, csharp: { specProperty: "installDotNet", - inputs: { - [KnownInputName.DotnetVersion]: { - type: "string", - description: "The version of .NET to install", - required: false, - default: defaultLanguageVersions.csharp, - }, - }, + inputs: [KnownInputName.DotnetVersion], steps: [ { name: "Install .NET", @@ -250,6 +263,11 @@ function loadYaml(filePath: string): yaml.Document { return yaml.parseDocument(content); } +/** Computes the union of all given `sets`. */ +function unionAll(sets: Array>): Set { + return sets.reduce((prev, cur) => prev.union(cur), new Set()); +} + /** * Serialize a value to YAML and write it to a file, prepended with the * standard header comment. 
@@ -332,13 +350,13 @@ function generateJobMatrix( * Retrieves setup steps and additional input definitions based on specific languages or frameworks * that are requested by the `checkSpecification`. * - * @returns An object containing setup steps and additional input specifications. + * @returns An object containing setup steps and required input names. */ function getSetupSteps(checkSpecification: JobSpecification): { - inputs: WorkflowInputs; + inputs: Set; steps: any[]; } { - let inputs: WorkflowInputs = {}; + const inputs: Array> = []; const steps: any[] = []; for (const language of Object.values(KnownLanguage).sort()) { @@ -352,7 +370,7 @@ function getSetupSteps(checkSpecification: JobSpecification): { } steps.push(...setupSpec.steps); - inputs = { ...inputs, ...setupSpec.inputs }; + inputs.push(new Set(setupSpec.inputs)); } const installYq = checkSpecification.installYq; @@ -371,7 +389,7 @@ function getSetupSteps(checkSpecification: JobSpecification): { }); } - return { inputs, steps }; + return { inputs: unionAll(inputs), steps }; } /** @@ -527,13 +545,16 @@ function generateValidationJobs( specDocument: yaml.Document, checkSpecification: Specification, checkName: string, -) { +): { + validationJobs: Record; + workflowInputs: Set; +} { if (checkSpecification.validationJobs === undefined) { - return { validationJobs: {}, workflowInputs: {} }; + return { validationJobs: {}, workflowInputs: new Set() }; } const validationJobs: Record = {}; - let workflowInputs: WorkflowInputs = {}; + const workflowInputs: Array> = []; for (const [jobName, jobSpec] of Object.entries( checkSpecification.validationJobs, @@ -551,10 +572,13 @@ function generateValidationJobs( jobName, ); validationJobs[jobName] = validationJob; - workflowInputs = { ...workflowInputs, ...inputs }; + workflowInputs.push(inputs); } - return { validationJobs, workflowInputs }; + return { + validationJobs, + workflowInputs: unionAll(workflowInputs), + }; } /** @@ -595,7 +619,9 @@ function main(): void { 
); const { validationJobs, workflowInputs: validationJobInputs } = generateValidationJobs(specDocument, checkSpecification, checkName); - const combinedInputs = { ...workflowInputs, ...validationJobInputs }; + const combinedInputs = getSetupInputs( + workflowInputs.union(validationJobInputs), + ); // If this check belongs to a named collection, record it. if (checkSpecification.collection) { diff --git a/pr-checks/tsconfig.json b/pr-checks/tsconfig.json index e85403253..1e0562f00 100644 --- a/pr-checks/tsconfig.json +++ b/pr-checks/tsconfig.json @@ -1,7 +1,7 @@ { "compilerOptions": { /* Basic Options */ - "lib": ["ES2022"], + "lib": ["esnext"], "target": "ES2022", "module": "commonjs", "rootDir": "..", From 6570ad3440e6b0b0365c4976971616111f523d88 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Wed, 11 Mar 2026 12:16:28 +0000 Subject: [PATCH 48/50] Extend base `tsconfig.json` --- pr-checks/tsconfig.json | 23 +---------------------- 1 file changed, 1 insertion(+), 22 deletions(-) diff --git a/pr-checks/tsconfig.json b/pr-checks/tsconfig.json index 1e0562f00..aa728005c 100644 --- a/pr-checks/tsconfig.json +++ b/pr-checks/tsconfig.json @@ -1,32 +1,11 @@ { + "extends": "../tsconfig.json", "compilerOptions": { /* Basic Options */ "lib": ["esnext"], - "target": "ES2022", - "module": "commonjs", "rootDir": "..", "sourceMap": false, "noEmit": true, - - /* Strict Type-Checking Options */ - "strict": true, /* Enable all strict type-checking options. */ - "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */ - "strictNullChecks": true, /* Enable strict null checks. */ - "strictFunctionTypes": true, /* Enable strict checking of function types. */ - "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ - "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. 
*/ - "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ - "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ - - /* Additional Checks */ - "noUnusedLocals": false, /* Report errors on unused locals. */ - "noUnusedParameters": false, /* Report errors on unused parameters. */ - "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ - "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ - - /* Module Resolution Options */ - "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ - "resolveJsonModule": true, }, "include": ["./*.ts", "../src/**/*.ts"], "exclude": ["node_modules"] From 89f63211edbc884531256b10f4d9ae683f23a78d Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Wed, 11 Mar 2026 12:18:41 +0000 Subject: [PATCH 49/50] Use `version` in error message --- pr-checks/sync.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pr-checks/sync.ts b/pr-checks/sync.ts index 75db57b90..e6ccac629 100755 --- a/pr-checks/sync.ts +++ b/pr-checks/sync.ts @@ -309,7 +309,7 @@ function generateJobMatrix( for (const version of checkSpecification.versions ?? defaultTestVersions) { if (version === "latest") { throw new Error( - 'Did not recognise "version: latest". Did you mean "version: linked"?', + `Did not recognise "version: ${version}". Did you mean "version: linked"?`, ); } From d1a7580bd31a2da8ab72105a65ba08108e7b29e9 Mon Sep 17 00:00:00 2001 From: "Michael B. 
Gale" Date: Wed, 11 Mar 2026 12:29:36 +0000 Subject: [PATCH 50/50] Verify PR checks in a different job, with newer Node --- .github/workflows/pr-checks.yml | 43 ++++++++++++++++++++++++++------- 1 file changed, 34 insertions(+), 9 deletions(-) diff --git a/.github/workflows/pr-checks.yml b/.github/workflows/pr-checks.yml index 77a544cfa..501878d22 100644 --- a/.github/workflows/pr-checks.yml +++ b/.github/workflows/pr-checks.yml @@ -52,19 +52,10 @@ jobs: - name: Verify compiled JS up to date run: .github/workflows/script/check-js.sh - - name: Verify PR checks up to date - if: always() - run: .github/workflows/script/verify-pr-checks.sh - - name: Run unit tests if: always() run: npm test - - name: Run pr-checks tests - if: always() - working-directory: pr-checks - run: npm ci && npx tsx --test - - name: Lint if: always() && matrix.os != 'windows-latest' run: npm run lint-ci @@ -76,6 +67,40 @@ jobs: sarif_file: eslint.sarif category: eslint + # Verifying the PR checks are up-to-date requires Node 24. The PR checks are not dependent + # on the main codebase and therefore do not need to be run as part of the same matrix that + # we use for the `unit-tests` job. + verify-pr-checks: + name: Verify PR checks + if: github.triggering_actor != 'dependabot[bot]' + permissions: + contents: read + runs-on: ubuntu-slim + timeout-minutes: 10 + + steps: + - name: Prepare git (Windows) + if: runner.os == 'Windows' + run: git config --global core.autocrlf false + + - name: Checkout repository + uses: actions/checkout@v6 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: 24 + cache: 'npm' + + - name: Verify PR checks up to date + if: always() + run: .github/workflows/script/verify-pr-checks.sh + + - name: Run pr-checks tests + if: always() + working-directory: pr-checks + run: npm ci && npx tsx --test + check-node-version: if: github.triggering_actor != 'dependabot[bot]' name: Check Action Node versions