From c8e26e209a72378c5f0d0c0baff9ed657148b087 Mon Sep 17 00:00:00 2001 From: "Michael B. Gale" Date: Sat, 25 Apr 2026 15:53:18 +0100 Subject: [PATCH] Move `getAuthConfig` out of `start-proxy.ts` --- lib/start-proxy-action.js | 427 +++++++++++++++++----------------- src/start-proxy.ts | 79 +------ src/start-proxy/validation.ts | 83 +++++++ 3 files changed, 299 insertions(+), 290 deletions(-) create mode 100644 src/start-proxy/validation.ts diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 432ea95ca..8a54c53f0 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -47518,7 +47518,7 @@ var require_retry_helper = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.RetryHelper = void 0; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var RetryHelper = class { constructor(maxAttempts, minSeconds, maxSeconds) { if (maxAttempts < 1) { @@ -47541,10 +47541,10 @@ var require_retry_helper = __commonJS({ if (isRetryable && !isRetryable(err)) { throw err; } - core13.info(err.message); + core14.info(err.message); } const seconds = this.getSleepAmount(); - core13.info(`Waiting ${seconds} seconds before trying again`); + core14.info(`Waiting ${seconds} seconds before trying again`); yield this.sleep(seconds); attempt++; } @@ -47647,7 +47647,7 @@ var require_tool_cache = __commonJS({ exports2.findFromManifest = findFromManifest; exports2.isExplicitVersion = isExplicitVersion; exports2.evaluateVersions = evaluateVersions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var io5 = __importStar2(require_io()); var crypto2 = __importStar2(require("crypto")); var fs3 = __importStar2(require("fs")); @@ -47676,8 +47676,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { dest = dest || path5.join(_getTempDirectory(), crypto2.randomUUID()); yield io5.mkdirP(path5.dirname(dest)); - core13.debug(`Downloading ${url}`); - core13.debug(`Destination ${dest}`); + core14.debug(`Downloading ${url}`); + core14.debug(`Destination ${dest}`); const maxAttempts = 3; const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); @@ -47703,7 +47703,7 @@ var require_tool_cache = __commonJS({ allowRetries: false }); if (auth2) { - core13.debug("set auth"); + core14.debug("set auth"); if (headers === void 0) { headers = {}; } @@ -47712,7 +47712,7 @@ var require_tool_cache = __commonJS({ const response = yield http.get(url, headers); if (response.message.statusCode !== 200) { const err = new HTTPError2(response.message.statusCode); - core13.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + core14.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); throw err; } const pipeline = util.promisify(stream.pipeline); @@ -47721,16 +47721,16 @@ var require_tool_cache = __commonJS({ let succeeded = false; try { yield pipeline(readStream, fs3.createWriteStream(dest)); - core13.debug("download complete"); + core14.debug("download complete"); succeeded = true; return dest; } finally { if (!succeeded) { - core13.debug("download failed"); + core14.debug("download failed"); try { yield io5.rmRF(dest); } catch (err) { - core13.debug(`Failed to delete '${dest}'. 
${err.message}`); + core14.debug(`Failed to delete '${dest}'. ${err.message}`); } } } @@ -47745,7 +47745,7 @@ var require_tool_cache = __commonJS({ process.chdir(dest); if (_7zPath) { try { - const logLevel = core13.isDebug() ? "-bb1" : "-bb0"; + const logLevel = core14.isDebug() ? "-bb1" : "-bb0"; const args = [ "x", // eXtract files with full paths @@ -47798,7 +47798,7 @@ var require_tool_cache = __commonJS({ throw new Error("parameter 'file' is required"); } dest = yield _createExtractFolder(dest); - core13.debug("Checking tar --version"); + core14.debug("Checking tar --version"); let versionOutput = ""; yield (0, exec_1.exec)("tar --version", [], { ignoreReturnCode: true, @@ -47808,7 +47808,7 @@ var require_tool_cache = __commonJS({ stderr: (data) => versionOutput += data.toString() } }); - core13.debug(versionOutput.trim()); + core14.debug(versionOutput.trim()); const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); let args; if (flags instanceof Array) { @@ -47816,7 +47816,7 @@ var require_tool_cache = __commonJS({ } else { args = [flags]; } - if (core13.isDebug() && !flags.includes("v")) { + if (core14.isDebug() && !flags.includes("v")) { args.push("-v"); } let destArg = dest; @@ -47847,7 +47847,7 @@ var require_tool_cache = __commonJS({ args = [flags]; } args.push("-x", "-C", dest, "-f", file); - if (core13.isDebug()) { + if (core14.isDebug()) { args.push("-v"); } const xarPath = yield io5.which("xar", true); @@ -47890,7 +47890,7 @@ var require_tool_cache = __commonJS({ "-Command", pwshCommand ]; - core13.debug(`Using pwsh at path: ${pwshPath}`); + core14.debug(`Using pwsh at path: ${pwshPath}`); yield (0, exec_1.exec)(`"${pwshPath}"`, args); } else { const powershellCommand = [ @@ -47910,7 +47910,7 @@ var require_tool_cache = __commonJS({ powershellCommand ]; const powershellPath = yield io5.which("powershell", true); - core13.debug(`Using powershell at path: ${powershellPath}`); + core14.debug(`Using powershell at path: ${powershellPath}`); yield (0, exec_1.exec)(`"${powershellPath}"`, args); } }); @@ -47919,7 +47919,7 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const unzipPath = yield io5.which("unzip", true); const args = [file]; - if (!core13.isDebug()) { + if (!core14.isDebug()) { args.unshift("-q"); } args.unshift("-o"); @@ -47930,8 +47930,8 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver6.clean(version) || version; arch = arch || os2.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source dir: ${sourceDir}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source dir: ${sourceDir}`); if (!fs3.statSync(sourceDir).isDirectory()) { throw new Error("sourceDir is not a directory"); } @@ -47948,14 +47948,14 @@ var require_tool_cache = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { version = semver6.clean(version) || version; arch = arch || os2.arch(); - core13.debug(`Caching tool ${tool} ${version} ${arch}`); - core13.debug(`source file: ${sourceFile}`); + core14.debug(`Caching tool ${tool} ${version} ${arch}`); + core14.debug(`source file: ${sourceFile}`); if (!fs3.statSync(sourceFile).isFile()) { throw new Error("sourceFile is not a file"); } const destFolder = yield _createToolPath(tool, version, arch); const destPath = path5.join(destFolder, targetFile); - core13.debug(`destination file ${destPath}`); + core14.debug(`destination file ${destPath}`); yield 
io5.cp(sourceFile, destPath); _completeToolPath(tool, version, arch); return destFolder; @@ -47978,12 +47978,12 @@ var require_tool_cache = __commonJS({ if (versionSpec) { versionSpec = semver6.clean(versionSpec) || ""; const cachePath = path5.join(_getCacheDirectory(), toolName, versionSpec, arch); - core13.debug(`checking cache: ${cachePath}`); + core14.debug(`checking cache: ${cachePath}`); if (fs3.existsSync(cachePath) && fs3.existsSync(`${cachePath}.complete`)) { - core13.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + core14.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); toolPath = cachePath; } else { - core13.debug("not found"); + core14.debug("not found"); } } return toolPath; @@ -48012,7 +48012,7 @@ var require_tool_cache = __commonJS({ const http = new httpm.HttpClient("tool-cache"); const headers = {}; if (auth2) { - core13.debug("set auth"); + core14.debug("set auth"); headers.authorization = auth2; } const response = yield http.getJson(treeUrl, headers); @@ -48033,7 +48033,7 @@ var require_tool_cache = __commonJS({ try { releases = JSON.parse(versionsRaw); } catch (_a) { - core13.debug("Invalid json"); + core14.debug("Invalid json"); } } return releases; @@ -48057,7 +48057,7 @@ var require_tool_cache = __commonJS({ function _createToolPath(tool, version, arch) { return __awaiter2(this, void 0, void 0, function* () { const folderPath = path5.join(_getCacheDirectory(), tool, semver6.clean(version) || version, arch || ""); - core13.debug(`destination ${folderPath}`); + core14.debug(`destination ${folderPath}`); const markerPath = `${folderPath}.complete`; yield io5.rmRF(folderPath); yield io5.rmRF(markerPath); @@ -48069,18 +48069,18 @@ var require_tool_cache = __commonJS({ const folderPath = path5.join(_getCacheDirectory(), tool, semver6.clean(version) || version, arch || ""); const markerPath = `${folderPath}.complete`; fs3.writeFileSync(markerPath, ""); - core13.debug("finished caching tool"); + core14.debug("finished caching tool"); } function isExplicitVersion(versionSpec) { const c = semver6.clean(versionSpec) || ""; - core13.debug(`isExplicit: ${c}`); + core14.debug(`isExplicit: ${c}`); const valid2 = semver6.valid(c) != null; - core13.debug(`explicit? ${valid2}`); + core14.debug(`explicit? 
${valid2}`); return valid2; } function evaluateVersions(versions, versionSpec) { let version = ""; - core13.debug(`evaluating ${versions.length} versions`); + core14.debug(`evaluating ${versions.length} versions`); versions = versions.sort((a, b) => { if (semver6.gt(a, b)) { return 1; @@ -48096,9 +48096,9 @@ var require_tool_cache = __commonJS({ } } if (version) { - core13.debug(`matched: ${version}`); + core14.debug(`matched: ${version}`); } else { - core13.debug("match not found"); + core14.debug("match not found"); } return version; } @@ -49470,7 +49470,7 @@ var require_internal_glob_options_helper = __commonJS({ })(); Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getOptions = getOptions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); function getOptions(copy) { const result = { followSymbolicLinks: true, @@ -49482,23 +49482,23 @@ var require_internal_glob_options_helper = __commonJS({ if (copy) { if (typeof copy.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy.followSymbolicLinks; - core13.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + core14.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === "boolean") { result.implicitDescendants = copy.implicitDescendants; - core13.debug(`implicitDescendants '${result.implicitDescendants}'`); + core14.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === "boolean") { result.matchDirectories = copy.matchDirectories; - core13.debug(`matchDirectories '${result.matchDirectories}'`); + core14.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core13.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + core14.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy.excludeHiddenFiles; - core13.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + core14.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; @@ -51126,7 +51126,7 @@ var require_internal_globber = __commonJS({ }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultGlobber = void 0; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var fs3 = __importStar2(require("fs")); var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); var path5 = __importStar2(require("path")); @@ -51179,7 +51179,7 @@ var require_internal_globber = __commonJS({ } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core13.debug(`Search path '${searchPath}'`); + core14.debug(`Search path '${searchPath}'`); try { yield __await2(fs3.promises.lstat(searchPath)); } catch (err) { @@ -51254,7 +51254,7 @@ var require_internal_globber = __commonJS({ } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { - core13.debug(`Broken symlink '${item.path}'`); + core14.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); @@ -51270,7 +51270,7 @@ var require_internal_globber = __commonJS({ traversalChain.pop(); } if (traversalChain.some((x) => x === realPath)) { - core13.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + core14.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); @@ -51373,7 +51373,7 @@ var require_internal_hash_files = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.hashFiles = hashFiles; var crypto2 = __importStar2(require("crypto")); - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var fs3 = __importStar2(require("fs")); var stream = __importStar2(require("stream")); var util = __importStar2(require("util")); @@ -51382,7 +51382,7 @@ var require_internal_hash_files = __commonJS({ return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core13.info : core13.debug; + const writeDelegate = verbose ? core14.info : core14.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto2.createHash("sha256"); @@ -52773,7 +52773,7 @@ var require_cacheUtils = __commonJS({ exports2.assertDefined = assertDefined; exports2.getCacheVersion = getCacheVersion; exports2.getRuntimeToken = getRuntimeToken; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var exec3 = __importStar2(require_exec()); var glob = __importStar2(require_glob()); var io5 = __importStar2(require_io()); @@ -52824,7 +52824,7 @@ var require_cacheUtils = __commonJS({ _e = false; const file = _c; const relativeFile = path5.relative(workspace, file).replace(new RegExp(`\\${path5.sep}`, "g"), "/"); - core13.debug(`Matched: ${relativeFile}`); + core14.debug(`Matched: ${relativeFile}`); if (relativeFile === "") { paths.push("."); } else { @@ -52852,7 +52852,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { let versionOutput = ""; additionalArgs.push("--version"); - core13.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + core14.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec3.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, @@ -52863,10 +52863,10 @@ var require_cacheUtils = __commonJS({ } }); } catch (err) { - core13.debug(err.message); + core14.debug(err.message); } versionOutput = versionOutput.trim(); - core13.debug(versionOutput); + core14.debug(versionOutput); return versionOutput; }); } @@ -52874,7 +52874,7 @@ var require_cacheUtils = __commonJS({ return __awaiter2(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version = semver6.clean(versionOutput); - core13.debug(`zstd version: ${version}`); + core14.debug(`zstd version: ${version}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -93172,7 +93172,7 @@ var require_uploadUtils = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.UploadProgress = void 0; exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var storage_blob_1 = 
require_commonjs15(); var errors_1 = require_errors3(); var UploadProgress = class { @@ -93214,7 +93214,7 @@ var require_uploadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + core14.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -93271,14 +93271,14 @@ var require_uploadUtils = __commonJS({ }; try { uploadProgress.startDisplayTimer(); - core13.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + core14.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error3) { - core13.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + core14.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); throw error3; } finally { uploadProgress.stopDisplayTimer(); @@ -93363,7 +93363,7 @@ var require_requestUtils = __commonJS({ exports2.retry = retry2; exports2.retryTypedResponse = retryTypedResponse; exports2.retryHttpClientResponse = retryHttpClientResponse; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var constants_1 = require_constants12(); function isSuccessStatusCode(statusCode) { @@ -93421,9 +93421,9 @@ var require_requestUtils = __commonJS({ isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } - core13.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + core14.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { - core13.debug(`${name} - Error is not retryable`); + core14.debug(`${name} - Error is not retryable`); break; } yield sleep(delay2); @@ -93682,7 +93682,7 @@ var require_downloadUtils = __commonJS({ exports2.downloadCacheHttpClient = downloadCacheHttpClient; exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var storage_blob_1 = require_commonjs15(); var buffer = __importStar2(require("buffer")); @@ -93720,7 +93720,7 @@ var require_downloadUtils = __commonJS({ this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; - core13.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + core14.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. 
@@ -93754,7 +93754,7 @@ var require_downloadUtils = __commonJS({ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core13.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + core14.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } @@ -93804,7 +93804,7 @@ var require_downloadUtils = __commonJS({ })); downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); - core13.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + core14.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; @@ -93815,7 +93815,7 @@ var require_downloadUtils = __commonJS({ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { - core13.debug("Unable to validate download, no Content-Length header"); + core14.debug("Unable to validate download, no Content-Length header"); } }); } @@ -93933,7 +93933,7 @@ var require_downloadUtils = __commonJS({ const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { - core13.debug("Unable to determine content length, downloading file with http-client..."); + core14.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); @@ -94023,7 +94023,7 @@ var require_options = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUploadOptions = getUploadOptions; exports2.getDownloadOptions = getDownloadOptions; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); function getUploadOptions(copy) { const result = { useAzureSdk: false, @@ -94043,9 +94043,9 @@ var require_options = __commonJS({ } result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core13.debug(`Upload chunk size: ${result.uploadChunkSize}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core14.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } function getDownloadOptions(copy) { @@ -94081,12 +94081,12 @@ var require_options = __commonJS({ if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - core13.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core13.debug(`Download concurrency: ${result.downloadConcurrency}`); - core13.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core13.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core13.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core13.debug(`Lookup only: ${result.lookupOnly}`); + core14.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core14.debug(`Download concurrency: ${result.downloadConcurrency}`); + core14.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core14.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core14.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core14.debug(`Lookup only: ${result.lookupOnly}`); return result; } } @@ -94280,7 +94280,7 @@ var require_cacheHttpClient = __commonJS({ exports2.downloadCache = downloadCache; exports2.reserveCache = reserveCache; exports2.saveCache = saveCache3; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var http_client_1 = require_lib(); var auth_1 = require_auth(); var fs3 = __importStar2(require("fs")); @@ -94298,7 +94298,7 @@ var require_cacheHttpClient = __commonJS({ throw new Error("Cache Service Url not found, unable to restore cache."); } const url = `${baseUrl}_apis/artifactcache/${resource}`; - core13.debug(`Resource Url: ${url}`); + core14.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type2, apiVersion) { @@ -94326,7 +94326,7 @@ var require_cacheHttpClient = __commonJS({ return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { - if (core13.isDebug()) { + if (core14.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; @@ -94339,9 +94339,9 @@ var require_cacheHttpClient = __commonJS({ if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core13.setSecret(cacheDownloadUrl); - core13.debug(`Cache Result:`); - core13.debug(JSON.stringify(cacheResult)); + core14.setSecret(cacheDownloadUrl); + core14.debug(`Cache Result:`); + core14.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -94355,10 +94355,10 @@ var require_cacheHttpClient = __commonJS({ const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { - core13.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. 
There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key + core14.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key Other caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core13.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + core14.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } @@ -94401,7 +94401,7 @@ Other caches with similar key:`); } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter2(this, void 0, void 0, function* () { - core13.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + core14.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) @@ -94423,7 +94423,7 @@ Other caches with similar key:`); const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; - core13.debug("Awaiting all uploads"); + core14.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { @@ -94466,16 +94466,16 @@ Other caches with similar key:`); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); - core13.debug("Upload cache"); + core14.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); - core13.debug("Commiting cache"); + core14.debug("Commiting cache"); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + core14.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit 
cache.`); } - core13.info("Cache saved successfully"); + core14.info("Cache saved successfully"); } }); } @@ -99958,7 +99958,7 @@ var require_cache5 = __commonJS({ exports2.isFeatureAvailable = isFeatureAvailable; exports2.restoreCache = restoreCache3; exports2.saveCache = saveCache3; - var core13 = __importStar2(require_core()); + var core14 = __importStar2(require_core()); var path5 = __importStar2(require("path")); var utils = __importStar2(require_cacheUtils()); var cacheHttpClient = __importStar2(require_cacheHttpClient()); @@ -100017,7 +100017,7 @@ var require_cache5 = __commonJS({ function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": @@ -100032,8 +100032,8 @@ var require_cache5 = __commonJS({ return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -100051,19 +100051,19 @@ var require_cache5 = __commonJS({ return void 0; } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error3) { const typedError = error3; @@ -100071,16 +100071,16 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error3.message}`); + core14.error(`Failed to restore: ${error3.message}`); } else { - core13.warning(`Failed to restore: ${error3.message}`); + core14.warning(`Failed to restore: ${error3.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -100091,8 +100091,8 @@ var require_cache5 = __commonJS({ options = Object.assign(Object.assign({}, options), { 
useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; - core13.debug("Resolved Keys:"); - core13.debug(JSON.stringify(keys)); + core14.debug("Resolved Keys:"); + core14.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } @@ -100110,30 +100110,30 @@ var require_cache5 = __commonJS({ }; const response = yield twirpClient.GetCacheEntryDownloadURL(request3); if (!response.ok) { - core13.debug(`Cache not found for version ${request3.version} of keys: ${keys.join(", ")}`); + core14.debug(`Cache not found for version ${request3.version} of keys: ${keys.join(", ")}`); return void 0; } const isRestoreKeyMatch = request3.key !== response.matchedKey; if (isRestoreKeyMatch) { - core13.info(`Cache hit for restore-key: ${response.matchedKey}`); + core14.info(`Cache hit for restore-key: ${response.matchedKey}`); } else { - core13.info(`Cache hit for: ${response.matchedKey}`); + core14.info(`Cache hit for: ${response.matchedKey}`); } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core13.info("Lookup only - skipping download"); + core14.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive path: ${archivePath}`); - core13.debug(`Starting download of archive to: ${archivePath}`); + core14.debug(`Archive path: ${archivePath}`); + core14.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core13.isDebug()) { + core14.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core13.info("Cache restored successfully"); + core14.info("Cache restored successfully"); return response.matchedKey; } catch (error3) { const typedError = error3; @@ -100141,9 +100141,9 @@ var require_cache5 = __commonJS({ throw error3; } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to restore: ${error3.message}`); + core14.error(`Failed to restore: ${error3.message}`); } else { - core13.warning(`Failed to restore: ${error3.message}`); + core14.warning(`Failed to restore: ${error3.message}`); } } } finally { @@ -100152,7 +100152,7 @@ var require_cache5 = __commonJS({ yield utils.unlinkFile(archivePath); } } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return void 0; @@ -100161,7 +100161,7 @@ var require_cache5 = __commonJS({ function saveCache3(paths_1, key_1, options_1) { return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core13.debug(`Cache service version: ${cacheServiceVersion}`); + core14.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { @@ -100179,26 +100179,26 @@ var 
require_cache5 = __commonJS({ const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, @@ -100211,26 +100211,26 @@ var require_cache5 = __commonJS({ } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); } - core13.debug(`Saving Cache (ID: ${cacheId})`); + core14.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error3) { const typedError = error3; if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -100243,23 +100243,23 @@ var require_cache5 = __commonJS({ const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); - core13.debug("Cache Paths:"); - core13.debug(`${JSON.stringify(cachePaths)}`); + core14.debug("Cache Paths:"); + core14.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core13.debug(`Archive Path: ${archivePath}`); + core14.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core13.isDebug()) { + if (core14.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core13.debug(`File Size: ${archiveFileSize}`); + core14.debug(`File Size: ${archiveFileSize}`); options.archiveSizeBytes = archiveFileSize; - core13.debug("Reserving Cache"); + core14.debug("Reserving Cache"); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request3 = { key, @@ -100270,16 +100270,16 @@ var require_cache5 = __commonJS({ const response = yield twirpClient.CreateCacheEntry(request3); if (!response.ok) { if (response.message) { - core13.warning(`Cache reservation failed: ${response.message}`); + core14.warning(`Cache reservation failed: ${response.message}`); } throw new Error(response.message || "Response was not ok"); } signedUploadUrl = response.signedUploadUrl; } catch (error3) { - core13.debug(`Failed to reserve cache: ${error3}`); + core14.debug(`Failed to reserve cache: ${error3}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - core13.debug(`Attempting to upload cache located at: ${archivePath}`); + core14.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, @@ -100287,7 +100287,7 @@ var require_cache5 = __commonJS({ sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - 
core13.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + core14.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { if (finalizeResponse.message) { throw new FinalizeCacheError(finalizeResponse.message); @@ -100300,21 +100300,21 @@ var require_cache5 = __commonJS({ if (typedError.name === ValidationError.name) { throw error3; } else if (typedError.name === ReserveCacheError.name) { - core13.info(`Failed to save: ${typedError.message}`); + core14.info(`Failed to save: ${typedError.message}`); } else if (typedError.name === FinalizeCacheError.name) { - core13.warning(typedError.message); + core14.warning(typedError.message); } else { if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core13.error(`Failed to save: ${typedError.message}`); + core14.error(`Failed to save: ${typedError.message}`); } else { - core13.warning(`Failed to save: ${typedError.message}`); + core14.warning(`Failed to save: ${typedError.message}`); } } } finally { try { yield utils.unlinkFile(archivePath); } catch (error3) { - core13.debug(`Failed to delete archive: ${error3}`); + core14.debug(`Failed to delete archive: ${error3}`); } } return cacheId; @@ -118151,7 +118151,7 @@ var require_lib3 = __commonJS({ // src/start-proxy-action.ts var import_child_process = require("child_process"); var path4 = __toESM(require("path")); -var core12 = __toESM(require_core()); +var core13 = __toESM(require_core()); // src/actions-util.ts var core4 = __toESM(require_core()); @@ -121949,7 +121949,7 @@ function getActionsLogger() { // src/start-proxy.ts var path2 = __toESM(require("path")); -var core11 = __toESM(require_core()); +var core12 = __toESM(require_core()); var toolcache = __toESM(require_tool_cache()); // src/artifact-scanner.ts @@ -122083,12 +122083,57 @@ function getAddressString(address) { } } +// src/start-proxy/validation.ts +var core8 = __toESM(require_core()); +function getAuthConfig(config) { + if (isAzureConfig(config)) { + return { + "tenant-id": config["tenant-id"], + "client-id": config["client-id"] + }; + } else if (isAWSConfig(config)) { + return { + "aws-region": config["aws-region"], + "account-id": config["account-id"], + "role-name": config["role-name"], + domain: config.domain, + "domain-owner": config["domain-owner"], + audience: config.audience + }; + } else if (isJFrogConfig(config)) { + return { + "jfrog-oidc-provider-name": config["jfrog-oidc-provider-name"], + "identity-mapping-name": config["identity-mapping-name"], + audience: config.audience + }; + } else if (isToken(config)) { + if (isDefined2(config.token)) { + core8.setSecret(config.token); + } + return { username: config.username, token: config.token }; + } else { + let username = void 0; + let password = void 0; + if ("password" in config && isString(config.password)) { + core8.setSecret(config.password); + password = config.password; + } + if ("username" in config && isString(config.username)) { + username = config.username; + } + return { + username, + password + }; + } +} + // src/status-report.ts var os = __toESM(require("os")); -var core10 = __toESM(require_core()); +var core11 = __toESM(require_core()); // src/config-utils.ts -var core9 = __toESM(require_core()); +var core10 = __toESM(require_core()); // src/analyses.ts var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { @@ -122100,7 +122145,7 @@ var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { var supportedAnalysisKinds = new 
Set(Object.values(AnalysisKind)); // src/caching-utils.ts -var core8 = __toESM(require_core()); +var core9 = __toESM(require_core()); // src/config/db-config.ts var jsonschema = __toESM(require_lib2()); @@ -122172,12 +122217,12 @@ function getActionsStatus(error3, otherFailureCause) { } function setJobStatusIfUnsuccessful(actionStatus) { if (actionStatus === "user-error") { - core10.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_CONFIGURATION_ERROR" /* ConfigErrorStatus */ ); } else if (actionStatus === "failure" || actionStatus === "aborted") { - core10.exportVariable( + core11.exportVariable( "CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */, process.env["CODEQL_ACTION_JOB_STATUS" /* JOB_STATUS */] ?? "JOB_STATUS_FAILURE" /* FailureStatus */ ); @@ -122196,14 +122241,14 @@ async function createStatusReportBase(actionName, status, actionStartedAt, confi let workflowStartedAt = process.env["CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */]; if (workflowStartedAt === void 0) { workflowStartedAt = actionStartedAt.toISOString(); - core10.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); + core11.exportVariable("CODEQL_WORKFLOW_STARTED_AT" /* WORKFLOW_STARTED_AT */, workflowStartedAt); } const runnerOs = getRequiredEnvParam("RUNNER_OS"); const codeQlCliVersion = getCachedCodeQlVersion(); const actionRef = process.env["GITHUB_ACTION_REF"] || ""; const testingEnvironment = getTestingEnvironment(); if (testingEnvironment) { - core10.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); + core11.exportVariable("CODEQL_ACTION_TESTING_ENVIRONMENT" /* TESTING_ENVIRONMENT */, testingEnvironment); } const isSteadyStateDefaultSetupRun = process.env["CODE_SCANNING_IS_STEADY_STATE_DEFAULT_SETUP"] === "true"; const statusReport = { @@ -122286,9 +122331,9 @@ var INCOMPATIBLE_MSG = "CodeQL Action version is incompatible with the API endpo async function sendStatusReport(statusReport) { setJobStatusIfUnsuccessful(statusReport.status); const statusReportJSON = JSON.stringify(statusReport); - core10.debug(`Sending status report: ${statusReportJSON}`); + core11.debug(`Sending status report: ${statusReportJSON}`); if (isInTestMode()) { - core10.debug("In test mode. Status reports are not uploaded."); + core11.debug("In test mode. Status reports are not uploaded."); return; } const nwo = getRepositoryNwo(); @@ -122308,28 +122353,28 @@ async function sendStatusReport(statusReport) { switch (httpError.status) { case 403: if (getWorkflowEventName() === "push" && process.env["GITHUB_ACTOR"] === "dependabot[bot]") { - core10.warning( + core11.warning( `Workflows triggered by Dependabot on the "push" event run with read-only access. Uploading CodeQL results requires write access. To use CodeQL with Dependabot, please ensure you are using the "pull_request" event for this workflow and avoid triggering on the "push" event for Dependabot branches. See ${"https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning#scanning-on-push" /* SCANNING_ON_PUSH */} for more information on how to configure these events.` ); } else { - core10.warning( + core11.warning( `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. This could be because the Action is running on a pull request from a fork. 
If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` ); } return; case 404: - core10.warning(httpError.message); + core11.warning(httpError.message); return; case 422: if (getRequiredEnvParam("GITHUB_SERVER_URL") !== GITHUB_DOTCOM_URL) { - core10.debug(INCOMPATIBLE_MSG); + core11.debug(INCOMPATIBLE_MSG); } else { - core10.debug(OUT_OF_DATE_MSG); + core11.debug(OUT_OF_DATE_MSG); } return; } } - core10.warning( + core11.warning( `An unexpected error occurred when sending a status report: ${getErrorMessage( e )}` @@ -122404,7 +122449,7 @@ function getSafeErrorMessage(error3) { } async function sendFailedStatusReport(logger, startedAt, language, unwrappedError) { const error3 = wrapError(unwrappedError); - core11.setFailed(`start-proxy action failed: ${error3.message}`); + core12.setFailed(`start-proxy action failed: ${error3.message}`); const statusReportMessage = getSafeErrorMessage(error3); const errorStatusReportBase = await createStatusReportBase( "start-proxy" /* StartProxy */, @@ -122468,48 +122513,6 @@ function getRegistryAddress(registry) { ); } } -function getAuthConfig(config) { - if (isAzureConfig(config)) { - return { - "tenant-id": config["tenant-id"], - "client-id": config["client-id"] - }; - } else if (isAWSConfig(config)) { - return { - "aws-region": config["aws-region"], - "account-id": config["account-id"], - "role-name": config["role-name"], - domain: config.domain, - "domain-owner": config["domain-owner"], - audience: config.audience - }; - } else if (isJFrogConfig(config)) { - return { - "jfrog-oidc-provider-name": config["jfrog-oidc-provider-name"], - "identity-mapping-name": config["identity-mapping-name"], - audience: config.audience - }; - } else if (isToken(config)) { - if (isDefined2(config.token)) { - core11.setSecret(config.token); - } - return { username: config.username, token: config.token }; - } else { - let username = void 0; - let password = void 0; - if ("password" in config && isString(config.password)) { - core11.setSecret(config.password); - password = config.password; - } - if ("username" in config && isString(config.username)) { - username = config.username; - } - return { - username, - password - }; - } -} function getCredentials(logger, registrySecrets, registriesCredentials, language, skipUnusedRegistries = false) { const registryMapping = skipUnusedRegistries ? NEW_LANGUAGE_TO_REGISTRY_TYPE : LANGUAGE_TO_REGISTRY_TYPE; const registryTypeForLanguage = language ? 
registryMapping[language] : void 0; @@ -122996,7 +122999,7 @@ async function run(startedAt) { persistInputs(); const tempDir = getTemporaryDirectory(); const proxyLogFilePath = path4.resolve(tempDir, "proxy.log"); - core12.saveState("proxy-log-file", proxyLogFilePath); + core13.saveState("proxy-log-file", proxyLogFilePath); const repositoryNwo = getRepositoryNwo(); const gitHubVersion = await getGitHubVersion(); features = initFeatures( @@ -123025,7 +123028,7 @@ async function run(startedAt) { `Credentials loaded for the following registries: ${credentials.map((c) => credentialToStr(c)).join("\n")}` ); - if (core12.isDebug() || isInTestMode()) { + if (core13.isDebug() || isInTestMode()) { try { await checkProxyEnvironment(logger, language); } catch (err) { @@ -123065,7 +123068,7 @@ async function runWrapper() { try { await run(startedAt); } catch (error3) { - core12.setFailed(`start-proxy action failed: ${getErrorMessage(error3)}`); + core13.setFailed(`start-proxy action failed: ${getErrorMessage(error3)}`); await sendUnhandledErrorStatusReport( "start-proxy" /* StartProxy */, startedAt, @@ -123091,7 +123094,7 @@ async function startProxy(binPath, config, logFilePath, logger) { ); subprocess.unref(); if (subprocess.pid) { - core12.saveState("proxy-process-pid", `${subprocess.pid}`); + core13.saveState("proxy-process-pid", `${subprocess.pid}`); } subprocess.on("error", (error3) => { subprocessError = error3; @@ -123110,14 +123113,14 @@ async function startProxy(binPath, config, logFilePath, logger) { throw subprocessError; } logger.info(`Proxy started on ${host}:${port}`); - core12.setOutput("proxy_host", host); - core12.setOutput("proxy_port", port.toString()); - core12.setOutput("proxy_ca_certificate", config.ca.cert); + core13.setOutput("proxy_host", host); + core13.setOutput("proxy_port", port.toString()); + core13.setOutput("proxy_ca_certificate", config.ca.cert); const registry_urls = config.all_credentials.filter((credential) => credential.url !== void 0).map((credential) => ({ type: credential.type, url: credential.url })); - core12.setOutput("proxy_urls", JSON.stringify(registry_urls)); + core13.setOutput("proxy_urls", JSON.stringify(registry_urls)); return { host, port, cert: config.ca.cert, registries: registry_urls }; } void runWrapper(); diff --git a/src/start-proxy.ts b/src/start-proxy.ts index 8859eb16e..94ae303e6 100644 --- a/src/start-proxy.ts +++ b/src/start-proxy.ts @@ -24,20 +24,12 @@ import { Address, Registry, Credential, - AuthConfig, isToken, - isAzureConfig, - Token, - UsernamePassword, - AzureConfig, - isAWSConfig, - AWSConfig, - isJFrogConfig, - JFrogConfig, isUsernamePassword, hasUsername, RawCredential, } from "./start-proxy/types"; +import { getAuthConfig } from "./start-proxy/validation"; import { ActionName, createStatusReportBase, @@ -251,75 +243,6 @@ function getRegistryAddress( } } -/** Extracts an `AuthConfig` value from `config`. */ -export function getAuthConfig( - config: json.UnvalidatedObject, -): AuthConfig { - // Start by checking for the OIDC configurations, since they have required properties - // which we can use to identify them. 
- if (isAzureConfig(config)) { - return { - "tenant-id": config["tenant-id"], - "client-id": config["client-id"], - } satisfies AzureConfig; - } else if (isAWSConfig(config)) { - return { - "aws-region": config["aws-region"], - "account-id": config["account-id"], - "role-name": config["role-name"], - domain: config.domain, - "domain-owner": config["domain-owner"], - audience: config.audience, - } satisfies AWSConfig; - } else if (isJFrogConfig(config)) { - return { - "jfrog-oidc-provider-name": config["jfrog-oidc-provider-name"], - "identity-mapping-name": config["identity-mapping-name"], - audience: config.audience, - } satisfies JFrogConfig; - } else if (isToken(config)) { - // There are three scenarios for non-OIDC authentication based on the registry type: - // - // 1. `username`+`token` - // 2. A `token` that combines the username and actual token, separated by ':'. - // 3. `username`+`password` - // - // In all three cases, all fields are optional. If the `token` field is present, - // we accept the configuration as a `Token` typed configuration, with the `token` - // value and an optional `username`. Otherwise, we accept the configuration - // typed as `UsernamePassword` (in the `else` clause below) with optional - // username and password. I.e. a private registry type that uses 1. or 2., - // but has no `token` configured, will get accepted as `UsernamePassword` here. - - if (isDefined(config.token)) { - // Mask token to reduce chance of accidental leakage in logs, if we have one. - core.setSecret(config.token); - } - - return { username: config.username, token: config.token } satisfies Token; - } else { - let username: string | undefined = undefined; - let password: string | undefined = undefined; - - // Both "username" and "password" are optional. If we have reached this point, we need - // to validate which of them are present and that they have the correct type if so. - if ("password" in config && json.isString(config.password)) { - // Mask password to reduce chance of accidental leakage in logs, if we have one. - core.setSecret(config.password); - password = config.password; - } - if ("username" in config && json.isString(config.username)) { - username = config.username; - } - - // Return the `UsernamePassword` object. Both username and password may be undefined. - return { - username, - password, - } satisfies UsernamePassword; - } -} - // getCredentials returns registry credentials from action inputs. // It prefers `registries_credentials` over `registry_secrets`. // If neither is set, it returns an empty array. diff --git a/src/start-proxy/validation.ts b/src/start-proxy/validation.ts new file mode 100644 index 000000000..29a40338d --- /dev/null +++ b/src/start-proxy/validation.ts @@ -0,0 +1,83 @@ +import * as core from "@actions/core"; + +import * as json from "../json"; +import { isDefined } from "../util"; + +import type { + AuthConfig, + AWSConfig, + AzureConfig, + JFrogConfig, + Token, + UsernamePassword, +} from "./types"; +import * as types from "./types"; + +/** Extracts an `AuthConfig` value from `config`. */ +export function getAuthConfig( + config: json.UnvalidatedObject, +): AuthConfig { + // Start by checking for the OIDC configurations, since they have required properties + // which we can use to identify them. 
+ if (types.isAzureConfig(config)) { + return { + "tenant-id": config["tenant-id"], + "client-id": config["client-id"], + } satisfies AzureConfig; + } else if (types.isAWSConfig(config)) { + return { + "aws-region": config["aws-region"], + "account-id": config["account-id"], + "role-name": config["role-name"], + domain: config.domain, + "domain-owner": config["domain-owner"], + audience: config.audience, + } satisfies AWSConfig; + } else if (types.isJFrogConfig(config)) { + return { + "jfrog-oidc-provider-name": config["jfrog-oidc-provider-name"], + "identity-mapping-name": config["identity-mapping-name"], + audience: config.audience, + } satisfies JFrogConfig; + } else if (types.isToken(config)) { + // There are three scenarios for non-OIDC authentication based on the registry type: + // + // 1. `username`+`token` + // 2. A `token` that combines the username and actual token, separated by ':'. + // 3. `username`+`password` + // + // In all three cases, all fields are optional. If the `token` field is present, + // we accept the configuration as a `Token` typed configuration, with the `token` + // value and an optional `username`. Otherwise, we accept the configuration + // typed as `UsernamePassword` (in the `else` clause below) with optional + // username and password. I.e. a private registry type that uses 1. or 2., + // but has no `token` configured, will get accepted as `UsernamePassword` here. + + if (isDefined(config.token)) { + // Mask token to reduce chance of accidental leakage in logs, if we have one. + core.setSecret(config.token); + } + + return { username: config.username, token: config.token } satisfies Token; + } else { + let username: string | undefined = undefined; + let password: string | undefined = undefined; + + // Both "username" and "password" are optional. If we have reached this point, we need + // to validate which of them are present and that they have the correct type if so. + if ("password" in config && json.isString(config.password)) { + // Mask password to reduce chance of accidental leakage in logs, if we have one. + core.setSecret(config.password); + password = config.password; + } + if ("username" in config && json.isString(config.username)) { + username = config.username; + } + + // Return the `UsernamePassword` object. Both username and password may be undefined. + return { + username, + password, + } satisfies UsernamePassword; + } +}
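
A minimal usage sketch for the relocated helper, assuming the module layout this patch introduces (`src/start-proxy/validation.ts`) and the `AuthConfig` variants defined in `src/start-proxy/types.ts`. The registry entries below are hypothetical illustrations, not values taken from the action's `registries_credentials` input:

import { getAuthConfig } from "./start-proxy/validation";

// Hypothetical raw registry entries, shaped like the objects produced by
// JSON-parsing the `registries_credentials` input. Literals are illustrative.
const azureEntry = {
  "tenant-id": "00000000-0000-0000-0000-000000000000",
  "client-id": "11111111-1111-1111-1111-111111111111",
};
const tokenEntry = {
  username: "ci-bot",
  token: "s3cret", // getAuthConfig masks this in logs via core.setSecret
};

// The OIDC checks run first, so `azureEntry` narrows to an AzureConfig,
// while `tokenEntry` falls through to the Token branch.
const azureAuth = getAuthConfig(azureEntry); // { "tenant-id": ..., "client-id": ... }
const tokenAuth = getAuthConfig(tokenEntry); // { username: "ci-bot", token: "s3cret" }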
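
The move itself is behavior-preserving: `start-proxy.ts` keeps the proxy orchestration logic, while the credential-shape checks now live in a module that can be unit-tested without importing the action entry point. The large `lib/start-proxy-action.js` hunks are rebuilt bundle output, where the `core13` to `core14` renames are mechanical import renumbering rather than hand-written changes.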