Mirror of https://github.com/github/codeql-action.git, synced 2026-05-09 15:20:28 +00:00.

Compare commits

9 Commits
| SHA1 |
|---|
| a6fad803a6 |
| e09b8dbe8a |
| 6e368b7e89 |
| 2ab95cc5ab |
| af6b899441 |
| 989d6f4689 |
| dbfd510456 |
| 4acf6eb187 |
| 8b990db7d7 |
@@ -1,5 +1,5 @@
name: "CodeQL config"
queries:
queries:
  - name: Run custom queries
    uses: ./queries
  # Run all extra query suites, both because we want to
@@ -13,5 +13,3 @@ queries:
paths-ignore:
  - lib
  - tests
  - "**/*.test.ts"
  - "**/testing-util.ts"
@@ -52,6 +52,14 @@ jobs:
    - name: Verify compiled JS up to date
      run: .github/workflows/script/check-js.sh

    - name: Upload esbuild metadata
      uses: actions/upload-artifact@v7
      with:
        name: bundle-metadata-${{ matrix.os }}-${{ matrix.node-version }}
        path: build-metadata.json
        retention-days: ${{ (github.ref_name == github.event.repository.default_branch && 90) || 7 }}
        if-no-files-found: error

    - name: Run unit tests
      if: always()
      run: npm test
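The `retention-days` value relies on the short-circuit idiom available in GitHub Actions expressions: `(cond && 90) || 7` evaluates to 90 when the run is on the default branch and to 7 otherwise. A minimal sketch of the same idiom (illustrative only, not part of the repository):

```ts
// Illustrative only: the `(cond && a) || b` idiom used in the workflow expression above.
function retentionDays(isDefaultBranch: boolean): number {
  // Mirrors `(github.ref_name == github.event.repository.default_branch && 90) || 7`.
  return (isDefaultBranch && 90) || 7;
}

console.log(retentionDays(true)); // 90
console.log(retentionDays(false)); // 7
```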
+1
-1
@@ -12,4 +12,4 @@ eslint.sarif
# for local incremental compilation
tsconfig.tsbuildinfo
# esbuild metadata file
meta.json
build-metadata.json
+1
-8
@@ -4,15 +4,8 @@ See the [releases page](https://github.com/github/codeql-action/releases) for th

## [UNRELEASED]

No user facing changes.

## 4.35.3 - 01 May 2026

- _Upcoming breaking change_: Add a deprecation warning for customers using CodeQL version 2.19.3 and earlier. These versions of CodeQL were discontinued on 9 April 2026 alongside GitHub Enterprise Server 3.15, and will be unsupported by the next minor release of the CodeQL Action. [#3837](https://github.com/github/codeql-action/pull/3837)
- Configurations for private registries that use Cloudsmith or GCP OIDC are now accepted. [#3850](https://github.com/github/codeql-action/pull/3850)
- Best-effort connection tests for private registries now use `GET` requests instead of `HEAD` for better compatibility with various registry implementations. For NuGet feeds, the test is now always performed against the service index. [#3853](https://github.com/github/codeql-action/pull/3853)
- Fixed a bug where two diagnostics produced within the same millisecond could overwrite each other on disk, causing one of them to be lost. [#3852](https://github.com/github/codeql-action/pull/3852)
- Update default CodeQL bundle version to [2.25.3](https://github.com/github/codeql-action/releases/tag/codeql-bundle-v2.25.3). [#3865](https://github.com/github/codeql-action/pull/3865)
- _Upcoming breaking change_: Add a deprecation warning for customers using CodeQL version 2.19.3 and earlier. These versions of CodeQL were discontinued on 9 April 2026 alongside GitHub Enterprise Server 3.15, and will be unsupported by the next minor release of the CodeQL Action. [#3837](https://github.com/github/codeql-action/pull/3837)

## 4.35.2 - 15 Apr 2026
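For context on the GET-vs-HEAD changelog entry above, a best-effort connectivity test along these lines would simply check that the registry endpoint answers a `GET` request. This is a hypothetical sketch under that assumption, not the action's actual implementation; the URL is illustrative:

```ts
// Hypothetical sketch of a best-effort registry connection test using GET.
// For NuGet feeds, the changelog says the request targets the service index
// (e.g. "https://example.registry/v3/index.json" - illustrative URL).
async function testRegistryConnection(serviceIndexUrl: string): Promise<boolean> {
  try {
    const response = await fetch(serviceIndexUrl, { method: "GET" });
    // Any HTTP response (even 401/403) shows the endpoint is reachable.
    return response.status < 500;
  } catch {
    return false;
  }
}
```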
@@ -82,6 +82,6 @@ const context = await esbuild.context({
});

const result = await context.rebuild();
await writeFile(join(__dirname, "meta.json"), JSON.stringify(result.metafile));
await writeFile(join(__dirname, "build-metadata.json"), JSON.stringify(result.metafile));

await context.dispose();
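The metafile written above is only populated when esbuild is configured with `metafile: true`. A minimal, self-contained sketch of that flow (the entry point and output directory here are illustrative, not the repository's actual build.mjs):

```ts
// Minimal sketch of producing esbuild bundle metadata.
import { writeFile } from "node:fs/promises";

import * as esbuild from "esbuild";

const context = await esbuild.context({
  entryPoints: ["src/init-action.ts"], // illustrative entry point
  bundle: true,
  platform: "node",
  outdir: "lib",
  metafile: true, // required for result.metafile to be populated
});

const result = await context.rebuild();

// metafile.outputs maps each output chunk to its total byte count and to the
// inputs (with bytesInOutput) that contributed to it, which is exactly what
// the comparison script later in this change consumes.
await writeFile("build-metadata.json", JSON.stringify(result.metafile));

await context.dispose();
```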
Generated (+35422 / -482): file diff suppressed because one or more lines are too long
Generated (+18598 / -421): file diff suppressed because one or more lines are too long
Generated (+18554 / -373): file diff suppressed because one or more lines are too long
+4
-4
@@ -1,6 +1,6 @@
{
  "bundleVersion": "codeql-bundle-v2.25.3",
  "cliVersion": "2.25.3",
  "priorBundleVersion": "codeql-bundle-v2.25.2",
  "priorCliVersion": "2.25.2"
  "bundleVersion": "codeql-bundle-v2.25.2",
  "cliVersion": "2.25.2",
  "priorBundleVersion": "codeql-bundle-v2.25.1",
  "priorCliVersion": "2.25.1"
}
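For illustration only, a consumer of this file could read the bundle pin like the sketch below; the file path and the idea that a script reads it this way are assumptions, not the action's actual code:

```ts
// Illustrative only: read the defaults file and pick the pinned CodeQL bundle.
import { readFile } from "node:fs/promises";

interface Defaults {
  bundleVersion: string;
  cliVersion: string;
  priorBundleVersion: string;
  priorCliVersion: string;
}

// "src/defaults.json" is an assumed location for this sketch.
const defaults = JSON.parse(
  await readFile("src/defaults.json", "utf8"),
) as Defaults;

// e.g. "codeql-bundle-v2.25.3" / "2.25.3" after this change
console.log(defaults.bundleVersion, defaults.cliVersion);
```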
Generated (+35426 / -486): file diff suppressed because one or more lines are too long
Generated (+18599 / -422): file diff suppressed because one or more lines are too long
Generated (+18552 / -371): file diff suppressed because one or more lines are too long
Generated (+18554 / -373): file diff suppressed because one or more lines are too long
Generated (+35422 / -482): file diff suppressed because one or more lines are too long
Generated (+18898 / -797): file diff suppressed because one or more lines are too long
Generated (+18554 / -373): file diff suppressed because one or more lines are too long
Generated (+35422 / -482): file diff suppressed because one or more lines are too long
Generated (+18554 / -373): file diff suppressed because one or more lines are too long
Generated
+35
-7
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "codeql",
|
||||
"version": "4.35.4",
|
||||
"version": "4.35.3",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "codeql",
|
||||
"version": "4.35.4",
|
||||
"version": "4.35.3",
|
||||
"license": "MIT",
|
||||
"workspaces": [
|
||||
"pr-checks"
|
||||
@@ -410,6 +410,15 @@
|
||||
"undici": "^6.23.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/github/node_modules/undici": {
|
||||
"version": "6.23.0",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-6.23.0.tgz",
|
||||
"integrity": "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.17"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/glob": {
|
||||
"version": "0.5.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.5.1.tgz",
|
||||
@@ -430,6 +439,15 @@
|
||||
"undici": "^6.23.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/http-client/node_modules/undici": {
|
||||
"version": "6.23.0",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-6.23.0.tgz",
|
||||
"integrity": "sha512-VfQPToRA5FZs/qJxLIinmU59u0r7LXqoJkCzinq3ckNJp3vKEh7jTWN589YQ5+aoAC/TGRLyJLCPKcLQbM8r9g==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.17"
|
||||
}
|
||||
},
|
||||
"node_modules/@actions/io": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/io/-/io-2.0.0.tgz",
|
||||
@@ -1476,6 +1494,14 @@
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@fastify/busboy": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
|
||||
"integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
|
||||
"engines": {
|
||||
"node": ">=14"
|
||||
}
|
||||
},
|
||||
"node_modules/@github/browserslist-config": {
|
||||
"version": "1.0.0",
|
||||
"dev": true,
|
||||
@@ -9828,12 +9854,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/undici": {
|
||||
"version": "6.24.1",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-6.24.1.tgz",
|
||||
"integrity": "sha512-sC+b0tB1whOCzbtlx20fx3WgCXwkW627p4EA9uM+/tNNPkSS+eSEld6pAs9nDv7WbY1UUljBMYPtu9BCOrCWKA==",
|
||||
"license": "MIT",
|
||||
"version": "5.29.0",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
|
||||
"integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
|
||||
"dependencies": {
|
||||
"@fastify/busboy": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.17"
|
||||
"node": ">=14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
|
||||
+3
-4
@@ -1,11 +1,11 @@
{
  "name": "codeql",
  "version": "4.35.4",
  "version": "4.35.3",
  "private": true,
  "description": "CodeQL action",
  "scripts": {
    "_build_comment": "echo 'Run the full build so we typecheck the project and can reuse the transpiled files in npm test'",
    "build": "./scripts/check-node-modules.sh && npm run transpile && node build.mjs && npx tsx ./pr-checks/bundle-metadata.ts",
    "build": "./scripts/check-node-modules.sh && npm run transpile && node build.mjs",
    "lint": "eslint --report-unused-disable-directives --max-warnings=0 .",
    "lint-ci": "SARIF_ESLINT_IGNORE_SUPPRESSED=true eslint --report-unused-disable-directives --max-warnings=0 . --format @microsoft/eslint-formatter-sarif --output-file=eslint.sarif",
    "lint-fix": "eslint --report-unused-disable-directives --max-warnings=0 . --fix",
@@ -90,7 +90,6 @@
    "semver": ">=6.3.1"
  },
  "brace-expansion@2.0.1": "2.0.2",
  "glob": "^11.1.0",
  "undici": "^6.24.0"
  "glob": "^11.1.0"
}
}
@@ -1,8 +1,16 @@
import { ParseArgsConfig } from "node:util";

import * as githubUtils from "@actions/github/lib/utils";
import { type Octokit } from "@octokit/core";
import { type PaginateInterface } from "@octokit/plugin-paginate-rest";
import { type Api } from "@octokit/plugin-rest-endpoint-methods";

/** Identifies the CodeQL Action repository. */
export const CODEQL_ACTION_REPO = {
  owner: "github",
  repo: "codeql-action",
};

/** The type of the Octokit client. */
export type ApiClient = Octokit & Api & { paginate: PaginateInterface };

@@ -11,3 +19,16 @@ export function getApiClient(token: string): ApiClient {
  const opts = githubUtils.getOctokitOptions(token);
  return new githubUtils.GitHub(opts);
}

export interface TokenOption {
  /** The token to use to authenticate to the GitHub API. */
  token?: string;
}

/** Command-line argument parser settings for the token parameter. */
export const TOKEN_OPTION_CONFIG = {
  // The token to use to authenticate to the API.
  token: {
    type: "string",
  },
} satisfies ParseArgsConfig["options"];
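These exports are consumed by the new pr-checks script below. As a minimal, standalone usage sketch (a hypothetical script, not part of the repository), the shared option config plugs into `parseArgs` and the resulting token builds the Octokit client:

```ts
// Hypothetical usage of the helpers above.
import { parseArgs } from "node:util";

import {
  CODEQL_ACTION_REPO,
  getApiClient,
  TOKEN_OPTION_CONFIG,
} from "./api-client";

const { values } = parseArgs({ options: { ...TOKEN_OPTION_CONFIG } });
if (values.token === undefined) {
  throw new Error("Missing --token");
}

const client = getApiClient(values.token);

// List the most recent successful pr-checks.yml run on main, much as the
// comparison script below does when locating its baseline artifact.
const runs = await client.rest.actions.listWorkflowRuns({
  ...CODEQL_ACTION_REPO,
  workflow_id: "pr-checks.yml",
  branch: "main",
  status: "success",
  per_page: 1,
});
console.log(runs.data.workflow_runs[0]?.html_url);
```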
@@ -1,8 +1,43 @@
#!/usr/bin/env npx tsx

import * as fs from "node:fs/promises";
import { parseArgs, ParseArgsConfig } from "node:util";

import { BUNDLE_METADATA_FILE } from "./config";
import * as exec from "@actions/exec";

import {
  ApiClient,
  CODEQL_ACTION_REPO,
  getApiClient,
  TOKEN_OPTION_CONFIG,
} from "./api-client";
import { BASELINE_BUNDLE_METADATA_FILE, BUNDLE_METADATA_FILE } from "./config";

const optionsConfig = {
  ...TOKEN_OPTION_CONFIG,
  branch: {
    type: "string",
    default: "main",
  },
  runner: {
    type: "string",
    default: "macos-latest",
  },
  "node-version": {
    type: "string",
    default: "24",
  },
} satisfies ParseArgsConfig["options"];

function parseOptions() {
  const { values: options } = parseArgs({
    options: optionsConfig,
  });

  return options;
}

type Options = ReturnType<typeof parseOptions>;

interface InputInfo {
  bytesInOutput: number;
@@ -23,21 +58,125 @@ function toMB(bytes: number): string {
  return `${(bytes / (1024 * 1024)).toFixed(2)}MB`;
}

async function getBaselineFrom(client: ApiClient, options: Options) {
  const workflowRun = await client.rest.actions.listWorkflowRuns({
    ...CODEQL_ACTION_REPO,
    branch: options.branch,
    workflow_id: "pr-checks.yml",
    status: "success",
    per_page: 1,
    event: "push",
  });

  if (workflowRun.data.total_count === 0) {
    throw new Error(
      `Expected to find a 'pr-checks.yml' run for '${options.branch}', but found none.`,
    );
  }

  const expectedArtifactName = `bundle-metadata-${options.runner}-${options["node-version"]}`;
  const artifacts = await client.rest.actions.listWorkflowRunArtifacts({
    ...CODEQL_ACTION_REPO,
    run_id: workflowRun.data.workflow_runs[0].id,
    name: expectedArtifactName,
  });

  if (artifacts.data.total_count === 0) {
    throw new Error(
      `Expected to find an artifact named '${expectedArtifactName}', but found none.`,
    );
  }

  const downloadInfo = await client.rest.actions.downloadArtifact({
    ...CODEQL_ACTION_REPO,
    artifact_id: artifacts.data.artifacts[0].id,
    archive_format: "zip",
  });

  // This works fine for us with our version of Octokit, so we don't need to
  // worry about over-complicating this script and handle other possibilities.
  if (downloadInfo.data instanceof ArrayBuffer) {
    const archivePath = `${expectedArtifactName}.zip`;
    await fs.writeFile(archivePath, Buffer.from(downloadInfo.data));

    console.info(`Extracting zip file: ${archivePath}`);
    await exec.exec("unzip", ["-o", archivePath, "-d", "."]);

    // We no longer need the archive after unzipping it.
    await fs.rm(archivePath);

    // Check that we have the expected file.
    try {
      await fs.stat(BASELINE_BUNDLE_METADATA_FILE);
    } catch (err) {
      throw new Error(
        `Expected '${BASELINE_BUNDLE_METADATA_FILE}' to have been extracted, but it does not exist: ${err}`,
      );
    }

    const baselineData = await fs.readFile(BASELINE_BUNDLE_METADATA_FILE);
    return JSON.parse(String(baselineData)) as Metadata;
  } else {
    throw new Error("Expected to receive artifact data, but didn't.");
  }
}

async function main() {
  const options = parseOptions();

  if (options.token === undefined) {
    throw new Error("Missing --token");
  }

  // Initialise the API client.
  const client = getApiClient(options.token);
  const baselineMetadata = await getBaselineFrom(client, options);

  const fileContents = await fs.readFile(BUNDLE_METADATA_FILE);
  const metadata = JSON.parse(String(fileContents)) as Metadata;

  console.info("Comparing bundle metadata to baseline...");

  const filesInBaseline = new Set(Object.keys(baselineMetadata.outputs));
  const filesInCurrent = new Set(Object.keys(metadata.outputs));

  const filesNotPresent = filesInBaseline.difference(filesInCurrent);
  if (filesNotPresent.size > 0) {
    console.info(`Found ${filesNotPresent.size} file(s) which were removed:`);
    for (const removedFile of filesNotPresent) {
      console.info(` - ${removedFile}`);
    }
  }

  for (const [outputFile, outputData] of Object.entries(
    metadata.outputs,
  ).reverse()) {
    console.info(`${outputFile}: ${toMB(outputData.bytes)}`);
    const baselineOutputData = baselineMetadata.outputs[outputFile];

    for (const [inputName, inputData] of Object.entries(outputData.inputs)) {
      // Ignore any inputs that make up less than 5% of the output.
      const percentage = (inputData.bytesInOutput / outputData.bytes) * 100.0;
      if (percentage < 5.0) continue;
    if (baselineOutputData === undefined) {
      console.info(`${outputFile}: New file (${toMB(outputData.bytes)})`);
    } else {
      const percentageDifference =
        ((outputData.bytes - baselineOutputData.bytes) /
          baselineOutputData.bytes) *
        100.0;

      console.info(` ${inputName}: ${toMB(inputData.bytesInOutput)}`);
      if (Math.abs(percentageDifference) >= 5) {
        console.info(
          `${outputFile}: ${toMB(outputData.bytes)} (${percentageDifference.toFixed(2)}%)`,
        );

        for (const [inputName, inputData] of Object.entries(
          outputData.inputs,
        )) {
          // Ignore any inputs that make up less than 5% of the output.
          const percentage =
            (inputData.bytesInOutput / outputData.bytes) * 100.0;
          if (percentage < 5.0) continue;

          console.info(` ${inputName}: ${toMB(inputData.bytesInOutput)}`);
        }
      }
    }
  }
}
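To make the 5% reporting threshold used above concrete, here is a worked example with hypothetical numbers (not taken from the repository's actual bundles): a file that grows from 10.00MB in the baseline to 10.60MB in the current build differs by 6%, so it is reported.

```ts
// Hypothetical numbers, only to illustrate the threshold arithmetic above.
const baselineBytes = 10.0 * 1024 * 1024;
const currentBytes = 10.6 * 1024 * 1024;

const percentageDifference =
  ((currentBytes - baselineBytes) / baselineBytes) * 100.0;

console.log(percentageDifference.toFixed(2)); // "6.00"
console.log(Math.abs(percentageDifference) >= 5); // true, so the file is reported
```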
+11
-1
@@ -10,7 +10,17 @@ export const PR_CHECKS_DIR = __dirname;
export const PR_CHECK_EXCLUDED_FILE = path.join(PR_CHECKS_DIR, "excluded.yml");

/** The path to the esbuild metadata file. */
export const BUNDLE_METADATA_FILE = path.join(PR_CHECKS_DIR, "..", "meta.json");
export const BUNDLE_METADATA_FILE = path.join(
  PR_CHECKS_DIR,
  "..",
  "build-metadata.json",
);

/** The path of the baseline esbuild metadata file, once extracted from a workflow artifact. */
export const BASELINE_BUNDLE_METADATA_FILE = path.join(
  PR_CHECKS_DIR,
  "build-metadata.json",
);

/** The `src` directory. */
const SOURCE_ROOT = path.join(PR_CHECKS_DIR, "..", "src");
+13
-18
@@ -7,16 +7,20 @@ import { parseArgs } from "node:util";
|
||||
|
||||
import * as yaml from "yaml";
|
||||
|
||||
import { type ApiClient, getApiClient } from "./api-client";
|
||||
import {
|
||||
type ApiClient,
|
||||
CODEQL_ACTION_REPO,
|
||||
getApiClient,
|
||||
TOKEN_OPTION_CONFIG,
|
||||
TokenOption,
|
||||
} from "./api-client";
|
||||
import {
|
||||
OLDEST_SUPPORTED_MAJOR_VERSION,
|
||||
PR_CHECK_EXCLUDED_FILE,
|
||||
} from "./config";
|
||||
|
||||
/** Represents the command-line options. */
|
||||
export interface Options {
|
||||
/** The token to use to authenticate to the GitHub API. */
|
||||
token?: string;
|
||||
export interface Options extends TokenOption {
|
||||
/** The git ref to use the checks for. */
|
||||
ref?: string;
|
||||
/** Whether to actually apply the changes or not. */
|
||||
@@ -25,12 +29,6 @@ export interface Options {
|
||||
verbose: boolean;
|
||||
}
|
||||
|
||||
/** Identifies the CodeQL Action repository. */
|
||||
const codeqlActionRepo = {
|
||||
owner: "github",
|
||||
repo: "codeql-action",
|
||||
};
|
||||
|
||||
/** Represents a configuration of which checks should not be set up as required checks. */
|
||||
export interface Exclusions {
|
||||
/** A list of strings that, if contained in a check name, are excluded. */
|
||||
@@ -100,7 +98,7 @@ async function getChecksFor(
|
||||
const response = await client.paginate(
|
||||
"GET /repos/{owner}/{repo}/commits/{ref}/check-runs",
|
||||
{
|
||||
...codeqlActionRepo,
|
||||
...CODEQL_ACTION_REPO,
|
||||
ref,
|
||||
},
|
||||
);
|
||||
@@ -133,7 +131,7 @@ async function getChecksFor(
|
||||
/** Gets the current list of release branches. */
|
||||
async function getReleaseBranches(client: ApiClient): Promise<string[]> {
|
||||
const refs = await client.rest.git.listMatchingRefs({
|
||||
...codeqlActionRepo,
|
||||
...CODEQL_ACTION_REPO,
|
||||
ref: "heads/releases/v",
|
||||
});
|
||||
return refs.data.map((ref) => ref.ref).sort();
|
||||
@@ -146,7 +144,7 @@ async function patchBranchProtectionRule(
|
||||
checks: Set<string>,
|
||||
) {
|
||||
await client.rest.repos.setStatusCheckContexts({
|
||||
...codeqlActionRepo,
|
||||
...CODEQL_ACTION_REPO,
|
||||
branch,
|
||||
contexts: Array.from(checks),
|
||||
});
|
||||
@@ -163,7 +161,7 @@ async function updateBranch(
|
||||
|
||||
// Query the current set of required checks for this branch.
|
||||
const currentContexts = await client.rest.repos.getAllStatusCheckContexts({
|
||||
...codeqlActionRepo,
|
||||
...CODEQL_ACTION_REPO,
|
||||
branch,
|
||||
});
|
||||
|
||||
@@ -205,10 +203,7 @@ async function updateBranch(
|
||||
async function main(): Promise<void> {
|
||||
const { values: options } = parseArgs({
|
||||
options: {
|
||||
// The token to use to authenticate to the API.
|
||||
token: {
|
||||
type: "string",
|
||||
},
|
||||
...TOKEN_OPTION_CONFIG,
|
||||
// The git ref for which to retrieve the check runs.
|
||||
ref: {
|
||||
type: "string",
|
||||
|
||||
+5
-22
@@ -128,8 +128,6 @@ export async function getGitHubVersionFromApi(
|
||||
|
||||
// Doesn't strictly have to be the meta endpoint as we're only
|
||||
// using the response headers which are available on every request.
|
||||
//
|
||||
// See https://docs.github.com/en/rest/meta/meta#get-github-meta-information.
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
|
||||
const response = await apiClient.rest.meta.get();
|
||||
|
||||
@@ -166,9 +164,6 @@ export async function getGitHubVersion(): Promise<GitHubVersion> {
|
||||
|
||||
/**
|
||||
* Get the path of the currently executing workflow relative to the repository root.
|
||||
*
|
||||
* See https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run
|
||||
* and https://docs.github.com/en/rest/actions/workflows#get-a-workflow.
|
||||
*/
|
||||
export async function getWorkflowRelativePath(): Promise<string> {
|
||||
const repo_nwo = getRepositoryNwo();
|
||||
@@ -257,13 +252,9 @@ export interface ActionsCacheItem {
|
||||
size_in_bytes?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* List all Actions cache entries starting with the provided key prefix and matching the provided ref.
|
||||
*
|
||||
* See https://docs.github.com/en/rest/actions/cache#list-github-actions-caches-for-a-repository.
|
||||
*/
|
||||
/** List all Actions cache entries matching the provided key and ref. */
|
||||
export async function listActionsCaches(
|
||||
keyPrefix: string,
|
||||
key: string,
|
||||
ref?: string,
|
||||
): Promise<ActionsCacheItem[]> {
|
||||
const repositoryNwo = getRepositoryNwo();
|
||||
@@ -273,17 +264,13 @@ export async function listActionsCaches(
|
||||
{
|
||||
owner: repositoryNwo.owner,
|
||||
repo: repositoryNwo.repo,
|
||||
key: keyPrefix,
|
||||
key,
|
||||
ref,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an Actions cache item by its ID.
|
||||
*
|
||||
* See https://docs.github.com/en/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id.
|
||||
*/
|
||||
/** Delete an Actions cache item by its ID. */
|
||||
export async function deleteActionsCache(id: number) {
|
||||
const repositoryNwo = getRepositoryNwo();
|
||||
|
||||
@@ -294,11 +281,7 @@ export async function deleteActionsCache(id: number) {
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve all custom repository properties.
|
||||
*
|
||||
* See https://docs.github.com/en/rest/repos/custom-properties#get-all-custom-property-values-for-a-repository.
|
||||
*/
|
||||
/** Retrieve all custom repository properties. */
|
||||
export async function getRepositoryProperties(repositoryNwo: RepositoryNwo) {
|
||||
return getApiClient().request("GET /repos/:owner/:repo/properties/values", {
|
||||
owner: repositoryNwo.owner,
|
||||
|
||||
+4
-4
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"bundleVersion": "codeql-bundle-v2.25.3",
|
||||
"cliVersion": "2.25.3",
|
||||
"priorBundleVersion": "codeql-bundle-v2.25.2",
|
||||
"priorCliVersion": "2.25.2"
|
||||
"bundleVersion": "codeql-bundle-v2.25.2",
|
||||
"cliVersion": "2.25.2",
|
||||
"priorBundleVersion": "codeql-bundle-v2.25.1",
|
||||
"priorCliVersion": "2.25.1"
|
||||
}
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
import test from "ava";
|
||||
|
||||
import { setupTests } from "../testing-utils";
|
||||
|
||||
import * as json from ".";
|
||||
|
||||
setupTests(test);
|
||||
|
||||
const testSchema = {
|
||||
requiredKey: json.string,
|
||||
};
|
||||
|
||||
const optionalSchema = {
|
||||
optionalKey: json.optional(json.string),
|
||||
};
|
||||
|
||||
test("validateSchema - required properties are required", async (t) => {
|
||||
t.false(json.validateSchema(testSchema, {}));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: undefined }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: null }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: 0 }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: 123 }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: false }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: true }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: [] }));
|
||||
t.false(json.validateSchema(testSchema, { requiredKey: {} }));
|
||||
t.true(json.validateSchema(testSchema, { requiredKey: "" }));
|
||||
t.true(json.validateSchema(testSchema, { requiredKey: "foo" }));
|
||||
});
|
||||
|
||||
test("validateSchema - optional properties are optional", async (t) => {
|
||||
// Optional fields may be absent
|
||||
t.true(json.validateSchema(optionalSchema, {}));
|
||||
t.true(json.validateSchema(optionalSchema, { optionalKey: undefined }));
|
||||
t.true(json.validateSchema(optionalSchema, { optionalKey: null }));
|
||||
|
||||
// But, if present, should have the expected type
|
||||
t.false(json.validateSchema(optionalSchema, { optionalKey: 0 }));
|
||||
t.false(json.validateSchema(optionalSchema, { optionalKey: 123 }));
|
||||
t.false(json.validateSchema(optionalSchema, { optionalKey: false }));
|
||||
t.false(json.validateSchema(optionalSchema, { optionalKey: true }));
|
||||
t.false(json.validateSchema(optionalSchema, { optionalKey: [] }));
|
||||
t.false(json.validateSchema(optionalSchema, { optionalKey: {} }));
|
||||
t.true(json.validateSchema(optionalSchema, { optionalKey: "" }));
|
||||
t.true(json.validateSchema(optionalSchema, { optionalKey: "foo" }));
|
||||
});
|
||||
@@ -36,82 +36,3 @@ export function isStringOrUndefined(
|
||||
): value is string | undefined {
|
||||
return value === undefined || isString(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a field of type `T` in a schema.
|
||||
* Carries a validation function and flag indicating whether the field is required or not.
|
||||
*/
|
||||
export type Validator<T> = {
|
||||
validate: (val: unknown) => val is T;
|
||||
required: boolean;
|
||||
};
|
||||
|
||||
/** Extracts `T` from `Validator<T>`. */
|
||||
export type UnwrapValidator<V> = V extends Validator<infer A> ? A : never;
|
||||
|
||||
/** A validator for string fields in schemas. */
|
||||
export const string = {
|
||||
validate: isString,
|
||||
required: true,
|
||||
} as const satisfies Validator<string>;
|
||||
|
||||
/** Transforms a validator to be optional. */
|
||||
export function optional<T>(validator: Validator<T>) {
|
||||
return {
|
||||
validate: (val: unknown) => {
|
||||
return val === undefined || val === null || validator.validate(val);
|
||||
},
|
||||
required: false,
|
||||
} as const satisfies Validator<T | undefined | null>;
|
||||
}
|
||||
|
||||
/** Represents an arbitrary object schema. */
|
||||
export type Schema = Record<string, Validator<any>>;
|
||||
|
||||
/** Extracts the required keys from `S`. */
|
||||
export type RequiredKeys<S extends Schema> = {
|
||||
[K in keyof S]: S[K]["required"] extends true ? K : never;
|
||||
}[keyof S];
|
||||
|
||||
/** Extracts optional keys from `S`. */
|
||||
export type OptionalKeys<S extends Schema> = {
|
||||
[K in keyof S]: S[K]["required"] extends true ? never : K;
|
||||
}[keyof S];
|
||||
|
||||
/** Constructs an object type corresponding to a schema. */
|
||||
export type FromSchema<S extends Schema> = {
|
||||
[K in RequiredKeys<S>]: UnwrapValidator<S[K]>;
|
||||
} & { [K in OptionalKeys<S>]?: UnwrapValidator<S[K]> };
|
||||
|
||||
/**
|
||||
* Validates that `obj` satisfies at least `schema`. Additional keys are accepted.
|
||||
*
|
||||
* @param schema The schema to validate against.
|
||||
* @param obj The object to validate.
|
||||
* @returns Asserts that `obj` is of the `schema`'s type if validation is successful.
|
||||
*/
|
||||
export function validateSchema<S extends Schema>(
|
||||
schema: S,
|
||||
obj: UnvalidatedObject<any>,
|
||||
): obj is FromSchema<S> {
|
||||
for (const [key, validator] of Object.entries(schema)) {
|
||||
const hasKey = key in obj;
|
||||
|
||||
// If the property is required, but absent, fail.
|
||||
if (validator.required && !hasKey) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the property is required, but undefined or null, fail.
|
||||
if (validator.required && (obj[key] === undefined || obj[key] === null)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the property is present, validate it.
|
||||
if (hasKey && !validator.validate(obj[key])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1,106 +0,0 @@
|
||||
import { ExecutionContext } from "ava";
|
||||
|
||||
import * as json from ".";
|
||||
|
||||
/**
|
||||
* Constructs an object based on `schema` for unit tests.
|
||||
* Assumes that all keys in `schema` have string values.
|
||||
*
|
||||
* @param includeOptional Whether to include optional properties.
|
||||
* @param schema The schema to base the object on.
|
||||
* @returns An object that satisfies `schema`.
|
||||
*/
|
||||
export function makeFromSchema<S extends json.Schema>(
|
||||
includeOptional: boolean,
|
||||
schema: S,
|
||||
): json.FromSchema<S> {
|
||||
const result = {};
|
||||
for (const [key, validator] of Object.entries(schema)) {
|
||||
if (!validator.required && !includeOptional) {
|
||||
continue;
|
||||
}
|
||||
result[key] = `value-for-${key}`;
|
||||
}
|
||||
return result as json.FromSchema<S>;
|
||||
}
|
||||
|
||||
/** Options for `withSchemaMatrix`. */
|
||||
export interface SchemaMatrixOptions {
|
||||
/** Whether cases where the properties are entirely absent should be excluded. */
|
||||
excludeAbsent?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a test matrix of possible objects for `schema`: all required properties
|
||||
* plus all permutations of possible states for the optional properties.
|
||||
*
|
||||
* @param schema The schema to construct a test matrix for.
|
||||
* @param body The test body to call with each value from the test matrix.
|
||||
*/
|
||||
export function withSchemaMatrix<S extends json.Schema>(
|
||||
t: ExecutionContext<any>,
|
||||
schema: S,
|
||||
opts: SchemaMatrixOptions,
|
||||
body: (value: json.FromSchema<S>) => void,
|
||||
): void {
|
||||
// Construct a base object that includes all required properties.
|
||||
const required = makeFromSchema(false, schema);
|
||||
|
||||
// Identify optional properties.
|
||||
const optionalKeys: Array<keyof S> = [];
|
||||
|
||||
for (const [key, validator] of Object.entries(schema)) {
|
||||
if (!validator.required) {
|
||||
optionalKeys.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
const optionalValues = (key: keyof S) => [
|
||||
null,
|
||||
undefined,
|
||||
`value-for-${String(key)}`,
|
||||
];
|
||||
|
||||
// Constructs an array of test objects, starting with `required` and combining it with all
|
||||
// possible states of each optional property. For example, with default settings:
|
||||
//
|
||||
// For { requiredKey: string }, we get: `[{ requiredKey: "some-string-value" }]`
|
||||
//
|
||||
// For { requiredKey: string, optionalKey?: string }, we get:
|
||||
// [ { requiredKey: "some-string-value" },
|
||||
// { requiredKey: "some-string-value", optionalKey: undefined },
|
||||
// { requiredKey: "some-string-value", optionalKey: null },
|
||||
// { requiredKey: "some-string-value", optionalKey: "some-value" },
|
||||
// ]
|
||||
const permutations = (keys: Array<keyof S>) => {
|
||||
if (keys.length === 0) return [required];
|
||||
|
||||
const bases = permutations(keys.slice(1));
|
||||
const result: Array<json.FromSchema<S>> = [];
|
||||
|
||||
const optionalKey = keys[0];
|
||||
for (const base of bases) {
|
||||
if (!opts.excludeAbsent) {
|
||||
// Optional keys can be absent entirely.
|
||||
result.push(base);
|
||||
}
|
||||
|
||||
// Or be present and have one of the `optionalValues`.
|
||||
for (const optionalValue of optionalValues(optionalKey)) {
|
||||
result.push({ ...base, [optionalKey]: optionalValue });
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
// Call `body` for all test cases.
|
||||
const testCases = permutations(optionalKeys);
|
||||
for (const testCase of testCases) {
|
||||
try {
|
||||
body(testCase);
|
||||
} catch (err) {
|
||||
t.log(testCase);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
+1
-133
@@ -7,7 +7,7 @@ import * as sinon from "sinon";
|
||||
|
||||
import * as actionsUtil from "../actions-util";
|
||||
import * as apiClient from "../api-client";
|
||||
import type { ResolveDatabaseOutput } from "../codeql";
|
||||
import { ResolveDatabaseOutput } from "../codeql";
|
||||
import * as gitUtils from "../git-utils";
|
||||
import { BuiltInLanguage } from "../languages";
|
||||
import { getRunnerLogger } from "../logging";
|
||||
@@ -23,7 +23,6 @@ import {
|
||||
downloadOverlayBaseDatabaseFromCache,
|
||||
getCacheRestoreKeyPrefix,
|
||||
getCacheSaveKey,
|
||||
getCodeQlVersionsForOverlayBaseDatabases,
|
||||
} from "./caching";
|
||||
import { OverlayDatabaseMode } from "./overlay-database-mode";
|
||||
|
||||
@@ -286,134 +285,3 @@ test.serial("overlay-base database cache keys remain stable", async (t) => {
|
||||
`Expected save key "${saveKey}" to start with restore key prefix "${restoreKeyPrefix}"`,
|
||||
);
|
||||
});
|
||||
|
||||
test.serial(
|
||||
"getCodeQlVersionsForOverlayBaseDatabases returns unique versions sorted latest first",
|
||||
async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
|
||||
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
|
||||
sinon.stub(apiClient, "listActionsCaches").resolves([
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-abc123-1-1",
|
||||
},
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.24.1-def456-2-1",
|
||||
},
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-javascript_python-2.23.0-ghi789-3-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await getCodeQlVersionsForOverlayBaseDatabases(
|
||||
["javascript", "python"],
|
||||
logger,
|
||||
);
|
||||
t.deepEqual(result, ["2.24.1", "2.23.0"]);
|
||||
},
|
||||
);
|
||||
|
||||
test.serial(
|
||||
"getCodeQlVersionsForOverlayBaseDatabases returns empty list when no caches exist",
|
||||
async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
|
||||
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
|
||||
sinon.stub(apiClient, "listActionsCaches").resolves([]);
|
||||
|
||||
const result = await getCodeQlVersionsForOverlayBaseDatabases(
|
||||
["python"],
|
||||
logger,
|
||||
);
|
||||
t.deepEqual(result, []);
|
||||
},
|
||||
);
|
||||
|
||||
test.serial(
|
||||
"getCodeQlVersionsForOverlayBaseDatabases returns empty list when cache keys are unparseable",
|
||||
async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
|
||||
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
|
||||
sinon.stub(apiClient, "listActionsCaches").resolves([
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-python-malformed",
|
||||
},
|
||||
{ key: undefined },
|
||||
]);
|
||||
|
||||
const result = await getCodeQlVersionsForOverlayBaseDatabases(
|
||||
["python"],
|
||||
logger,
|
||||
);
|
||||
t.deepEqual(result, []);
|
||||
},
|
||||
);
|
||||
|
||||
test.serial(
|
||||
"getCodeQlVersionsForOverlayBaseDatabases returns the single version when only one cache exists",
|
||||
async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
|
||||
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
|
||||
sinon.stub(apiClient, "listActionsCaches").resolves([
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-cpp-2.25.0-abc123-1-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await getCodeQlVersionsForOverlayBaseDatabases(
|
||||
["cpp"],
|
||||
logger,
|
||||
);
|
||||
t.deepEqual(result, ["2.25.0"]);
|
||||
},
|
||||
);
|
||||
|
||||
test.serial(
|
||||
"getCodeQlVersionsForOverlayBaseDatabases resolves language aliases",
|
||||
async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
// The alias `c++` should be resolved to "cpp" and match cache entries keyed with "cpp"
|
||||
|
||||
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
|
||||
sinon.stub(apiClient, "listActionsCaches").resolves([
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-cpp-2.25.0-abc123-1-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await getCodeQlVersionsForOverlayBaseDatabases(
|
||||
["c++"],
|
||||
logger,
|
||||
);
|
||||
t.deepEqual(result, ["2.25.0"]);
|
||||
},
|
||||
);
|
||||
|
||||
test.serial(
|
||||
"getCodeQlVersionsForOverlayBaseDatabases ignores nightly versions with build metadata",
|
||||
async (t) => {
|
||||
const logger = getRunnerLogger(true);
|
||||
|
||||
sinon.stub(apiClient, "getAutomationID").resolves("test-automation-id/");
|
||||
sinon.stub(apiClient, "listActionsCaches").resolves([
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-python-2.25.0-abc123-1-1",
|
||||
},
|
||||
{
|
||||
// Nightly release with semver build metadata; should be ignored.
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-python-2.26.0+202604211234-def456-2-1",
|
||||
},
|
||||
{
|
||||
key: "codeql-overlay-base-database-1-c5666c509a2d9895-python-2.24.0-ghi789-3-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const result = await getCodeQlVersionsForOverlayBaseDatabases(
|
||||
["python"],
|
||||
logger,
|
||||
);
|
||||
t.deepEqual(result, ["2.25.0", "2.24.0"]);
|
||||
},
|
||||
);
|
||||
|
||||
+12
-104
@@ -1,20 +1,18 @@
|
||||
import * as fs from "fs";
|
||||
|
||||
import * as actionsCache from "@actions/cache";
|
||||
import * as semver from "semver";
|
||||
|
||||
import {
|
||||
getRequiredInput,
|
||||
getWorkflowRunAttempt,
|
||||
getWorkflowRunID,
|
||||
} from "../actions-util";
|
||||
import { getAutomationID, listActionsCaches } from "../api-client";
|
||||
import { getAutomationID } from "../api-client";
|
||||
import { createCacheKeyHash } from "../caching-utils";
|
||||
import { type CodeQL } from "../codeql";
|
||||
import { type Config } from "../config-utils";
|
||||
import { getCommitOid } from "../git-utils";
|
||||
import { type Language, parseBuiltInLanguage } from "../languages";
|
||||
import { type Logger, withGroupAsync } from "../logging";
|
||||
import { Logger, withGroupAsync } from "../logging";
|
||||
import {
|
||||
CleanupLevel,
|
||||
getBaseDatabaseOidsFilePath,
|
||||
@@ -406,17 +404,7 @@ export async function getCacheRestoreKeyPrefix(
|
||||
config: Config,
|
||||
codeQlVersion: string,
|
||||
): Promise<string> {
|
||||
return `${await getCacheKeyPrefixBase(config.languages)}${codeQlVersion}-`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the cache key prefix for overlay-base databases, excluding the
|
||||
* CodeQL version.
|
||||
*/
|
||||
async function getCacheKeyPrefixBase(
|
||||
parsedLanguages: Language[],
|
||||
): Promise<string> {
|
||||
const languagesComponent = [...parsedLanguages].sort().join("_");
|
||||
const languages = [...config.languages].sort().join("_");
|
||||
|
||||
const cacheKeyComponents = {
|
||||
automationID: await getAutomationID(),
|
||||
@@ -424,97 +412,17 @@ async function getCacheKeyPrefixBase(
|
||||
};
|
||||
const componentsHash = createCacheKeyHash(cacheKeyComponents);
|
||||
|
||||
// For a cached overlay-base database to be considered compatible for overlay
|
||||
// analysis, all components in the cache restore key must match:
|
||||
//
|
||||
// CACHE_PREFIX: distinguishes overlay-base databases from other cache objects
|
||||
// CACHE_VERSION: cache format version
|
||||
// componentsHash: hash of additional components (see above for details)
|
||||
// languagesComponent: the languages included in the overlay-base database
|
||||
// languages: the languages included in the overlay-base database
|
||||
// codeQlVersion: CodeQL bundle version
|
||||
//
|
||||
// Technically we can also include languages in the componentsHash, but
|
||||
// including them explicitly in the cache key makes it easier to debug and
|
||||
// understand the cache key structure.
|
||||
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languagesComponent}-`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Searches the GitHub Actions cache for overlay-base databases matching the given languages, and
|
||||
* returns all stable CodeQL versions found across matching cache entries.
|
||||
*
|
||||
* Note that we do not guarantee that the cache entry for these versions of CodeQL will still be
|
||||
* present by the time we attempt to restore the cache. We could achieve that with a download retry
|
||||
* loop, but we expect that if there is sufficient Actions cache contention that an overlay-base
|
||||
* cache entry for a particular CodeQL version is evicted before we can use it, then it is likely
|
||||
* that the same thing will happen to other overlay-base cache entries, and therefore we will not be
|
||||
* able to use overlay.
|
||||
*
|
||||
* @returns Unique stable CodeQL versions found in cached overlay-base databases, sorted from latest to
|
||||
* earliest, or undefined if one of the languages is not a built-in language.
|
||||
*/
|
||||
export async function getCodeQlVersionsForOverlayBaseDatabases(
|
||||
rawLanguages: string[],
|
||||
logger: Logger,
|
||||
): Promise<string[] | undefined> {
|
||||
const languages = rawLanguages.map(parseBuiltInLanguage);
|
||||
if (languages.includes(undefined)) {
|
||||
logger.warning(
|
||||
"One or more provided languages are not recognized as built-in languages. " +
|
||||
"Skipping searching for overlay-base databases in cache.",
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
const cacheKeyPrefix = await getCacheKeyPrefixBase(
|
||||
languages.filter((l) => l !== undefined),
|
||||
);
|
||||
|
||||
logger.debug(
|
||||
`Searching for overlay-base databases in Actions cache with ` +
|
||||
`prefix ${cacheKeyPrefix}`,
|
||||
);
|
||||
|
||||
const caches = await listActionsCaches(cacheKeyPrefix);
|
||||
|
||||
if (caches.length === 0) {
|
||||
logger.info("No overlay-base databases found in Actions cache.");
|
||||
return [];
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`Found ${caches.length} overlay-base ` +
|
||||
`${caches.length === 1 ? "database" : "databases"} in the Actions cache.`,
|
||||
);
|
||||
|
||||
// Parse CodeQL versions from cache keys, matching only stable releases.
|
||||
//
|
||||
// After the prefix, the remaining key format starts with `${codeQlVersion}-`. Nightlies will have
|
||||
// a suffix like `+202604201548` that will break the match.
|
||||
//
|
||||
// Caveat: this relies on the fact that we haven't released any CodeQL bundles with the
|
||||
// `x.y.z-<pre-release>` semver format which does not interact well with the current overlay base
|
||||
// DB cache key format.
|
||||
const versionRegex = /^([\d.]+)-/;
|
||||
const versionSet = new Set<string>();
|
||||
|
||||
for (const cache of caches) {
|
||||
if (!cache.key) continue;
|
||||
const suffix = cache.key.substring(cacheKeyPrefix.length);
|
||||
const match = suffix.match(versionRegex);
|
||||
if (match && semver.valid(match[1])) {
|
||||
versionSet.add(match[1]);
|
||||
}
|
||||
}
|
||||
|
||||
if (versionSet.size === 0) {
|
||||
logger.info(
|
||||
"Could not parse any CodeQL versions from overlay-base database " +
|
||||
"cache keys.",
|
||||
);
|
||||
return [];
|
||||
}
|
||||
|
||||
const versions = [...versionSet].sort(semver.rcompare);
|
||||
|
||||
logger.info(
|
||||
`Found overlay databases for the following CodeQL versions in the Actions cache: ${versions.join(", ")}`,
|
||||
);
|
||||
|
||||
return versions;
|
||||
// Technically we can also include languages and codeQlVersion in the
|
||||
// componentsHash, but including them explicitly in the cache key makes it
|
||||
// easier to debug and understand the cache key structure.
|
||||
return `${CACHE_PREFIX}-${CACHE_VERSION}-${componentsHash}-${languages}-${codeQlVersion}-`;
|
||||
}
|
||||
|
||||
@@ -198,7 +198,6 @@ async function startProxy(
|
||||
.map((credential) => ({
|
||||
type: credential.type,
|
||||
url: credential.url,
|
||||
"replaces-base": credential["replaces-base"],
|
||||
}));
|
||||
core.setOutput("proxy_urls", JSON.stringify(registry_urls));
|
||||
|
||||
|
||||
+127
-122
@@ -8,8 +8,6 @@ import sinon from "sinon";
|
||||
import * as apiClient from "./api-client";
|
||||
import * as defaults from "./defaults.json";
|
||||
import { setUpFeatureFlagTests } from "./feature-flags/testing-util";
|
||||
import { UnvalidatedObject, validateSchema } from "./json";
|
||||
import { makeFromSchema } from "./json/testing-util";
|
||||
import { BuiltInLanguage } from "./languages";
|
||||
import { getRunnerLogger, Logger } from "./logging";
|
||||
import * as startProxyExports from "./start-proxy";
|
||||
@@ -351,46 +349,131 @@ test("getCredentials throws an error when non-printable characters are used", as
|
||||
}
|
||||
});
|
||||
|
||||
for (const oidcSchemaInfo of startProxyExports.oidcSchemas) {
|
||||
test(`getCredentials throws when non-printable characters are used (${oidcSchemaInfo.name} OIDC)`, (t) => {
|
||||
const validCredential = makeFromSchema(true, oidcSchemaInfo.schema);
|
||||
for (const key of Object.keys(validCredential)) {
|
||||
const invalidAuthConfig = {
|
||||
...validCredential,
|
||||
[key]: "123\x00",
|
||||
};
|
||||
const invalidCredential: startProxyExports.RawCredential = {
|
||||
type: "nuget_feed",
|
||||
host: `${key}.nuget.pkg.github.com`,
|
||||
...invalidAuthConfig,
|
||||
};
|
||||
const credentialsInput = toEncodedJSON([invalidCredential]);
|
||||
const validAzureCredential: startProxyExports.AzureConfig = {
|
||||
"tenant-id": "12345678-1234-1234-1234-123456789012",
|
||||
"client-id": "abcdef01-2345-6789-abcd-ef0123456789",
|
||||
};
|
||||
|
||||
t.throws(
|
||||
() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
credentialsInput,
|
||||
undefined,
|
||||
),
|
||||
{
|
||||
message:
|
||||
"Invalid credentials - fields must contain only printable characters",
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
const validAwsCredential: startProxyExports.AWSConfig = {
|
||||
"aws-region": "us-east-1",
|
||||
"account-id": "123456789012",
|
||||
"role-name": "MY_ROLE",
|
||||
domain: "MY_DOMAIN",
|
||||
"domain-owner": "987654321098",
|
||||
audience: "custom-audience",
|
||||
};
|
||||
|
||||
const validJFrogCredential: startProxyExports.JFrogConfig = {
|
||||
"jfrog-oidc-provider-name": "MY_PROVIDER",
|
||||
audience: "jfrog-audience",
|
||||
"identity-mapping-name": "my-mapping",
|
||||
};
|
||||
|
||||
test("getCredentials throws an error when non-printable characters are used for Azure OIDC", (t) => {
|
||||
for (const key of Object.keys(validAzureCredential)) {
|
||||
const invalidAzureCredential = {
|
||||
...validAzureCredential,
|
||||
[key]: "123\x00",
|
||||
};
|
||||
const invalidCredential: startProxyExports.RawCredential = {
|
||||
type: "nuget_feed",
|
||||
host: `${key}.nuget.pkg.github.com`,
|
||||
...invalidAzureCredential,
|
||||
};
|
||||
const credentialsInput = toEncodedJSON([invalidCredential]);
|
||||
|
||||
t.throws(
|
||||
() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
credentialsInput,
|
||||
undefined,
|
||||
),
|
||||
{
|
||||
message:
|
||||
"Invalid credentials - fields must contain only printable characters",
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test("getCredentials throws an error when non-printable characters are used for AWS OIDC", (t) => {
|
||||
for (const key of Object.keys(validAwsCredential)) {
|
||||
const invalidAwsCredential = {
|
||||
...validAwsCredential,
|
||||
[key]: "123\x00",
|
||||
};
|
||||
const invalidCredential: startProxyExports.RawCredential = {
|
||||
type: "nuget_feed",
|
||||
host: `${key}.nuget.pkg.github.com`,
|
||||
...invalidAwsCredential,
|
||||
};
|
||||
const credentialsInput = toEncodedJSON([invalidCredential]);
|
||||
|
||||
t.throws(
|
||||
() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
credentialsInput,
|
||||
undefined,
|
||||
),
|
||||
{
|
||||
message:
|
||||
"Invalid credentials - fields must contain only printable characters",
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test("getCredentials throws an error when non-printable characters are used for JFrog OIDC", (t) => {
|
||||
for (const key of Object.keys(validJFrogCredential)) {
|
||||
const invalidJFrogCredential = {
|
||||
...validJFrogCredential,
|
||||
[key]: "123\x00",
|
||||
};
|
||||
const invalidCredential: startProxyExports.RawCredential = {
|
||||
type: "nuget_feed",
|
||||
host: `${key}.nuget.pkg.github.com`,
|
||||
...invalidJFrogCredential,
|
||||
};
|
||||
const credentialsInput = toEncodedJSON([invalidCredential]);
|
||||
|
||||
t.throws(
|
||||
() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
credentialsInput,
|
||||
undefined,
|
||||
),
|
||||
{
|
||||
message:
|
||||
"Invalid credentials - fields must contain only printable characters",
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
test("getCredentials accepts OIDC configurations", (t) => {
|
||||
const oidcConfigurations = startProxyExports.oidcSchemas.map(
|
||||
(schemaInfo) => ({
|
||||
const oidcConfigurations = [
|
||||
{
|
||||
type: "nuget_feed",
|
||||
host: `${schemaInfo.name.toLowerCase()}.pkg.github.com`,
|
||||
...makeFromSchema(true, schemaInfo.schema),
|
||||
}),
|
||||
);
|
||||
host: "azure.pkg.github.com",
|
||||
...validAzureCredential,
|
||||
},
|
||||
{
|
||||
type: "nuget_feed",
|
||||
host: "aws.pkg.github.com",
|
||||
...validAwsCredential,
|
||||
},
|
||||
{
|
||||
type: "nuget_feed",
|
||||
host: "jfrog.pkg.github.com",
|
||||
...validJFrogCredential,
|
||||
},
|
||||
];
|
||||
|
||||
const credentials = startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
@@ -398,20 +481,12 @@ test("getCredentials accepts OIDC configurations", (t) => {
|
||||
toEncodedJSON(oidcConfigurations),
|
||||
BuiltInLanguage.csharp,
|
||||
);
|
||||
t.is(credentials.length, startProxyExports.oidcSchemas.length);
|
||||
t.is(credentials.length, 3);
|
||||
|
||||
t.assert(credentials.every((c) => c.type === "nuget_feed"));
|
||||
|
||||
for (const oidcSchemaInfo of startProxyExports.oidcSchemas) {
|
||||
t.assert(
|
||||
credentials.some((c) =>
|
||||
validateSchema(
|
||||
oidcSchemaInfo.schema,
|
||||
c as unknown as UnvalidatedObject<any>,
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
t.assert(credentials.some((c) => startProxyExports.isAzureConfig(c)));
|
||||
t.assert(credentials.some((c) => startProxyExports.isAWSConfig(c)));
|
||||
t.assert(credentials.some((c) => startProxyExports.isJFrogConfig(c)));
|
||||
});
|
||||
|
||||
const getCredentialsMacro = test.macro({
|
||||
@@ -457,7 +532,7 @@ test(
|
||||
t.is(results[0].type, "git_server");
|
||||
t.is(results[0].host, "https://github.com/");
|
||||
|
||||
if (startProxyExports.hasUsernameAndPassword(results[0])) {
|
||||
if (startProxyExports.isUsernamePassword(results[0])) {
|
||||
t.assert(results[0].password?.startsWith("ghp_"));
|
||||
} else {
|
||||
t.fail("Expected a `UsernamePassword`-based credential.");
|
||||
@@ -488,7 +563,7 @@ test(
|
||||
t.is(results[0].type, "git_server");
|
||||
t.is(results[0].host, "https://github.com/");
|
||||
|
||||
if (startProxyExports.hasUsernameAndPassword(results[0])) {
|
||||
if (startProxyExports.isUsernamePassword(results[0])) {
|
||||
t.assert(results[0].password?.startsWith("ghp_"));
|
||||
} else {
|
||||
t.fail("Expected a `UsernamePassword`-based credential.");
|
||||
@@ -564,76 +639,6 @@ test(
|
||||
},
|
||||
);
|
||||
|
||||
test("getCredentials validates 'replaces-base' correctly", async (t) => {
|
||||
// Valid cases.
|
||||
const credentialsInput = toEncodedJSON([
|
||||
{
|
||||
type: "maven_repository",
|
||||
host: "maven1.pkg.github.com",
|
||||
token: "abc",
|
||||
"replaces-base": false,
|
||||
},
|
||||
{
|
||||
type: "maven_repository",
|
||||
host: "maven2.pkg.github.com",
|
||||
token: "def",
|
||||
"replaces-base": true,
|
||||
},
|
||||
{
|
||||
type: "maven_repository",
|
||||
host: "maven3.pkg.github.com",
|
||||
token: "ghi",
|
||||
},
|
||||
]);
|
||||
|
||||
const credentials = startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
credentialsInput,
|
||||
BuiltInLanguage.java,
|
||||
false,
|
||||
);
|
||||
|
||||
t.is(credentials.length, 3);
|
||||
t.true(credentials.some((c) => c["replaces-base"] === true));
|
||||
t.true(credentials.some((c) => c["replaces-base"] === false));
|
||||
t.true(credentials.some((c) => c["replaces-base"] === undefined));
|
||||
|
||||
// Invalid cases.
|
||||
const baseInvalid = {
|
||||
type: "maven_repository",
|
||||
host: "maven4.pkg.github.com",
|
||||
token: "jkl",
|
||||
};
|
||||
t.throws(() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
toEncodedJSON([{ ...baseInvalid, "replaces-base": null }]),
|
||||
BuiltInLanguage.actions,
|
||||
false,
|
||||
),
|
||||
);
|
||||
t.throws(() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
toEncodedJSON([{ ...baseInvalid, "replaces-base": 123 }]),
|
||||
BuiltInLanguage.actions,
|
||||
false,
|
||||
),
|
||||
);
|
||||
t.throws(() =>
|
||||
startProxyExports.getCredentials(
|
||||
getRunnerLogger(true),
|
||||
undefined,
|
||||
toEncodedJSON([{ ...baseInvalid, "replaces-base": "true" }]),
|
||||
BuiltInLanguage.actions,
|
||||
false,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
test("getCredentials returns all credentials for Actions when using LANGUAGE_TO_REGISTRY_TYPE", async (t) => {
|
||||
const credentialsInput = toEncodedJSON(mixedCredentials);
|
||||
|
||||
|
||||
+83
-23
@@ -24,12 +24,20 @@ import {
  Address,
  Registry,
  Credential,
  hasToken,
  hasUsernameAndPassword,
  AuthConfig,
  isToken,
  isAzureConfig,
  Token,
  UsernamePassword,
  AzureConfig,
  isAWSConfig,
  AWSConfig,
  isJFrogConfig,
  JFrogConfig,
  isUsernamePassword,
  hasUsername,
  RawCredential,
} from "./start-proxy/types";
import { getAuthConfig } from "./start-proxy/validation";
import {
  ActionName,
  createStatusReportBase,
@@ -243,6 +251,75 @@ function getRegistryAddress(
  }
}

/** Extracts an `AuthConfig` value from `config`. */
export function getAuthConfig(
  config: json.UnvalidatedObject<AuthConfig>,
): AuthConfig {
  // Start by checking for the OIDC configurations, since they have required properties
  // which we can use to identify them.
  if (isAzureConfig(config)) {
    return {
      "tenant-id": config["tenant-id"],
      "client-id": config["client-id"],
    } satisfies AzureConfig;
  } else if (isAWSConfig(config)) {
    return {
      "aws-region": config["aws-region"],
      "account-id": config["account-id"],
      "role-name": config["role-name"],
      domain: config.domain,
      "domain-owner": config["domain-owner"],
      audience: config.audience,
    } satisfies AWSConfig;
  } else if (isJFrogConfig(config)) {
    return {
      "jfrog-oidc-provider-name": config["jfrog-oidc-provider-name"],
      "identity-mapping-name": config["identity-mapping-name"],
      audience: config.audience,
    } satisfies JFrogConfig;
  } else if (isToken(config)) {
    // There are three scenarios for non-OIDC authentication based on the registry type:
    //
    // 1. `username`+`token`
    // 2. A `token` that combines the username and actual token, separated by ':'.
    // 3. `username`+`password`
    //
    // In all three cases, all fields are optional. If the `token` field is present,
    // we accept the configuration as a `Token` typed configuration, with the `token`
    // value and an optional `username`. Otherwise, we accept the configuration
    // typed as `UsernamePassword` (in the `else` clause below) with optional
    // username and password. I.e. a private registry type that uses 1. or 2.,
    // but has no `token` configured, will get accepted as `UsernamePassword` here.

    if (isDefined(config.token)) {
      // Mask token to reduce chance of accidental leakage in logs, if we have one.
      core.setSecret(config.token);
    }

    return { username: config.username, token: config.token } satisfies Token;
  } else {
    let username: string | undefined = undefined;
    let password: string | undefined = undefined;

    // Both "username" and "password" are optional. If we have reached this point, we need
    // to validate which of them are present and that they have the correct type if so.
    if ("password" in config && json.isString(config.password)) {
      // Mask password to reduce chance of accidental leakage in logs, if we have one.
      core.setSecret(config.password);
      password = config.password;
    }
    if ("username" in config && json.isString(config.username)) {
      username = config.username;
    }

    // Return the `UsernamePassword` object. Both username and password may be undefined.
    return {
      username,
      password,
    } satisfies UsernamePassword;
  }
}

// getCredentials returns registry credentials from action inputs.
// It prefers `registries_credentials` over `registry_secrets`.
// If neither is set, it returns an empty array.
@@ -331,11 +408,11 @@ export function getCredentials(
    const noUsername =
      !hasUsername(authConfig) || !isDefined(authConfig.username);
    const passwordIsPAT =
      hasUsernameAndPassword(authConfig) &&
      isUsernamePassword(authConfig) &&
      isDefined(authConfig.password) &&
      isPAT(authConfig.password);
    const tokenIsPAT =
      hasToken(authConfig) &&
      isToken(authConfig) &&
      isDefined(authConfig.token) &&
      isPAT(authConfig.token);

@@ -347,25 +424,8 @@ export function getCredentials(
      );
    }

    // Construct the base credential object.
    const baseCredential: Omit<Registry, keyof Address> = { type: e.type };

    // If "replaces-base" is present, it must be a boolean.
    if ("replaces-base" in e) {
      if (
        isDefined(e["replaces-base"]) &&
        typeof e["replaces-base"] === "boolean"
      ) {
        baseCredential["replaces-base"] = e["replaces-base"];
      } else {
        throw new ConfigurationError(
          "Invalid credentials - 'replaces-base' must be a boolean",
        );
      }
    }

    out.push({
      ...baseCredential,
      type: e.type,
      ...authConfig,
      ...address,
    });
@@ -1,6 +1,5 @@
import test from "ava";

import { makeFromSchema, withSchemaMatrix } from "../json/testing-util";
import { setupTests } from "../testing-utils";

import * as types from "./types";
@@ -27,38 +26,6 @@ const validJFrogCredential: types.JFrogConfig = {
  "identity-mapping-name": "my-mapping",
};

test("hasUsername", (t) => {
  // Reject the case where `username` is missing.
  t.false(types.hasUsername({}));

  // Test all cases where `username` is present.
  withSchemaMatrix(
    t,
    types.usernameSchema,
    { excludeAbsent: true },
    (value) => {
      t.true(types.hasUsername(value));
    },
  );
});

test("hasUsernameAndPassword", (t) => {
  // Reject cases where `username` or `password` are missing.
  t.false(types.hasUsernameAndPassword({}));
  t.false(types.hasUsernameAndPassword({ username: "foo" }));
  t.false(types.hasUsernameAndPassword({ password: "foo" }));

  // Test all cases where both `username` and `password` are present.
  withSchemaMatrix(
    t,
    types.usernamePasswordSchema,
    { excludeAbsent: true },
    (value) => {
      t.true(types.hasUsernameAndPassword(value));
    },
  );
});

test("credentialToStr - pretty-prints valid username+password configurations", (t) => {
  const secret = "password123";
  const credential: types.Credential = {
@@ -140,46 +107,13 @@ test("credentialToStr - pretty-prints valid JFrog OIDC configurations", (t) => {
  );
});

test("credentialToStr - pretty-prints valid Cloudsmith OIDC configurations", (t) => {
  const credential: types.Credential = {
    type: "maven_credential",
    url: "https://localhost",
    ...(makeFromSchema(
      true,
      types.cloudsmithConfigSchema,
    ) as types.CloudsmithConfig),
  };

  const str = types.credentialToStr(credential);

  t.is(
    "Type: maven_credential; Url: https://localhost; Cloudsmith Namespace: value-for-namespace; Cloudsmith Service Slug: value-for-service-slug; Cloudsmith API Host: value-for-api-host;",
    str,
  );
});

test("credentialToStr - pretty-prints valid GCP OIDC configurations", (t) => {
  const credential: types.Credential = {
    type: "maven_credential",
    url: "https://localhost",
    ...(makeFromSchema(true, types.gcpConfigSchema) as types.GCPConfig),
  };

  const str = types.credentialToStr(credential);

  t.is(
    "Type: maven_credential; Url: https://localhost; GCP Workload Identity Provider: value-for-workload-identity-provider; GCP Service Account: value-for-service-account; GCP Audience: value-for-audience;",
    str,
  );
});

test("credentialToStr - hides passwords", (t) => {
  const secret = "password123";
  const credential = {
    type: "maven_credential",
    password: secret,
    url: "https://localhost",
  } satisfies types.Credential;
  };

  const str = types.credentialToStr(credential);

@@ -193,7 +127,7 @@ test("credentialToStr - hides tokens", (t) => {
    type: "maven_credential",
    token: secret,
    url: "https://localhost",
  } satisfies types.Credential;
  };

  const str = types.credentialToStr(credential);
+88
-134
@@ -9,177 +9,144 @@ import { isDefined } from "../util";
|
||||
*/
|
||||
export type RawCredential = UnvalidatedObject<Credential>;
|
||||
|
||||
/** A schema for credential objects with a username. */
|
||||
export const usernameSchema = {
|
||||
/** The username needed to authenticate to the package registry, if any. */
|
||||
username: json.optional(json.string),
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/** Usernames may be present for both authentication with tokens or passwords. */
|
||||
export type Username = json.FromSchema<typeof usernameSchema>;
|
||||
export type Username = {
|
||||
/** The username needed to authenticate to the package registry, if any. */
|
||||
username?: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* Narrows `config` to `Username` if `config` has a `username` property.
|
||||
* Not used for validation. Assumes that `config` is already a validated `AuthConfig`.
|
||||
*/
|
||||
/** Decides whether `config` has a username. */
|
||||
export function hasUsername(config: AuthConfig): config is Username {
|
||||
return "username" in config;
|
||||
}
|
||||
|
||||
/** A schema for credential objects with a username and password. */
|
||||
export const usernamePasswordSchema = {
|
||||
/** The password needed to authenticate to the package registry, if any. */
|
||||
password: json.optional(json.string),
|
||||
...usernameSchema,
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/**
|
||||
* Fields expected for authentication based on a username and password.
|
||||
* Both username and password are optional.
|
||||
*/
|
||||
export type UsernamePassword = json.FromSchema<typeof usernamePasswordSchema>;
|
||||
export type UsernamePassword = {
|
||||
/** The password needed to authenticate to the package registry, if any. */
|
||||
password?: string;
|
||||
} & Username;
|
||||
|
||||
/**
|
||||
* Narrows `config` to `UsernamePassword` if it has a `username` and `password` property.
|
||||
* Not used for validation. Assumes that `config` is already a validated `AuthConfig`.
|
||||
*/
|
||||
export function hasUsernameAndPassword(
|
||||
/** Decides whether `config` is based on a username and password. */
|
||||
export function isUsernamePassword(
|
||||
config: AuthConfig,
|
||||
): config is UsernamePassword {
|
||||
return hasUsername(config) && "password" in config;
|
||||
}
|
||||
|
||||
/** A schema for credential objects for token-based authentication. */
|
||||
export const tokenSchema = {
|
||||
/** The token needed to authenticate to the package registry, if any. */
|
||||
token: json.optional(json.string),
|
||||
...usernameSchema,
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/**
|
||||
* Fields expected for token-based authentication.
|
||||
* Both username and token are optional.
|
||||
*/
|
||||
export type Token = json.FromSchema<typeof tokenSchema>;
|
||||
|
||||
/**
|
||||
* Narrows `config` to `Token` if it has a `token` property.
|
||||
* Not used for validation. Assumes that `config` is already a validated `AuthConfig`.
|
||||
*/
|
||||
export function hasToken(config: AuthConfig): config is Token {
|
||||
return "token" in config;
|
||||
}
|
||||
export type Token = {
|
||||
/** The token needed to authenticate to the package registry, if any. */
|
||||
token?: string;
|
||||
} & Username;
|
||||
|
||||
/** Decides whether `config` is token-based. */
|
||||
export function isToken(
|
||||
config: UnvalidatedObject<AuthConfig>,
|
||||
): config is Token {
|
||||
return "token" in config && json.validateSchema(tokenSchema, config);
|
||||
// The "username" field is optional, but should be a string if present.
|
||||
if ("username" in config && !json.isStringOrUndefined(config.username)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// The "token" field is required, and must be a string or undefined.
|
||||
return "token" in config && json.isStringOrUndefined(config.token);
|
||||
}
|
||||
|
||||
/** A schema for Azure OIDC configurations. */
|
||||
export const azureConfigSchema = {
|
||||
"tenant-id": json.string,
|
||||
"client-id": json.string,
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/** Configuration for Azure OIDC. */
|
||||
export type AzureConfig = json.FromSchema<typeof azureConfigSchema>;
|
||||
export type AzureConfig = { "tenant-id": string; "client-id": string };
|
||||
|
||||
/** Decides whether `config` is an Azure OIDC configuration. */
|
||||
export function isAzureConfig(
|
||||
config: UnvalidatedObject<AuthConfig>,
|
||||
): config is AzureConfig {
|
||||
return json.validateSchema(azureConfigSchema, config);
|
||||
return (
|
||||
"tenant-id" in config &&
|
||||
"client-id" in config &&
|
||||
isDefined(config["tenant-id"]) &&
|
||||
isDefined(config["client-id"]) &&
|
||||
json.isString(config["tenant-id"]) &&
|
||||
json.isString(config["client-id"])
|
||||
);
|
||||
}
|
||||
|
||||
/** A schema for AWS OIDC configurations. */
|
||||
export const awsConfigSchema = {
|
||||
"aws-region": json.string,
|
||||
"account-id": json.string,
|
||||
"role-name": json.string,
|
||||
domain: json.string,
|
||||
"domain-owner": json.string,
|
||||
audience: json.optional(json.string),
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/** Configuration for AWS OIDC. */
|
||||
export type AWSConfig = json.FromSchema<typeof awsConfigSchema>;
|
||||
export type AWSConfig = {
|
||||
"aws-region": string;
|
||||
"account-id": string;
|
||||
"role-name": string;
|
||||
domain: string;
|
||||
"domain-owner": string;
|
||||
audience?: string;
|
||||
};
|
||||
|
||||
/** Decides whether `config` is an AWS OIDC configuration. */
|
||||
export function isAWSConfig(
|
||||
config: UnvalidatedObject<AuthConfig>,
|
||||
): config is AWSConfig {
|
||||
return json.validateSchema(awsConfigSchema, config);
|
||||
// All of these properties are required.
|
||||
const requiredProperties = [
|
||||
"aws-region",
|
||||
"account-id",
|
||||
"role-name",
|
||||
"domain",
|
||||
"domain-owner",
|
||||
];
|
||||
|
||||
for (const property of requiredProperties) {
|
||||
if (
|
||||
!(property in config) ||
|
||||
!isDefined(config[property]) ||
|
||||
!json.isString(config[property])
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// The "audience" field is optional, but should be a string if present.
|
||||
if ("audience" in config && !json.isStringOrUndefined(config.audience)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/** A schema for JFrog OIDC configurations. */
|
||||
export const jfrogConfigSchema = {
|
||||
"jfrog-oidc-provider-name": json.string,
|
||||
audience: json.optional(json.string),
|
||||
"identity-mapping-name": json.optional(json.string),
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/** Configuration for JFrog OIDC. */
|
||||
export type JFrogConfig = json.FromSchema<typeof jfrogConfigSchema>;
|
||||
export type JFrogConfig = {
|
||||
"jfrog-oidc-provider-name": string;
|
||||
audience?: string;
|
||||
"identity-mapping-name"?: string;
|
||||
};
|
||||
|
||||
/** Decides whether `config` is a JFrog OIDC configuration. */
|
||||
export function isJFrogConfig(
|
||||
config: UnvalidatedObject<AuthConfig>,
|
||||
): config is JFrogConfig {
|
||||
return json.validateSchema(jfrogConfigSchema, config);
|
||||
// The "audience" and "identity-mapping-name" fields are optional, but should be strings if present.
|
||||
if ("audience" in config && !json.isStringOrUndefined(config.audience)) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
"identity-mapping-name" in config &&
|
||||
!json.isStringOrUndefined(config["identity-mapping-name"])
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
"jfrog-oidc-provider-name" in config &&
|
||||
isDefined(config["jfrog-oidc-provider-name"]) &&
|
||||
json.isString(config["jfrog-oidc-provider-name"])
|
||||
);
|
||||
}
|
||||
|
||||
/** A schema for Cloudsmith OIDC configurations. */
|
||||
export const cloudsmithConfigSchema = {
|
||||
namespace: json.string,
|
||||
"service-slug": json.string,
|
||||
"api-host": json.string,
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/** Configuration for Cloudsmith OIDC. */
|
||||
export type CloudsmithConfig = json.FromSchema<typeof cloudsmithConfigSchema>;
|
||||
|
||||
/** Decides whether `config` is a Cloudsmith OIDC configuration. */
|
||||
export function isCloudsmithConfig(
|
||||
config: UnvalidatedObject<AuthConfig>,
|
||||
): config is CloudsmithConfig {
|
||||
return json.validateSchema(cloudsmithConfigSchema, config);
|
||||
}
|
||||
|
||||
/** A schema for GCP OIDC configurations. */
|
||||
export const gcpConfigSchema = {
|
||||
"workload-identity-provider": json.string,
|
||||
"service-account": json.optional(json.string),
|
||||
audience: json.optional(json.string),
|
||||
} as const satisfies json.Schema;
|
||||
|
||||
/** Configuration for GCP OIDC. */
|
||||
export type GCPConfig = json.FromSchema<typeof gcpConfigSchema>;
|
||||
|
||||
/** Decides whether `config` is a GCP OIDC configuration. */
|
||||
export function isGCPConfig(
|
||||
config: UnvalidatedObject<AuthConfig>,
|
||||
): config is GCPConfig {
|
||||
return json.validateSchema(gcpConfigSchema, config);
|
||||
}
|
||||
|
||||
/** An array of all OIDC configuration schemas along with output-friendly names. */
|
||||
export const oidcSchemas = [
|
||||
{ schema: azureConfigSchema, name: "Azure" },
|
||||
{ schema: awsConfigSchema, name: "AWS" },
|
||||
{ schema: jfrogConfigSchema, name: "JFrog" },
|
||||
{ schema: cloudsmithConfigSchema, name: "Cloudsmith" },
|
||||
{ schema: gcpConfigSchema, name: "GCP" },
|
||||
];
|
||||
|
||||
/** Represents all supported OIDC configurations. */
|
||||
export type OIDC =
|
||||
| AzureConfig
|
||||
| AWSConfig
|
||||
| JFrogConfig
|
||||
| CloudsmithConfig
|
||||
| GCPConfig;
|
||||
export type OIDC = AzureConfig | AWSConfig | JFrogConfig;
|
||||
|
||||
/** All authentication-related fields. */
|
||||
export type AuthConfig = UsernamePassword | Token | OIDC;
|
||||
@@ -198,7 +165,7 @@ export type Credential = AuthConfig & Registry;
|
||||
export function credentialToStr(credential: Credential): string {
|
||||
let result: string = `Type: ${credential.type};`;
|
||||
|
||||
const appendIfDefined = (name: string, val: string | undefined | null) => {
|
||||
const appendIfDefined = (name: string, val: string | undefined) => {
|
||||
if (isDefined(val)) {
|
||||
result += ` ${name}: ${val};`;
|
||||
}
|
||||
@@ -217,7 +184,7 @@ export function credentialToStr(credential: Credential): string {
|
||||
isDefined(credential.password) ? "***" : undefined,
|
||||
);
|
||||
}
|
||||
if (hasToken(credential)) {
|
||||
if (isToken(credential)) {
|
||||
appendIfDefined("Token", isDefined(credential.token) ? "***" : undefined);
|
||||
}
|
||||
|
||||
@@ -238,17 +205,6 @@ export function credentialToStr(credential: Credential): string {
|
||||
credential["identity-mapping-name"],
|
||||
);
|
||||
appendIfDefined("JFrog Audience", credential.audience);
|
||||
} else if (isCloudsmithConfig(credential)) {
|
||||
appendIfDefined("Cloudsmith Namespace", credential.namespace);
|
||||
appendIfDefined("Cloudsmith Service Slug", credential["service-slug"]);
|
||||
appendIfDefined("Cloudsmith API Host", credential["api-host"]);
|
||||
} else if (isGCPConfig(credential)) {
|
||||
appendIfDefined(
|
||||
"GCP Workload Identity Provider",
|
||||
credential["workload-identity-provider"],
|
||||
);
|
||||
appendIfDefined("GCP Service Account", credential["service-account"]);
|
||||
appendIfDefined("GCP Audience", credential.audience);
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -258,8 +214,6 @@ export function credentialToStr(credential: Credential): string {
|
||||
export type Registry = {
|
||||
/** The type of the package registry. */
|
||||
type: string;
|
||||
/** Whether the registry replaces the base registry for the ecosystem. */
|
||||
"replaces-base"?: boolean;
|
||||
} & Address;
|
||||
|
||||
// If a registry has an `url`, then that takes precedence over the `host` which may or may
|
||||
|
||||
@@ -1,69 +0,0 @@
import test from "ava";

import * as json from "../json";
import { makeFromSchema } from "../json/testing-util";
import { setupTests } from "../testing-utils";

import * as types from "./types";
import { getAuthConfig } from "./validation";

setupTests(test);

for (const schemaTest of types.oidcSchemas) {
  for (const includeOptional of [true, false]) {
    const minimalName = includeOptional ? "full" : "minimal";

    test(`getAuthConfig - ${schemaTest.name} - ${minimalName}`, async (t) => {
      const config = makeFromSchema(includeOptional, schemaTest.schema);

      t.deepEqual(
        getAuthConfig({
          ...config,
          unexpected: "unexpected-value",
        } as unknown as json.UnvalidatedObject<types.AuthConfig>),
        config,
      );
    });
  }
}

test("getAuthConfig - token", async (t) => {
  const config = makeFromSchema(true, types.tokenSchema);

  t.deepEqual(
    getAuthConfig({
      ...config,
      unexpected: "unexpected-value",
    } as json.UnvalidatedObject<types.AuthConfig>),
    config,
  );
});

test("getAuthConfig - username and password", async (t) => {
  const config = makeFromSchema(true, types.usernamePasswordSchema);

  t.deepEqual(
    getAuthConfig({
      ...config,
      unexpected: "unexpected-value",
    } as json.UnvalidatedObject<types.AuthConfig>),
    config,
  );
});

test("getAuthConfig - empty", async (t) => {
  const config = makeFromSchema(false, types.usernamePasswordSchema);

  // Since the purpose of constructing the `AuthConfig` values is for
  // serialisation to JSON so that they can be passed to the proxy as configuration,
  // we only care that the stringified JSON representations are the same.
  t.deepEqual(
    JSON.stringify(
      getAuthConfig({
        ...config,
        unexpected: "unexpected-value",
      } as json.UnvalidatedObject<types.AuthConfig>),
    ),
    JSON.stringify({}),
  );
});
@@ -1,81 +0,0 @@
import * as core from "@actions/core";

import * as json from "../json";
import { isDefined } from "../util";

import type { AuthConfig, UsernamePassword } from "./types";
import * as types from "./types";

/** Constructs a new object from `obj` with only keys that exist in `schema`. */
export function cloneCredential<S extends json.Schema>(
  schema: S,
  obj: json.FromSchema<S>,
): json.FromSchema<S> {
  const result = {};

  for (const key of Object.keys(schema)) {
    // Skip keys that don't exist or don't have a value.
    if (!isDefined(obj[key])) {
      continue;
    }
    result[key] = obj[key];
  }

  return result as json.FromSchema<S>;
}

/** Extracts an `AuthConfig` value from `config`. */
export function getAuthConfig(
  config: json.UnvalidatedObject<AuthConfig>,
): AuthConfig {
  // Start by checking for the OIDC configurations, since they have required properties
  // which we can use to identify them.
  for (const oidcSchema of types.oidcSchemas) {
    if (json.validateSchema(oidcSchema.schema, config)) {
      return cloneCredential(oidcSchema.schema, config);
    }
  }

  // Otherwise, try the basic configuration types.
  if (types.isToken(config)) {
    // There are three scenarios for non-OIDC authentication based on the registry type:
    //
    // 1. `username`+`token`
    // 2. A `token` that combines the username and actual token, separated by ':'.
    // 3. `username`+`password`
    //
    // In all three cases, all fields are optional. If the `token` field is present,
    // we accept the configuration as a `Token` typed configuration, with the `token`
    // value and an optional `username`. Otherwise, we accept the configuration
    // typed as `UsernamePassword` (in the `else` clause below) with optional
    // username and password. I.e. a private registry type that uses 1. or 2.,
    // but has no `token` configured, will get accepted as `UsernamePassword` here.

    if (isDefined(config.token)) {
      // Mask token to reduce chance of accidental leakage in logs, if we have one.
      core.setSecret(config.token);
    }

    return cloneCredential(types.tokenSchema, config);
  } else {
    let username: string | undefined = undefined;
    let password: string | undefined = undefined;

    // Both "username" and "password" are optional. If we have reached this point, we need
    // to validate which of them are present and that they have the correct type if so.
    if ("password" in config && json.isString(config.password)) {
      // Mask password to reduce chance of accidental leakage in logs, if we have one.
      core.setSecret(config.password);
      password = config.password;
    }
    if ("username" in config && json.isString(config.username)) {
      username = config.username;
    }

    // Return the `UsernamePassword` object. Both username and password may be undefined.
    return {
      username,
      password,
    } satisfies UsernamePassword;
  }
}