Merge pull request #1392 from github/henrymercer/parse-category
Add functionality for parsing Action inputs from a workflow file
This commit is contained in:
commit
0d9b15ca93
18 changed files with 1846 additions and 1382 deletions
225
lib/actions-util.js
generated
225
lib/actions-util.js
generated
|
|
@ -19,17 +19,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
/**
|
/**
|
||||||
|
|
@ -145,225 +145,6 @@ const determineMergeBaseCommitOid = async function () {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
||||||
function isObject(o) {
|
|
||||||
return o !== null && typeof o === "object";
|
|
||||||
}
|
|
||||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
|
||||||
function escapeRegExp(string) {
|
|
||||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
|
||||||
}
|
|
||||||
function patternToRegExp(value) {
|
|
||||||
return new RegExp(`^${value
|
|
||||||
.toString()
|
|
||||||
.split(GLOB_PATTERN)
|
|
||||||
.reduce(function (arr, cur) {
|
|
||||||
if (cur === "**") {
|
|
||||||
arr.push(".*?");
|
|
||||||
}
|
|
||||||
else if (cur === "*") {
|
|
||||||
arr.push("[^/]*?");
|
|
||||||
}
|
|
||||||
else if (cur) {
|
|
||||||
arr.push(escapeRegExp(cur));
|
|
||||||
}
|
|
||||||
return arr;
|
|
||||||
}, [])
|
|
||||||
.join("")}$`);
|
|
||||||
}
|
|
||||||
// this function should return true if patternA is a superset of patternB
|
|
||||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
|
||||||
function patternIsSuperset(patternA, patternB) {
|
|
||||||
return patternToRegExp(patternA).test(patternB);
|
|
||||||
}
|
|
||||||
exports.patternIsSuperset = patternIsSuperset;
|
|
||||||
function branchesToArray(branches) {
|
|
||||||
if (typeof branches === "string") {
|
|
||||||
return [branches];
|
|
||||||
}
|
|
||||||
if (Array.isArray(branches)) {
|
|
||||||
if (branches.length === 0) {
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
return branches;
|
|
||||||
}
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
function toCodedErrors(errors) {
|
|
||||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
|
||||||
acc[key] = { message: value, code: key };
|
|
||||||
return acc;
|
|
||||||
}, {});
|
|
||||||
}
|
|
||||||
// code to send back via status report
|
|
||||||
// message to add as a warning annotation to the run
|
|
||||||
exports.WorkflowErrors = toCodedErrors({
|
|
||||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
|
||||||
});
|
|
||||||
function getWorkflowErrors(doc) {
|
|
||||||
var _a, _b, _c, _d, _e;
|
|
||||||
const errors = [];
|
|
||||||
const jobName = process.env.GITHUB_JOB;
|
|
||||||
if (jobName) {
|
|
||||||
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
|
||||||
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
|
||||||
if (Array.isArray(steps)) {
|
|
||||||
for (const step of steps) {
|
|
||||||
// this was advice that we used to give in the README
|
|
||||||
// we actually want to run the analysis on the merge commit
|
|
||||||
// to produce results that are more inline with expectations
|
|
||||||
// (i.e: this is what will happen if you merge this PR)
|
|
||||||
// and avoid some race conditions
|
|
||||||
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
|
||||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let missingPush = false;
|
|
||||||
if (doc.on === undefined) {
|
|
||||||
// this is not a valid config
|
|
||||||
}
|
|
||||||
else if (typeof doc.on === "string") {
|
|
||||||
if (doc.on === "pull_request") {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (Array.isArray(doc.on)) {
|
|
||||||
const hasPush = doc.on.includes("push");
|
|
||||||
const hasPullRequest = doc.on.includes("pull_request");
|
|
||||||
if (hasPullRequest && !hasPush) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (isObject(doc.on)) {
|
|
||||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
|
||||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
|
||||||
if (!hasPush && hasPullRequest) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
if (hasPush && hasPullRequest) {
|
|
||||||
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
|
||||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
|
||||||
// if they didn't change any files
|
|
||||||
// currently we cannot go back through the history and find the most recent baseline
|
|
||||||
if (Array.isArray(paths) && paths.length > 0) {
|
|
||||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
|
||||||
}
|
|
||||||
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
|
||||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
|
||||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// if doc.on.pull_request is null that means 'all branches'
|
|
||||||
// if doc.on.pull_request is undefined that means 'off'
|
|
||||||
// we only want to check for mismatched branches if pull_request is on.
|
|
||||||
if (doc.on.pull_request !== undefined) {
|
|
||||||
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
|
||||||
if (push !== "**") {
|
|
||||||
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
|
||||||
if (pull_request !== "**") {
|
|
||||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
|
||||||
if (difference.length > 0) {
|
|
||||||
// there are branches in pull_request that may not have a baseline
|
|
||||||
// because we are not building them on push
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (push.length > 0) {
|
|
||||||
// push is set up to run on a subset of branches
|
|
||||||
// and you could open a PR against a branch with no baseline
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (missingPush) {
|
|
||||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
|
||||||
}
|
|
||||||
return errors;
|
|
||||||
}
|
|
||||||
exports.getWorkflowErrors = getWorkflowErrors;
|
|
||||||
async function validateWorkflow() {
|
|
||||||
let workflow;
|
|
||||||
try {
|
|
||||||
workflow = await getWorkflow();
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: getWorkflow() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
let workflowErrors;
|
|
||||||
try {
|
|
||||||
workflowErrors = getWorkflowErrors(workflow);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
if (workflowErrors.length > 0) {
|
|
||||||
let message;
|
|
||||||
try {
|
|
||||||
message = formatWorkflowErrors(workflowErrors);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
core.warning(message);
|
|
||||||
}
|
|
||||||
return formatWorkflowCause(workflowErrors);
|
|
||||||
}
|
|
||||||
exports.validateWorkflow = validateWorkflow;
|
|
||||||
function formatWorkflowErrors(errors) {
|
|
||||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
|
||||||
const errorsList = errors.map((e) => e.message).join(" ");
|
|
||||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
|
||||||
}
|
|
||||||
exports.formatWorkflowErrors = formatWorkflowErrors;
|
|
||||||
function formatWorkflowCause(errors) {
|
|
||||||
if (errors.length === 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return errors.map((e) => e.code).join(",");
|
|
||||||
}
|
|
||||||
exports.formatWorkflowCause = formatWorkflowCause;
|
|
||||||
async function getWorkflow() {
|
|
||||||
const relativePath = await getWorkflowPath();
|
|
||||||
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
|
||||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
|
||||||
}
|
|
||||||
exports.getWorkflow = getWorkflow;
|
|
||||||
/**
|
|
||||||
* Get the path of the currently executing workflow.
|
|
||||||
*/
|
|
||||||
async function getWorkflowPath() {
|
|
||||||
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
|
||||||
const owner = repo_nwo[0];
|
|
||||||
const repo = repo_nwo[1];
|
|
||||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
|
||||||
const apiClient = api.getApiClient();
|
|
||||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
run_id,
|
|
||||||
});
|
|
||||||
const workflowUrl = runsResponse.data.workflow_url;
|
|
||||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
|
||||||
return workflowResponse.data.path;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the workflow run ID.
|
|
||||||
*/
|
|
||||||
function getWorkflowRunID() {
|
|
||||||
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
|
||||||
if (Number.isNaN(workflowRunID)) {
|
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
|
||||||
}
|
|
||||||
return workflowRunID;
|
|
||||||
}
|
|
||||||
exports.getWorkflowRunID = getWorkflowRunID;
|
|
||||||
/**
|
/**
|
||||||
* Get the analysis key parameter for the current job.
|
* Get the analysis key parameter for the current job.
|
||||||
*
|
*
|
||||||
|
|
@ -377,7 +158,7 @@ async function getAnalysisKey() {
|
||||||
if (analysisKey !== undefined) {
|
if (analysisKey !== undefined) {
|
||||||
return analysisKey;
|
return analysisKey;
|
||||||
}
|
}
|
||||||
const workflowPath = await getWorkflowPath();
|
const workflowPath = await (0, workflow_1.getWorkflowPath)();
|
||||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||||
analysisKey = `${workflowPath}:${jobName}`;
|
analysisKey = `${workflowPath}:${jobName}`;
|
||||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
331
lib/actions-util.test.js
generated
331
lib/actions-util.test.js
generated
|
|
@ -25,14 +25,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const actionsutil = __importStar(require("./actions-util"));
|
const actionsutil = __importStar(require("./actions-util"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
function errorCodes(actual, expected) {
|
|
||||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
|
||||||
}
|
|
||||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "";
|
process.env["GITHUB_REF"] = "";
|
||||||
|
|
@ -143,333 +139,6 @@ function errorCodes(actual, expected) {
|
||||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
||||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request", "schedule"],
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main", "feature"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["main", "feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: 1,
|
|
||||||
pull_request: 1,
|
|
||||||
},
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: 1,
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: [1],
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { 1: 1 },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: 1 },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [1] },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: 1 } },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [undefined] },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
pull_request: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}), []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["main"]
|
|
||||||
pull_request:
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/*"] },
|
|
||||||
pull_request: { branches: "feature/moose" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/moose"] },
|
|
||||||
pull_request: { branches: "feature/*" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([]);
|
|
||||||
t.deepEqual(message, undefined);
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
|
||||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [4.1, master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [4.1, master]
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master, ]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test3";
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: "workflow_dispatch"
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: [workflow_dispatch]
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
workflow_dispatch: {}
|
|
||||||
`)), []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: ["push"]
|
|
||||||
`)), []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("initializeEnvironment", (t) => {
|
(0, ava_1.default)("initializeEnvironment", (t) => {
|
||||||
(0, util_1.initializeEnvironment)("1.2.3");
|
(0, util_1.initializeEnvironment)("1.2.3");
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
3
lib/init-action.js
generated
3
lib/init-action.js
generated
|
|
@ -31,6 +31,7 @@ const logging_1 = require("./logging");
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const trap_caching_1 = require("./trap-caching");
|
const trap_caching_1 = require("./trap-caching");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
async function sendSuccessStatusReport(startedAt, config, toolsVersion, logger) {
|
async function sendSuccessStatusReport(startedAt, config, toolsVersion, logger) {
|
||||||
|
|
@ -90,7 +91,7 @@ async function run() {
|
||||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||||
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||||
try {
|
try {
|
||||||
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
|
const workflowErrors = await (0, workflow_1.validateWorkflow)();
|
||||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
3
lib/upload-lib.js
generated
3
lib/upload-lib.js
generated
|
|
@ -36,6 +36,7 @@ const fingerprints = __importStar(require("./fingerprints"));
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
|
const workflow = __importStar(require("./workflow"));
|
||||||
// Takes a list of paths to sarif files and combines them together,
|
// Takes a list of paths to sarif files and combines them together,
|
||||||
// returning the contents of the combined sarif file.
|
// returning the contents of the combined sarif file.
|
||||||
function combineSarifFiles(sarifFiles) {
|
function combineSarifFiles(sarifFiles) {
|
||||||
|
|
@ -129,7 +130,7 @@ exports.findSarifFilesInDir = findSarifFilesInDir;
|
||||||
// depending on what the path happens to refer to.
|
// depending on what the path happens to refer to.
|
||||||
// Returns true iff the upload occurred and succeeded
|
// Returns true iff the upload occurred and succeeded
|
||||||
async function uploadFromActions(sarifPath, logger) {
|
async function uploadFromActions(sarifPath, logger) {
|
||||||
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), logger);
|
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), logger);
|
||||||
}
|
}
|
||||||
exports.uploadFromActions = uploadFromActions;
|
exports.uploadFromActions = uploadFromActions;
|
||||||
function getSarifFilePaths(sarifPath) {
|
function getSarifFilePaths(sarifPath) {
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
308
lib/workflow.js
generated
Normal file
308
lib/workflow.js
generated
Normal file
|
|
@ -0,0 +1,308 @@
|
||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.getCategoryInputOrThrow = exports.getWorkflowRunID = exports.getWorkflowPath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const yaml = __importStar(require("js-yaml"));
|
||||||
|
const api = __importStar(require("./api-client"));
|
||||||
|
const util_1 = require("./util");
|
||||||
|
function isObject(o) {
|
||||||
|
return o !== null && typeof o === "object";
|
||||||
|
}
|
||||||
|
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||||
|
function escapeRegExp(string) {
|
||||||
|
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||||
|
}
|
||||||
|
function patternToRegExp(value) {
|
||||||
|
return new RegExp(`^${value
|
||||||
|
.toString()
|
||||||
|
.split(GLOB_PATTERN)
|
||||||
|
.reduce(function (arr, cur) {
|
||||||
|
if (cur === "**") {
|
||||||
|
arr.push(".*?");
|
||||||
|
}
|
||||||
|
else if (cur === "*") {
|
||||||
|
arr.push("[^/]*?");
|
||||||
|
}
|
||||||
|
else if (cur) {
|
||||||
|
arr.push(escapeRegExp(cur));
|
||||||
|
}
|
||||||
|
return arr;
|
||||||
|
}, [])
|
||||||
|
.join("")}$`);
|
||||||
|
}
|
||||||
|
// this function should return true if patternA is a superset of patternB
|
||||||
|
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||||
|
function patternIsSuperset(patternA, patternB) {
|
||||||
|
return patternToRegExp(patternA).test(patternB);
|
||||||
|
}
|
||||||
|
exports.patternIsSuperset = patternIsSuperset;
|
||||||
|
function branchesToArray(branches) {
|
||||||
|
if (typeof branches === "string") {
|
||||||
|
return [branches];
|
||||||
|
}
|
||||||
|
if (Array.isArray(branches)) {
|
||||||
|
if (branches.length === 0) {
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
return branches;
|
||||||
|
}
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
function toCodedErrors(errors) {
|
||||||
|
return Object.entries(errors).reduce((acc, [code, message]) => {
|
||||||
|
acc[code] = { message, code };
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
// code to send back via status report
|
||||||
|
// message to add as a warning annotation to the run
|
||||||
|
exports.WorkflowErrors = toCodedErrors({
|
||||||
|
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
|
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
|
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
|
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
|
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||||
|
});
|
||||||
|
function getWorkflowErrors(doc) {
|
||||||
|
var _a, _b, _c, _d, _e;
|
||||||
|
const errors = [];
|
||||||
|
const jobName = process.env.GITHUB_JOB;
|
||||||
|
if (jobName) {
|
||||||
|
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
||||||
|
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
||||||
|
if (Array.isArray(steps)) {
|
||||||
|
for (const step of steps) {
|
||||||
|
// this was advice that we used to give in the README
|
||||||
|
// we actually want to run the analysis on the merge commit
|
||||||
|
// to produce results that are more inline with expectations
|
||||||
|
// (i.e: this is what will happen if you merge this PR)
|
||||||
|
// and avoid some race conditions
|
||||||
|
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
||||||
|
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let missingPush = false;
|
||||||
|
if (doc.on === undefined) {
|
||||||
|
// this is not a valid config
|
||||||
|
}
|
||||||
|
else if (typeof doc.on === "string") {
|
||||||
|
if (doc.on === "pull_request") {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (Array.isArray(doc.on)) {
|
||||||
|
const hasPush = doc.on.includes("push");
|
||||||
|
const hasPullRequest = doc.on.includes("pull_request");
|
||||||
|
if (hasPullRequest && !hasPush) {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (isObject(doc.on)) {
|
||||||
|
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||||
|
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
||||||
|
if (!hasPush && hasPullRequest) {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
if (hasPush && hasPullRequest) {
|
||||||
|
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
||||||
|
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||||
|
// if they didn't change any files
|
||||||
|
// currently we cannot go back through the history and find the most recent baseline
|
||||||
|
if (Array.isArray(paths) && paths.length > 0) {
|
||||||
|
errors.push(exports.WorkflowErrors.PathsSpecified);
|
||||||
|
}
|
||||||
|
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
||||||
|
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||||
|
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// if doc.on.pull_request is null that means 'all branches'
|
||||||
|
// if doc.on.pull_request is undefined that means 'off'
|
||||||
|
// we only want to check for mismatched branches if pull_request is on.
|
||||||
|
if (doc.on.pull_request !== undefined) {
|
||||||
|
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
||||||
|
if (push !== "**") {
|
||||||
|
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
||||||
|
if (pull_request !== "**") {
|
||||||
|
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||||
|
if (difference.length > 0) {
|
||||||
|
// there are branches in pull_request that may not have a baseline
|
||||||
|
// because we are not building them on push
|
||||||
|
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (push.length > 0) {
|
||||||
|
// push is set up to run on a subset of branches
|
||||||
|
// and you could open a PR against a branch with no baseline
|
||||||
|
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (missingPush) {
|
||||||
|
errors.push(exports.WorkflowErrors.MissingPushHook);
|
||||||
|
}
|
||||||
|
return errors;
|
||||||
|
}
|
||||||
|
exports.getWorkflowErrors = getWorkflowErrors;
|
||||||
|
async function validateWorkflow() {
|
||||||
|
let workflow;
|
||||||
|
try {
|
||||||
|
workflow = await getWorkflow();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: getWorkflow() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
let workflowErrors;
|
||||||
|
try {
|
||||||
|
workflowErrors = getWorkflowErrors(workflow);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
if (workflowErrors.length > 0) {
|
||||||
|
let message;
|
||||||
|
try {
|
||||||
|
message = formatWorkflowErrors(workflowErrors);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
core.warning(message);
|
||||||
|
}
|
||||||
|
return formatWorkflowCause(workflowErrors);
|
||||||
|
}
|
||||||
|
exports.validateWorkflow = validateWorkflow;
|
||||||
|
function formatWorkflowErrors(errors) {
|
||||||
|
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||||
|
const errorsList = errors.map((e) => e.message).join(" ");
|
||||||
|
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||||
|
}
|
||||||
|
exports.formatWorkflowErrors = formatWorkflowErrors;
|
||||||
|
function formatWorkflowCause(errors) {
|
||||||
|
if (errors.length === 0) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return errors.map((e) => e.code).join(",");
|
||||||
|
}
|
||||||
|
exports.formatWorkflowCause = formatWorkflowCause;
|
||||||
|
async function getWorkflow() {
|
||||||
|
const relativePath = await getWorkflowPath();
|
||||||
|
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
||||||
|
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
||||||
|
}
|
||||||
|
exports.getWorkflow = getWorkflow;
|
||||||
|
/**
|
||||||
|
* Get the path of the currently executing workflow.
|
||||||
|
*/
|
||||||
|
async function getWorkflowPath() {
|
||||||
|
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||||
|
const owner = repo_nwo[0];
|
||||||
|
const repo = repo_nwo[1];
|
||||||
|
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||||
|
const apiClient = api.getApiClient();
|
||||||
|
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
run_id,
|
||||||
|
});
|
||||||
|
const workflowUrl = runsResponse.data.workflow_url;
|
||||||
|
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||||
|
return workflowResponse.data.path;
|
||||||
|
}
|
||||||
|
exports.getWorkflowPath = getWorkflowPath;
|
||||||
|
/**
|
||||||
|
* Get the workflow run ID.
|
||||||
|
*/
|
||||||
|
function getWorkflowRunID() {
|
||||||
|
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
||||||
|
if (Number.isNaN(workflowRunID)) {
|
||||||
|
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||||
|
}
|
||||||
|
return workflowRunID;
|
||||||
|
}
|
||||||
|
exports.getWorkflowRunID = getWorkflowRunID;
|
||||||
|
function getStepsCallingAction(job, actionName) {
|
||||||
|
const steps = job.steps;
|
||||||
|
if (!Array.isArray(steps)) {
|
||||||
|
throw new Error(`Could not get steps calling ${actionName} since job.steps was not an array.`);
|
||||||
|
}
|
||||||
|
return steps.filter((step) => { var _a; return (_a = step.uses) === null || _a === void 0 ? void 0 : _a.includes(actionName); });
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the value of a particular input with which
|
||||||
|
* an Action in the workflow would be invoked.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the value of the input, or undefined if no such input is passed to the Action
|
||||||
|
* @throws an error if the value of the input could not be determined, or we could not
|
||||||
|
* determine that no such input is passed to the Action.
|
||||||
|
*/
|
||||||
|
function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) {
|
||||||
|
if (!workflow.jobs) {
|
||||||
|
throw new Error(`Could not get ${inputName} input to ${actionName} since the workflow has no jobs.`);
|
||||||
|
}
|
||||||
|
if (!workflow.jobs[jobName]) {
|
||||||
|
throw new Error(`Could not get ${inputName} input to ${actionName} since the workflow has no job named ${jobName}.`);
|
||||||
|
}
|
||||||
|
const inputs = getStepsCallingAction(workflow.jobs[jobName], actionName)
|
||||||
|
.map((step) => { var _a; return (_a = step.with) === null || _a === void 0 ? void 0 : _a[inputName]; })
|
||||||
|
.filter((input) => input !== undefined)
|
||||||
|
.map((input) => input);
|
||||||
|
if (inputs.length === 0) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
if (!inputs.every((input) => input === inputs[0])) {
|
||||||
|
throw new Error(`Could not get ${inputName} input to ${actionName} since there were multiple steps calling ` +
|
||||||
|
`${actionName} with different values for ${inputName}.`);
|
||||||
|
}
|
||||||
|
// Make a basic attempt to substitute matrix variables
|
||||||
|
// First normalize by removing whitespace
|
||||||
|
let input = inputs[0].replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
|
||||||
|
for (const [key, value] of Object.entries(matrixVars)) {
|
||||||
|
input = input.replace(`\${{matrix.${key}}}`, value);
|
||||||
|
}
|
||||||
|
if (input.includes("${{")) {
|
||||||
|
throw new Error(`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`);
|
||||||
|
}
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the category input for the particular job,
|
||||||
|
* given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the category input, or undefined if the category input is not defined
|
||||||
|
* @throws an error if the category input could not be determined
|
||||||
|
*/
|
||||||
|
function getCategoryInputOrThrow(workflow, jobName, matrixVars) {
|
||||||
|
return getInputOrThrow(workflow, jobName, "github/codeql-action/analyze", "category", matrixVars);
|
||||||
|
}
|
||||||
|
exports.getCategoryInputOrThrow = getCategoryInputOrThrow;
|
||||||
|
//# sourceMappingURL=workflow.js.map
|
||||||
1
lib/workflow.js.map
Normal file
1
lib/workflow.js.map
Normal file
File diff suppressed because one or more lines are too long
457
lib/workflow.test.js
generated
Normal file
457
lib/workflow.test.js
generated
Normal file
|
|
@ -0,0 +1,457 @@
|
||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
|
const yaml = __importStar(require("js-yaml"));
|
||||||
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
|
function errorCodes(actual, expected) {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({ on: {} });
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({ on: ["push"] });
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({ on: ["pull_request"] });
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MissingPushHook]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: ["push", "pull_request", "schedule"],
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"], paths: ["test/*"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.PathsSpecified]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main", "feature"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["main", "feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: 1,
|
||||||
|
pull_request: 1,
|
||||||
|
},
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: 1,
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: [1],
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { 1: 1 },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: 1 },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [1] },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: 1 } },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [undefined] },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(1), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}), []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
pull_request:
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/*"] },
|
||||||
|
pull_request: { branches: "feature/moose" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/moose"] },
|
||||||
|
pull_request: { branches: "feature/*" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowErrors)([workflow_1.WorkflowErrors.CheckoutWrongHead]);
|
||||||
|
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowErrors)([
|
||||||
|
workflow_1.WorkflowErrors.CheckoutWrongHead,
|
||||||
|
workflow_1.WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowCause)([]);
|
||||||
|
t.deepEqual(message, undefined);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowCause)([
|
||||||
|
workflow_1.WorkflowErrors.CheckoutWrongHead,
|
||||||
|
workflow_1.WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||||
|
t.deepEqual((0, workflow_1.formatWorkflowCause)([]), undefined);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("main-*", "main"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("*", "*"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("*", "main-*"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("main-*", "*"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("main-*", "main"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("main", "main"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("*", "feature/*"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("**", "feature/*"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("feature-*", "**"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/c", "a/**/d"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/c", "a/**"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**", "a/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**/c", "a/main-**/c"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/d/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/main-**/c", "a/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [4.1, master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [4.1, master]
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master, ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test3";
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: "workflow_dispatch"
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: [workflow_dispatch]
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
`)), []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: ["push"]
|
||||||
|
`)), []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
`), "analysis", {}), "some-category");
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
`), "analysis", {}), undefined);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build foo
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: foo-category
|
||||||
|
bar:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build bar
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: bar-category
|
||||||
|
`), "bar", {}), "bar-category");
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
language: [javascript, python]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
language: \${{ matrix.language }}
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:\${{ matrix.language }}"
|
||||||
|
`), "analysis", { language: "javascript" }), "/language:javascript");
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
|
||||||
|
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "\${{ github.workflow }}"
|
||||||
|
`), "analysis", {}), {
|
||||||
|
message: "Could not get category input to github/codeql-action/analyze since it contained " +
|
||||||
|
"an unrecognized dynamic value.",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with multiple categories", (t) => {
|
||||||
|
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: another-category
|
||||||
|
`), "analysis", {}), {
|
||||||
|
message: "Could not get category input to github/codeql-action/analyze since there were multiple steps " +
|
||||||
|
"calling github/codeql-action/analyze with different values for category.",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=workflow.test.js.map
|
||||||
1
lib/workflow.test.js.map
Normal file
1
lib/workflow.test.js.map
Normal file
File diff suppressed because one or more lines are too long
|
|
@ -2,20 +2,12 @@ import * as fs from "fs";
|
||||||
import * as path from "path";
|
import * as path from "path";
|
||||||
|
|
||||||
import test from "ava";
|
import test from "ava";
|
||||||
import * as yaml from "js-yaml";
|
|
||||||
import * as sinon from "sinon";
|
import * as sinon from "sinon";
|
||||||
|
|
||||||
import * as actionsutil from "./actions-util";
|
import * as actionsutil from "./actions-util";
|
||||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||||
import { initializeEnvironment, withTmpDir } from "./util";
|
import { initializeEnvironment, withTmpDir } from "./util";
|
||||||
|
|
||||||
function errorCodes(
|
|
||||||
actual: actionsutil.CodedError[],
|
|
||||||
expected: actionsutil.CodedError[]
|
|
||||||
): [string[], string[]] {
|
|
||||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
|
||||||
}
|
|
||||||
|
|
||||||
setupTests(test);
|
setupTests(test);
|
||||||
|
|
||||||
test("getRef() throws on the empty string", async (t) => {
|
test("getRef() throws on the empty string", async (t) => {
|
||||||
|
|
@ -188,536 +180,6 @@ test("computeAutomationID()", async (t) => {
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("getWorkflowErrors() when on is empty", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request", "schedule"],
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push should not have a path", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is a correct object", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main", "feature"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["main", "feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: 1,
|
|
||||||
pull_request: 1,
|
|
||||||
},
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: 1,
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: [1],
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { 1: 1 },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: 1 },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [1] },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: 1 } },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [undefined] },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1 as any), []));
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
pull_request: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["main"]
|
|
||||||
pull_request:
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/*"] },
|
|
||||||
pull_request: { branches: "feature/moose" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/moose"] },
|
|
||||||
pull_request: { branches: "feature/*" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowErrors() when there is one error", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowErrors() when there are multiple errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowCause() with no errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([]);
|
|
||||||
|
|
||||||
t.deepEqual(message, undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowCause()", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
|
|
||||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
|
||||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("patternIsSuperset()", (t) => {
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
|
||||||
t.true(
|
|
||||||
actionsutil.patternIsSuperset(
|
|
||||||
"/robin/*/release/*",
|
|
||||||
"/robin/moose/release/goose"
|
|
||||||
)
|
|
||||||
);
|
|
||||||
t.false(
|
|
||||||
actionsutil.patternIsSuperset(
|
|
||||||
"/robin/moose/release/goose",
|
|
||||||
"/robin/*/release/*"
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when branches contain dots", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [4.1, master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [4.1, master]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master, ]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test3";
|
|
||||||
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on is missing", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() with a different on setup", (t) => {
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: "workflow_dispatch"
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: [workflow_dispatch]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
workflow_dispatch: {}
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: ["push"]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("initializeEnvironment", (t) => {
|
test("initializeEnvironment", (t) => {
|
||||||
initializeEnvironment("1.2.3");
|
initializeEnvironment("1.2.3");
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,6 @@ import * as path from "path";
|
||||||
import * as core from "@actions/core";
|
import * as core from "@actions/core";
|
||||||
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||||
import * as safeWhich from "@chrisgavin/safe-which";
|
import * as safeWhich from "@chrisgavin/safe-which";
|
||||||
import * as yaml from "js-yaml";
|
|
||||||
|
|
||||||
import * as api from "./api-client";
|
import * as api from "./api-client";
|
||||||
import { Config } from "./config-utils";
|
import { Config } from "./config-utils";
|
||||||
|
|
@ -20,6 +19,7 @@ import {
|
||||||
isInTestMode,
|
isInTestMode,
|
||||||
UserError,
|
UserError,
|
||||||
} from "./util";
|
} from "./util";
|
||||||
|
import { getWorkflowPath } from "./workflow";
|
||||||
|
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
|
|
@ -156,288 +156,6 @@ export const determineMergeBaseCommitOid = async function (): Promise<
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
interface WorkflowJobStep {
|
|
||||||
run: any;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface WorkflowJob {
|
|
||||||
steps?: WorkflowJobStep[];
|
|
||||||
}
|
|
||||||
|
|
||||||
interface WorkflowTrigger {
|
|
||||||
branches?: string[] | string;
|
|
||||||
paths?: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
// on: {} then push/pull_request are undefined
|
|
||||||
// on:
|
|
||||||
// push:
|
|
||||||
// pull_request:
|
|
||||||
// then push/pull_request are null
|
|
||||||
interface WorkflowTriggers {
|
|
||||||
push?: WorkflowTrigger | null;
|
|
||||||
pull_request?: WorkflowTrigger | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Workflow {
|
|
||||||
jobs?: { [key: string]: WorkflowJob };
|
|
||||||
on?: string | string[] | WorkflowTriggers;
|
|
||||||
}
|
|
||||||
|
|
||||||
function isObject(o: unknown): o is object {
|
|
||||||
return o !== null && typeof o === "object";
|
|
||||||
}
|
|
||||||
|
|
||||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
|
||||||
|
|
||||||
function escapeRegExp(string) {
|
|
||||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
|
||||||
}
|
|
||||||
|
|
||||||
function patternToRegExp(value) {
|
|
||||||
return new RegExp(
|
|
||||||
`^${value
|
|
||||||
.toString()
|
|
||||||
.split(GLOB_PATTERN)
|
|
||||||
.reduce(function (arr, cur) {
|
|
||||||
if (cur === "**") {
|
|
||||||
arr.push(".*?");
|
|
||||||
} else if (cur === "*") {
|
|
||||||
arr.push("[^/]*?");
|
|
||||||
} else if (cur) {
|
|
||||||
arr.push(escapeRegExp(cur));
|
|
||||||
}
|
|
||||||
return arr;
|
|
||||||
}, [])
|
|
||||||
.join("")}$`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// this function should return true if patternA is a superset of patternB
|
|
||||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
|
||||||
export function patternIsSuperset(patternA: string, patternB: string): boolean {
|
|
||||||
return patternToRegExp(patternA).test(patternB);
|
|
||||||
}
|
|
||||||
|
|
||||||
function branchesToArray(branches?: string | null | string[]): string[] | "**" {
|
|
||||||
if (typeof branches === "string") {
|
|
||||||
return [branches];
|
|
||||||
}
|
|
||||||
if (Array.isArray(branches)) {
|
|
||||||
if (branches.length === 0) {
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
return branches;
|
|
||||||
}
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
export interface CodedError {
|
|
||||||
message: string;
|
|
||||||
code: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toCodedErrors<T>(errors: T): Record<keyof T, CodedError> {
|
|
||||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
|
||||||
acc[key] = { message: value, code: key };
|
|
||||||
return acc;
|
|
||||||
}, {} as Record<keyof T, CodedError>);
|
|
||||||
}
|
|
||||||
|
|
||||||
// code to send back via status report
|
|
||||||
// message to add as a warning annotation to the run
|
|
||||||
export const WorkflowErrors = toCodedErrors({
|
|
||||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
|
||||||
});
|
|
||||||
|
|
||||||
export function getWorkflowErrors(doc: Workflow): CodedError[] {
|
|
||||||
const errors: CodedError[] = [];
|
|
||||||
|
|
||||||
const jobName = process.env.GITHUB_JOB;
|
|
||||||
|
|
||||||
if (jobName) {
|
|
||||||
const job = doc?.jobs?.[jobName];
|
|
||||||
|
|
||||||
const steps = job?.steps;
|
|
||||||
|
|
||||||
if (Array.isArray(steps)) {
|
|
||||||
for (const step of steps) {
|
|
||||||
// this was advice that we used to give in the README
|
|
||||||
// we actually want to run the analysis on the merge commit
|
|
||||||
// to produce results that are more inline with expectations
|
|
||||||
// (i.e: this is what will happen if you merge this PR)
|
|
||||||
// and avoid some race conditions
|
|
||||||
if (step?.run === "git checkout HEAD^2") {
|
|
||||||
errors.push(WorkflowErrors.CheckoutWrongHead);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let missingPush = false;
|
|
||||||
|
|
||||||
if (doc.on === undefined) {
|
|
||||||
// this is not a valid config
|
|
||||||
} else if (typeof doc.on === "string") {
|
|
||||||
if (doc.on === "pull_request") {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
} else if (Array.isArray(doc.on)) {
|
|
||||||
const hasPush = doc.on.includes("push");
|
|
||||||
const hasPullRequest = doc.on.includes("pull_request");
|
|
||||||
if (hasPullRequest && !hasPush) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
} else if (isObject(doc.on)) {
|
|
||||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
|
||||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(
|
|
||||||
doc.on,
|
|
||||||
"pull_request"
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!hasPush && hasPullRequest) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
if (hasPush && hasPullRequest) {
|
|
||||||
const paths = doc.on.push?.paths;
|
|
||||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
|
||||||
// if they didn't change any files
|
|
||||||
// currently we cannot go back through the history and find the most recent baseline
|
|
||||||
if (Array.isArray(paths) && paths.length > 0) {
|
|
||||||
errors.push(WorkflowErrors.PathsSpecified);
|
|
||||||
}
|
|
||||||
const pathsIgnore = doc.on.push?.["paths-ignore"];
|
|
||||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
|
||||||
errors.push(WorkflowErrors.PathsIgnoreSpecified);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// if doc.on.pull_request is null that means 'all branches'
|
|
||||||
// if doc.on.pull_request is undefined that means 'off'
|
|
||||||
// we only want to check for mismatched branches if pull_request is on.
|
|
||||||
if (doc.on.pull_request !== undefined) {
|
|
||||||
const push = branchesToArray(doc.on.push?.branches);
|
|
||||||
|
|
||||||
if (push !== "**") {
|
|
||||||
const pull_request = branchesToArray(doc.on.pull_request?.branches);
|
|
||||||
|
|
||||||
if (pull_request !== "**") {
|
|
||||||
const difference = pull_request.filter(
|
|
||||||
(value) => !push.some((o) => patternIsSuperset(o, value))
|
|
||||||
);
|
|
||||||
if (difference.length > 0) {
|
|
||||||
// there are branches in pull_request that may not have a baseline
|
|
||||||
// because we are not building them on push
|
|
||||||
errors.push(WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
} else if (push.length > 0) {
|
|
||||||
// push is set up to run on a subset of branches
|
|
||||||
// and you could open a PR against a branch with no baseline
|
|
||||||
errors.push(WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (missingPush) {
|
|
||||||
errors.push(WorkflowErrors.MissingPushHook);
|
|
||||||
}
|
|
||||||
|
|
||||||
return errors;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function validateWorkflow(): Promise<undefined | string> {
|
|
||||||
let workflow: Workflow;
|
|
||||||
try {
|
|
||||||
workflow = await getWorkflow();
|
|
||||||
} catch (e) {
|
|
||||||
return `error: getWorkflow() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
let workflowErrors: CodedError[];
|
|
||||||
try {
|
|
||||||
workflowErrors = getWorkflowErrors(workflow);
|
|
||||||
} catch (e) {
|
|
||||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (workflowErrors.length > 0) {
|
|
||||||
let message: string;
|
|
||||||
try {
|
|
||||||
message = formatWorkflowErrors(workflowErrors);
|
|
||||||
} catch (e) {
|
|
||||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
core.warning(message);
|
|
||||||
}
|
|
||||||
|
|
||||||
return formatWorkflowCause(workflowErrors);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function formatWorkflowErrors(errors: CodedError[]): string {
|
|
||||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
|
||||||
|
|
||||||
const errorsList = errors.map((e) => e.message).join(" ");
|
|
||||||
|
|
||||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function formatWorkflowCause(errors: CodedError[]): undefined | string {
|
|
||||||
if (errors.length === 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return errors.map((e) => e.code).join(",");
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getWorkflow(): Promise<Workflow> {
|
|
||||||
const relativePath = await getWorkflowPath();
|
|
||||||
const absolutePath = path.join(
|
|
||||||
getRequiredEnvParam("GITHUB_WORKSPACE"),
|
|
||||||
relativePath
|
|
||||||
);
|
|
||||||
|
|
||||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8")) as Workflow;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the path of the currently executing workflow.
|
|
||||||
*/
|
|
||||||
async function getWorkflowPath(): Promise<string> {
|
|
||||||
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
|
||||||
const owner = repo_nwo[0];
|
|
||||||
const repo = repo_nwo[1];
|
|
||||||
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
|
|
||||||
|
|
||||||
const apiClient = api.getApiClient();
|
|
||||||
const runsResponse = await apiClient.request(
|
|
||||||
"GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true",
|
|
||||||
{
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
run_id,
|
|
||||||
}
|
|
||||||
);
|
|
||||||
const workflowUrl = runsResponse.data.workflow_url;
|
|
||||||
|
|
||||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
|
||||||
|
|
||||||
return workflowResponse.data.path;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the workflow run ID.
|
|
||||||
*/
|
|
||||||
export function getWorkflowRunID(): number {
|
|
||||||
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
|
||||||
if (Number.isNaN(workflowRunID)) {
|
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
|
||||||
}
|
|
||||||
return workflowRunID;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the analysis key parameter for the current job.
|
* Get the analysis key parameter for the current job.
|
||||||
*
|
*
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ import {
|
||||||
getTemporaryDirectory,
|
getTemporaryDirectory,
|
||||||
sendStatusReport,
|
sendStatusReport,
|
||||||
StatusReportBase,
|
StatusReportBase,
|
||||||
validateWorkflow,
|
|
||||||
} from "./actions-util";
|
} from "./actions-util";
|
||||||
import { getGitHubVersion } from "./api-client";
|
import { getGitHubVersion } from "./api-client";
|
||||||
import { CodeQL, CODEQL_VERSION_NEW_TRACING } from "./codeql";
|
import { CodeQL, CODEQL_VERSION_NEW_TRACING } from "./codeql";
|
||||||
|
|
@ -43,6 +42,7 @@ import {
|
||||||
isHostedRunner,
|
isHostedRunner,
|
||||||
shouldBypassToolcache,
|
shouldBypassToolcache,
|
||||||
} from "./util";
|
} from "./util";
|
||||||
|
import { validateWorkflow } from "./workflow";
|
||||||
|
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,7 @@ import { parseRepositoryNwo, RepositoryNwo } from "./repository";
|
||||||
import * as sharedEnv from "./shared-environment";
|
import * as sharedEnv from "./shared-environment";
|
||||||
import * as util from "./util";
|
import * as util from "./util";
|
||||||
import { SarifFile, SarifResult, SarifRun } from "./util";
|
import { SarifFile, SarifResult, SarifRun } from "./util";
|
||||||
|
import * as workflow from "./workflow";
|
||||||
|
|
||||||
// Takes a list of paths to sarif files and combines them together,
|
// Takes a list of paths to sarif files and combines them together,
|
||||||
// returning the contents of the combined sarif file.
|
// returning the contents of the combined sarif file.
|
||||||
|
|
@ -172,7 +173,7 @@ export async function uploadFromActions(
|
||||||
await actionsUtil.getAnalysisKey(),
|
await actionsUtil.getAnalysisKey(),
|
||||||
actionsUtil.getOptionalInput("category"),
|
actionsUtil.getOptionalInput("category"),
|
||||||
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
|
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
|
||||||
actionsUtil.getWorkflowRunID(),
|
workflow.getWorkflowRunID(),
|
||||||
actionsUtil.getRequiredInput("checkout_path"),
|
actionsUtil.getRequiredInput("checkout_path"),
|
||||||
actionsUtil.getRequiredInput("matrix"),
|
actionsUtil.getRequiredInput("matrix"),
|
||||||
logger
|
logger
|
||||||
|
|
|
||||||
676
src/workflow.test.ts
Normal file
676
src/workflow.test.ts
Normal file
|
|
@ -0,0 +1,676 @@
|
||||||
|
import test from "ava";
|
||||||
|
import * as yaml from "js-yaml";
|
||||||
|
|
||||||
|
import { setupTests } from "./testing-utils";
|
||||||
|
import {
|
||||||
|
CodedError,
|
||||||
|
formatWorkflowCause,
|
||||||
|
formatWorkflowErrors,
|
||||||
|
getCategoryInputOrThrow,
|
||||||
|
getWorkflowErrors,
|
||||||
|
patternIsSuperset,
|
||||||
|
Workflow,
|
||||||
|
WorkflowErrors,
|
||||||
|
} from "./workflow";
|
||||||
|
|
||||||
|
function errorCodes(
|
||||||
|
actual: CodedError[],
|
||||||
|
expected: CodedError[]
|
||||||
|
): [string[], string[]] {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
|
|
||||||
|
setupTests(test);
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on is empty", (t) => {
|
||||||
|
const errors = getWorkflowErrors({ on: {} });
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||||
|
const errors = getWorkflowErrors({ on: ["push"] });
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||||
|
const errors = getWorkflowErrors({ on: ["pull_request"] });
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MissingPushHook]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is valid", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: ["push", "pull_request", "schedule"],
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"], paths: ["test/*"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.PathsSpecified]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main", "feature"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["main", "feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: 1,
|
||||||
|
pull_request: 1,
|
||||||
|
},
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: 1,
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: [1],
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { 1: 1 },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: 1 },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [1] },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: 1 } },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [undefined] },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(getWorkflowErrors(1 as any), []));
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
pull_request:
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/*"] },
|
||||||
|
pull_request: { branches: "feature/moose" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/moose"] },
|
||||||
|
pull_request: { branches: "feature/*" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowErrors() when there is one error", (t) => {
|
||||||
|
const message = formatWorkflowErrors([WorkflowErrors.CheckoutWrongHead]);
|
||||||
|
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||||
|
const message = formatWorkflowErrors([
|
||||||
|
WorkflowErrors.CheckoutWrongHead,
|
||||||
|
WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowCause() with no errors", (t) => {
|
||||||
|
const message = formatWorkflowCause([]);
|
||||||
|
|
||||||
|
t.deepEqual(message, undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowCause()", (t) => {
|
||||||
|
const message = formatWorkflowCause([
|
||||||
|
WorkflowErrors.CheckoutWrongHead,
|
||||||
|
WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
|
||||||
|
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||||
|
t.deepEqual(formatWorkflowCause([]), undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("patternIsSuperset()", (t) => {
|
||||||
|
t.false(patternIsSuperset("main-*", "main"));
|
||||||
|
t.true(patternIsSuperset("*", "*"));
|
||||||
|
t.true(patternIsSuperset("*", "main-*"));
|
||||||
|
t.false(patternIsSuperset("main-*", "*"));
|
||||||
|
t.false(patternIsSuperset("main-*", "main"));
|
||||||
|
t.true(patternIsSuperset("main", "main"));
|
||||||
|
t.false(patternIsSuperset("*", "feature/*"));
|
||||||
|
t.true(patternIsSuperset("**", "feature/*"));
|
||||||
|
t.false(patternIsSuperset("feature-*", "**"));
|
||||||
|
t.false(patternIsSuperset("a/**/c", "a/**/d"));
|
||||||
|
t.false(patternIsSuperset("a/**/c", "a/**"));
|
||||||
|
t.true(patternIsSuperset("a/**", "a/**/c"));
|
||||||
|
t.true(patternIsSuperset("a/**/c", "a/main-**/c"));
|
||||||
|
t.false(patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
||||||
|
t.true(patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||||
|
t.true(patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||||
|
t.false(patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||||
|
t.false(patternIsSuperset("a/main-**/c", "a/**/c"));
|
||||||
|
t.true(patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||||
|
t.false(
|
||||||
|
patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when branches contain dots", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [4.1, master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [4.1, master]
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master, ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test3";
|
||||||
|
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on is missing", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: "workflow_dispatch"
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: [workflow_dispatch]
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: ["push"]
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
"some-category"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
undefined
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build foo
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: foo-category
|
||||||
|
bar:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build bar
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: bar-category
|
||||||
|
`) as Workflow,
|
||||||
|
"bar",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
"bar-category"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
language: [javascript, python]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
language: \${{ matrix.language }}
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:\${{ matrix.language }}"
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{ language: "javascript" }
|
||||||
|
),
|
||||||
|
"/language:javascript"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
|
||||||
|
t.throws(
|
||||||
|
() =>
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "\${{ github.workflow }}"
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
{
|
||||||
|
message:
|
||||||
|
"Could not get category input to github/codeql-action/analyze since it contained " +
|
||||||
|
"an unrecognized dynamic value.",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow throws error for workflow with multiple categories", (t) => {
|
||||||
|
t.throws(
|
||||||
|
() =>
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: another-category
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
{
|
||||||
|
message:
|
||||||
|
"Could not get category input to github/codeql-action/analyze since there were multiple steps " +
|
||||||
|
"calling github/codeql-action/analyze with different values for category.",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
388
src/workflow.ts
Normal file
388
src/workflow.ts
Normal file
|
|
@ -0,0 +1,388 @@
|
||||||
|
import * as fs from "fs";
|
||||||
|
import * as path from "path";
|
||||||
|
|
||||||
|
import * as core from "@actions/core";
|
||||||
|
import * as yaml from "js-yaml";
|
||||||
|
|
||||||
|
import * as api from "./api-client";
|
||||||
|
import { getRequiredEnvParam } from "./util";
|
||||||
|
|
||||||
|
interface WorkflowJobStep {
|
||||||
|
run: any;
|
||||||
|
uses?: string;
|
||||||
|
with?: { [key: string]: string };
|
||||||
|
}
|
||||||
|
|
||||||
|
interface WorkflowJob {
|
||||||
|
steps?: WorkflowJobStep[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface WorkflowTrigger {
|
||||||
|
branches?: string[] | string;
|
||||||
|
paths?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// on: {} then push/pull_request are undefined
|
||||||
|
// on:
|
||||||
|
// push:
|
||||||
|
// pull_request:
|
||||||
|
// then push/pull_request are null
|
||||||
|
interface WorkflowTriggers {
|
||||||
|
push?: WorkflowTrigger | null;
|
||||||
|
pull_request?: WorkflowTrigger | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Workflow {
|
||||||
|
jobs?: { [key: string]: WorkflowJob };
|
||||||
|
on?: string | string[] | WorkflowTriggers;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isObject(o: unknown): o is object {
|
||||||
|
return o !== null && typeof o === "object";
|
||||||
|
}
|
||||||
|
|
||||||
|
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||||
|
|
||||||
|
function escapeRegExp(string) {
|
||||||
|
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||||
|
}
|
||||||
|
|
||||||
|
function patternToRegExp(value) {
|
||||||
|
return new RegExp(
|
||||||
|
`^${value
|
||||||
|
.toString()
|
||||||
|
.split(GLOB_PATTERN)
|
||||||
|
.reduce(function (arr, cur) {
|
||||||
|
if (cur === "**") {
|
||||||
|
arr.push(".*?");
|
||||||
|
} else if (cur === "*") {
|
||||||
|
arr.push("[^/]*?");
|
||||||
|
} else if (cur) {
|
||||||
|
arr.push(escapeRegExp(cur));
|
||||||
|
}
|
||||||
|
return arr;
|
||||||
|
}, [])
|
||||||
|
.join("")}$`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// this function should return true if patternA is a superset of patternB
|
||||||
|
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||||
|
export function patternIsSuperset(patternA: string, patternB: string): boolean {
|
||||||
|
return patternToRegExp(patternA).test(patternB);
|
||||||
|
}
|
||||||
|
|
||||||
|
function branchesToArray(branches?: string | null | string[]): string[] | "**" {
|
||||||
|
if (typeof branches === "string") {
|
||||||
|
return [branches];
|
||||||
|
}
|
||||||
|
if (Array.isArray(branches)) {
|
||||||
|
if (branches.length === 0) {
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
return branches;
|
||||||
|
}
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
export interface CodedError {
|
||||||
|
message: string;
|
||||||
|
code: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function toCodedErrors(errors: {
|
||||||
|
[code: string]: string;
|
||||||
|
}): Record<string, CodedError> {
|
||||||
|
return Object.entries(errors).reduce((acc, [code, message]) => {
|
||||||
|
acc[code] = { message, code };
|
||||||
|
return acc;
|
||||||
|
}, {} as Record<string, CodedError>);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Known problems with a workflow's configuration. For each entry, the key is
// the code sent back via the status report, and the value is the message added
// as a warning annotation to the run.
export const WorkflowErrors = toCodedErrors({
  MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
  MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
  PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
  PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
  CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
});
|
||||||
|
|
||||||
|
/**
 * Checks the given workflow definition for configurations that interfere with
 * Code Scanning, e.g. a missing push trigger, path filters, or branch filters
 * on push that don't cover the branches targeted by pull_request.
 *
 * @param doc the parsed workflow file
 * @returns the problems found (empty when the workflow looks fine)
 */
export function getWorkflowErrors(doc: Workflow): CodedError[] {
  const errors: CodedError[] = [];

  // GITHUB_JOB identifies the currently executing job, so we only inspect the
  // steps of the job this Action is running in.
  const jobName = process.env.GITHUB_JOB;

  if (jobName) {
    const job = doc?.jobs?.[jobName];

    const steps = job?.steps;

    if (Array.isArray(steps)) {
      for (const step of steps) {
        // this was advice that we used to give in the README
        // we actually want to run the analysis on the merge commit
        // to produce results that are more inline with expectations
        // (i.e: this is what will happen if you merge this PR)
        // and avoid some race conditions
        if (step?.run === "git checkout HEAD^2") {
          errors.push(WorkflowErrors.CheckoutWrongHead);
          break;
        }
      }
    }
  }

  let missingPush = false;

  // `on` may be a single event name, a list of event names, or an object of
  // triggers; each shape is handled separately below.
  if (doc.on === undefined) {
    // this is not a valid config
  } else if (typeof doc.on === "string") {
    if (doc.on === "pull_request") {
      missingPush = true;
    }
  } else if (Array.isArray(doc.on)) {
    const hasPush = doc.on.includes("push");
    const hasPullRequest = doc.on.includes("pull_request");
    if (hasPullRequest && !hasPush) {
      missingPush = true;
    }
  } else if (isObject(doc.on)) {
    // hasOwnProperty distinguishes `push: null` (present, all branches) from
    // an absent `push` key.
    const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
    const hasPullRequest = Object.prototype.hasOwnProperty.call(
      doc.on,
      "pull_request"
    );

    if (!hasPush && hasPullRequest) {
      missingPush = true;
    }
    if (hasPush && hasPullRequest) {
      const paths = doc.on.push?.paths;
      // if you specify paths or paths-ignore you can end up with commits that have no baseline
      // if they didn't change any files
      // currently we cannot go back through the history and find the most recent baseline
      if (Array.isArray(paths) && paths.length > 0) {
        errors.push(WorkflowErrors.PathsSpecified);
      }
      const pathsIgnore = doc.on.push?.["paths-ignore"];
      if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
        errors.push(WorkflowErrors.PathsIgnoreSpecified);
      }
    }

    // if doc.on.pull_request is null that means 'all branches'
    // if doc.on.pull_request is undefined that means 'off'
    // we only want to check for mismatched branches if pull_request is on.
    if (doc.on.pull_request !== undefined) {
      const push = branchesToArray(doc.on.push?.branches);

      // "**" means push runs on every branch, so any pull_request branch has
      // a baseline and no mismatch is possible.
      if (push !== "**") {
        const pull_request = branchesToArray(doc.on.pull_request?.branches);

        if (pull_request !== "**") {
          // branches listed for pull_request that no push pattern covers
          const difference = pull_request.filter(
            (value) => !push.some((o) => patternIsSuperset(o, value))
          );
          if (difference.length > 0) {
            // there are branches in pull_request that may not have a baseline
            // because we are not building them on push
            errors.push(WorkflowErrors.MismatchedBranches);
          }
        } else if (push.length > 0) {
          // push is set up to run on a subset of branches
          // and you could open a PR against a branch with no baseline
          errors.push(WorkflowErrors.MismatchedBranches);
        }
      }
    }
  }

  if (missingPush) {
    errors.push(WorkflowErrors.MissingPushHook);
  }

  return errors;
}
|
||||||
|
|
||||||
|
export async function validateWorkflow(): Promise<undefined | string> {
|
||||||
|
let workflow: Workflow;
|
||||||
|
try {
|
||||||
|
workflow = await getWorkflow();
|
||||||
|
} catch (e) {
|
||||||
|
return `error: getWorkflow() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
let workflowErrors: CodedError[];
|
||||||
|
try {
|
||||||
|
workflowErrors = getWorkflowErrors(workflow);
|
||||||
|
} catch (e) {
|
||||||
|
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (workflowErrors.length > 0) {
|
||||||
|
let message: string;
|
||||||
|
try {
|
||||||
|
message = formatWorkflowErrors(workflowErrors);
|
||||||
|
} catch (e) {
|
||||||
|
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
core.warning(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return formatWorkflowCause(workflowErrors);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatWorkflowErrors(errors: CodedError[]): string {
|
||||||
|
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||||
|
|
||||||
|
const errorsList = errors.map((e) => e.message).join(" ");
|
||||||
|
|
||||||
|
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatWorkflowCause(errors: CodedError[]): undefined | string {
|
||||||
|
if (errors.length === 0) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return errors.map((e) => e.code).join(",");
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getWorkflow(): Promise<Workflow> {
|
||||||
|
const relativePath = await getWorkflowPath();
|
||||||
|
const absolutePath = path.join(
|
||||||
|
getRequiredEnvParam("GITHUB_WORKSPACE"),
|
||||||
|
relativePath
|
||||||
|
);
|
||||||
|
|
||||||
|
return yaml.load(fs.readFileSync(absolutePath, "utf-8")) as Workflow;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the path of the currently executing workflow.
|
||||||
|
*/
|
||||||
|
export async function getWorkflowPath(): Promise<string> {
|
||||||
|
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||||
|
const owner = repo_nwo[0];
|
||||||
|
const repo = repo_nwo[1];
|
||||||
|
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
|
||||||
|
|
||||||
|
const apiClient = api.getApiClient();
|
||||||
|
const runsResponse = await apiClient.request(
|
||||||
|
"GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true",
|
||||||
|
{
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
run_id,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
const workflowUrl = runsResponse.data.workflow_url;
|
||||||
|
|
||||||
|
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||||
|
|
||||||
|
return workflowResponse.data.path;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the workflow run ID.
|
||||||
|
*/
|
||||||
|
export function getWorkflowRunID(): number {
|
||||||
|
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
||||||
|
if (Number.isNaN(workflowRunID)) {
|
||||||
|
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||||
|
}
|
||||||
|
return workflowRunID;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getStepsCallingAction(
|
||||||
|
job: WorkflowJob,
|
||||||
|
actionName: string
|
||||||
|
): WorkflowJobStep[] {
|
||||||
|
const steps = job.steps;
|
||||||
|
if (!Array.isArray(steps)) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get steps calling ${actionName} since job.steps was not an array.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return steps.filter((step) => step.uses?.includes(actionName));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the value of a particular input with which
|
||||||
|
* an Action in the workflow would be invoked.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the value of the input, or undefined if no such input is passed to the Action
|
||||||
|
* @throws an error if the value of the input could not be determined, or we could not
|
||||||
|
* determine that no such input is passed to the Action.
|
||||||
|
*/
|
||||||
|
function getInputOrThrow(
|
||||||
|
workflow: Workflow,
|
||||||
|
jobName: string,
|
||||||
|
actionName: string,
|
||||||
|
inputName: string,
|
||||||
|
matrixVars: { [key: string]: string }
|
||||||
|
) {
|
||||||
|
if (!workflow.jobs) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get ${inputName} input to ${actionName} since the workflow has no jobs.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (!workflow.jobs[jobName]) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get ${inputName} input to ${actionName} since the workflow has no job named ${jobName}.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const inputs = getStepsCallingAction(workflow.jobs[jobName], actionName)
|
||||||
|
.map((step) => step.with?.[inputName])
|
||||||
|
.filter((input) => input !== undefined)
|
||||||
|
.map((input) => input!);
|
||||||
|
|
||||||
|
if (inputs.length === 0) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
if (!inputs.every((input) => input === inputs[0])) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get ${inputName} input to ${actionName} since there were multiple steps calling ` +
|
||||||
|
`${actionName} with different values for ${inputName}.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make a basic attempt to substitute matrix variables
|
||||||
|
// First normalize by removing whitespace
|
||||||
|
let input = inputs[0].replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
|
||||||
|
for (const [key, value] of Object.entries(matrixVars)) {
|
||||||
|
input = input.replace(`\${{matrix.${key}}}`, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (input.includes("${{")) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the category input for the particular job,
|
||||||
|
* given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the category input, or undefined if the category input is not defined
|
||||||
|
* @throws an error if the category input could not be determined
|
||||||
|
*/
|
||||||
|
export function getCategoryInputOrThrow(
|
||||||
|
workflow: Workflow,
|
||||||
|
jobName: string,
|
||||||
|
matrixVars: { [key: string]: string }
|
||||||
|
): string | undefined {
|
||||||
|
return getInputOrThrow(
|
||||||
|
workflow,
|
||||||
|
jobName,
|
||||||
|
"github/codeql-action/analyze",
|
||||||
|
"category",
|
||||||
|
matrixVars
|
||||||
|
);
|
||||||
|
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue