Merge pull request #372 from github/update-v1-7a340d32
Merge main into v1
This commit is contained in:
commit
3a205cc343
25 changed files with 300 additions and 223 deletions
|
|
@ -44,7 +44,6 @@
|
|||
"@typescript-eslint/no-unsafe-call": "off",
|
||||
"@typescript-eslint/no-unsafe-member-access": "off",
|
||||
"@typescript-eslint/no-unsafe-return": "off",
|
||||
"@typescript-eslint/no-unused-vars": "off",
|
||||
"@typescript-eslint/no-var-requires": "off",
|
||||
"@typescript-eslint/prefer-regexp-exec": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
|
|
|
|||
13
.github/workflows/pr-checks.yml
vendored
13
.github/workflows/pr-checks.yml
vendored
|
|
@ -54,6 +54,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- uses: ./../action/init
|
||||
- name: Build code
|
||||
shell: bash
|
||||
|
|
@ -93,6 +94,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
tools: ${{ matrix.tools }}
|
||||
|
|
@ -126,6 +128,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
|
|
@ -159,6 +162,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
|
|
@ -185,6 +189,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: go
|
||||
|
|
@ -205,6 +210,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- name: Set up Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
|
|
@ -246,6 +252,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
- uses: ./../action/init
|
||||
with:
|
||||
languages: javascript
|
||||
|
|
@ -339,6 +346,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
|
@ -374,6 +382,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
|
@ -410,6 +419,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
|
@ -447,6 +457,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
|
@ -481,6 +492,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
|
@ -516,6 +528,7 @@ jobs:
|
|||
mkdir ../action
|
||||
mv * .github ../action/
|
||||
mv ../action/tests/multi-language-repo/{*,.github} .
|
||||
mv ../action/.github/workflows .github
|
||||
|
||||
- name: Build runner
|
||||
run: |
|
||||
|
|
|
|||
46
lib/actions-util.js
generated
46
lib/actions-util.js
generated
|
|
@ -153,6 +153,8 @@ function toCodedErrors(errors) {
|
|||
return acc;
|
||||
}, {});
|
||||
}
|
||||
// code to send back via status report
|
||||
// message to add as a warning annotation to the run
|
||||
exports.WorkflowErrors = toCodedErrors({
|
||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||
|
|
@ -161,9 +163,8 @@ exports.WorkflowErrors = toCodedErrors({
|
|||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||
LintFailed: `Unable to lint workflow for CodeQL.`,
|
||||
});
|
||||
function validateWorkflow(doc) {
|
||||
function getWorkflowErrors(doc) {
|
||||
var _a, _b, _c, _d, _e, _f, _g, _h;
|
||||
const errors = [];
|
||||
const jobName = process.env.GITHUB_JOB;
|
||||
|
|
@ -260,20 +261,35 @@ function validateWorkflow(doc) {
|
|||
}
|
||||
return errors;
|
||||
}
|
||||
exports.validateWorkflow = validateWorkflow;
|
||||
async function getWorkflowErrors() {
|
||||
exports.getWorkflowErrors = getWorkflowErrors;
|
||||
async function validateWorkflow() {
|
||||
let workflow;
|
||||
try {
|
||||
const workflow = await getWorkflow();
|
||||
if (workflow === undefined) {
|
||||
return [];
|
||||
}
|
||||
return validateWorkflow(workflow);
|
||||
workflow = await getWorkflow();
|
||||
}
|
||||
catch (e) {
|
||||
return [exports.WorkflowErrors.LintFailed];
|
||||
return `error: getWorkflow() failed: ${e.toString()}`;
|
||||
}
|
||||
let workflowErrors;
|
||||
try {
|
||||
workflowErrors = getWorkflowErrors(workflow);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: getWorkflowErrors() failed: ${e.toString()}`;
|
||||
}
|
||||
if (workflowErrors.length > 0) {
|
||||
let message;
|
||||
try {
|
||||
message = formatWorkflowErrors(workflowErrors);
|
||||
}
|
||||
catch (e) {
|
||||
return `error: formatWorkflowErrors() failed: ${e.toString()}`;
|
||||
}
|
||||
core.warning(message);
|
||||
}
|
||||
return `warning: ${formatWorkflowCause(workflowErrors)}`;
|
||||
}
|
||||
exports.getWorkflowErrors = getWorkflowErrors;
|
||||
exports.validateWorkflow = validateWorkflow;
|
||||
function formatWorkflowErrors(errors) {
|
||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||
const errorsList = errors.map((e) => e.message).join(" ");
|
||||
|
|
@ -290,13 +306,7 @@ exports.formatWorkflowCause = formatWorkflowCause;
|
|||
async function getWorkflow() {
|
||||
const relativePath = await getWorkflowPath();
|
||||
const absolutePath = path.join(getRequiredEnvParam("GITHUB_WORKSPACE"), relativePath);
|
||||
try {
|
||||
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
|
||||
}
|
||||
catch (e) {
|
||||
core.warning(`Could not read workflow: ${e.toString()}`);
|
||||
return undefined;
|
||||
}
|
||||
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
|
||||
}
|
||||
exports.getWorkflow = getWorkflow;
|
||||
/**
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
128
lib/actions-util.test.js
generated
128
lib/actions-util.test.js
generated
|
|
@ -72,32 +72,32 @@ ava_1.default("prepareEnvironment() when a local run", (t) => {
|
|||
t.deepEqual(process.env.GITHUB_JOB, "UNKNOWN-JOB");
|
||||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on is empty", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({ on: {} });
|
||||
ava_1.default("getWorkflowErrors() when on is empty", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({ on: ["push"] });
|
||||
ava_1.default("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
|
||||
ava_1.default("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request", "schedule"],
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"], paths: ["test/*"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
|
|
@ -105,34 +105,34 @@ ava_1.default("validateWorkflow() when on.push should not have a path", (t) => {
|
|||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["feature"] },
|
||||
|
|
@ -140,8 +140,8 @@ ava_1.default("validateWorkflow() when on.push is mismatched", (t) => {
|
|||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main", "feature"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
|
|
@ -149,8 +149,8 @@ ava_1.default("validateWorkflow() when on.push is not mismatched", (t) => {
|
|||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["main", "feature"] },
|
||||
|
|
@ -158,50 +158,50 @@ ava_1.default("validateWorkflow() when on.push is mismatched for pull_request",
|
|||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() for a range of malformed workflows", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: 1,
|
||||
pull_request: 1,
|
||||
},
|
||||
}), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: 1,
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: [1],
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { 1: 1 },
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: 1 },
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [1] },
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: 1 } },
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [undefined] },
|
||||
}), [actionsutil.WorkflowErrors.MissingHooks]));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(1), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow({
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: {
|
||||
branches: 1,
|
||||
|
|
@ -212,8 +212,8 @@ ava_1.default("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
},
|
||||
}), []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
|
|
@ -222,8 +222,8 @@ on:
|
|||
`));
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.pull_request for wildcard branches", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/*"] },
|
||||
pull_request: { branches: "feature/moose" },
|
||||
|
|
@ -231,8 +231,8 @@ ava_1.default("validateWorkflow() when on.pull_request for wildcard branches", (
|
|||
});
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
ava_1.default("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/moose"] },
|
||||
pull_request: { branches: "feature/*" },
|
||||
|
|
@ -240,9 +240,9 @@ ava_1.default("validateWorkflow() when on.pull_request for mismatched wildcard b
|
|||
});
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when HEAD^2 is checked out", (t) => {
|
||||
ava_1.default("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||
});
|
||||
|
|
@ -291,8 +291,8 @@ ava_1.default("patternIsSuperset()", (t) => {
|
|||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when branches contain dots", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() when branches contain dots", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
on:
|
||||
push:
|
||||
branches: [4.1, master]
|
||||
|
|
@ -302,8 +302,8 @@ ava_1.default("validateWorkflow() when branches contain dots", (t) => {
|
|||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on.push has a trailing comma", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
|
|
@ -314,9 +314,9 @@ on:
|
|||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
ava_1.default("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
|
|
@ -338,9 +338,9 @@ jobs:
|
|||
`));
|
||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
||||
});
|
||||
ava_1.default("validateWorkflow() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
ava_1.default("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test3";
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
|
|
@ -362,35 +362,35 @@ jobs:
|
|||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() when on is missing", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() when on is missing", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
`));
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() with a different on setup", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() with a different on setup", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on: "workflow_dispatch"
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on: [workflow_dispatch]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
workflow_dispatch: {}
|
||||
`)), []));
|
||||
});
|
||||
ava_1.default("validateWorkflow() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
ava_1.default("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
`)), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(yaml.safeLoad(`
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on: ["push"]
|
||||
`)), []));
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
2
lib/config-utils.test.js
generated
2
lib/config-utils.test.js
generated
|
|
@ -428,7 +428,7 @@ ava_1.default("Invalid queries in workflow file handled correctly", async (t) =>
|
|||
// This function just needs to be type-correct; it doesn't need to do anything,
|
||||
// since we're deliberately passing in invalid data
|
||||
const codeQL = codeql_1.setCodeQL({
|
||||
async resolveQueries(_queries, _extraSearchPath) {
|
||||
async resolveQueries() {
|
||||
return {
|
||||
byLanguage: {
|
||||
javascript: {},
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
20
lib/external-queries.js
generated
20
lib/external-queries.js
generated
|
|
@ -22,13 +22,10 @@ async function checkoutExternalRepository(repository, ref, apiDetails, tempDir,
|
|||
throw new Error(`'${repository}@${ref}' is not a valid repository and reference.`);
|
||||
}
|
||||
if (!fs.existsSync(checkoutLocation)) {
|
||||
const repoCloneURL = new URL(apiDetails.url);
|
||||
repoCloneURL.username = "x-access-token";
|
||||
repoCloneURL.password = apiDetails.externalRepoAuth;
|
||||
repoCloneURL.pathname += `/${repository}`;
|
||||
const repoCloneURL = buildCheckoutURL(repository, apiDetails);
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
|
||||
"clone",
|
||||
repoCloneURL.toString(),
|
||||
repoCloneURL,
|
||||
checkoutLocation,
|
||||
]).exec();
|
||||
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
|
||||
|
|
@ -41,4 +38,17 @@ async function checkoutExternalRepository(repository, ref, apiDetails, tempDir,
|
|||
return checkoutLocation;
|
||||
}
|
||||
exports.checkoutExternalRepository = checkoutExternalRepository;
|
||||
function buildCheckoutURL(repository, apiDetails) {
|
||||
const repoCloneURL = new URL(apiDetails.url);
|
||||
if (apiDetails.externalRepoAuth !== undefined) {
|
||||
repoCloneURL.username = "x-access-token";
|
||||
repoCloneURL.password = apiDetails.externalRepoAuth;
|
||||
}
|
||||
if (!repoCloneURL.pathname.endsWith("/")) {
|
||||
repoCloneURL.pathname += "/";
|
||||
}
|
||||
repoCloneURL.pathname += `${repository}`;
|
||||
return repoCloneURL.toString();
|
||||
}
|
||||
exports.buildCheckoutURL = buildCheckoutURL;
|
||||
//# sourceMappingURL=external-queries.js.map
|
||||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAKpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,UAAwC,EACxC,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC7C,YAAY,CAAC,QAAQ,GAAG,gBAAgB,CAAC;QACzC,YAAY,CAAC,QAAQ,GAAG,UAAU,CAAC,gBAAgB,CAAC;QACpD,YAAY,CAAC,QAAQ,IAAI,IAAI,UAAU,EAAE,CAAC;QAC1C,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,YAAY,CAAC,QAAQ,EAAE;YACvB,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AArCD,gEAqCC"}
|
||||
{"version":3,"file":"external-queries.js","sourceRoot":"","sources":["../src/external-queries.ts"],"names":[],"mappings":";;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AAKpD;;GAEG;AACI,KAAK,UAAU,0BAA0B,CAC9C,UAAkB,EAClB,GAAW,EACX,UAAwC,EACxC,OAAe,EACf,MAAc;IAEd,MAAM,CAAC,IAAI,CAAC,gBAAgB,UAAU,EAAE,CAAC,CAAC;IAE1C,MAAM,gBAAgB,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC;IAE7D,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;QACzC,wGAAwG;QACxG,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,IAAI,GAAG,4CAA4C,CAClE,CAAC;KACH;IAED,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;QACpC,MAAM,YAAY,GAAG,gBAAgB,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC;QAC9D,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,OAAO;YACP,YAAY;YACZ,gBAAgB;SACjB,CAAC,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE;YAChE,eAAe,gBAAgB,EAAE;YACjC,aAAa,gBAAgB,OAAO;YACpC,UAAU;YACV,GAAG;SACJ,CAAC,CAAC,IAAI,EAAE,CAAC;KACX;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAlCD,gEAkCC;AAED,SAAgB,gBAAgB,CAC9B,UAAkB,EAClB,UAAwC;IAExC,MAAM,YAAY,GAAG,IAAI,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC7C,IAAI,UAAU,CAAC,gBAAgB,KAAK,SAAS,EAAE;QAC7C,YAAY,CAAC,QAAQ,GAAG,gBAAgB,CAAC;QACzC,YAAY,CAAC,QAAQ,GAAG,UAAU,CAAC,gBAAgB,CAAC;KACrD;IACD,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QACxC,YAAY,CAAC,QAAQ,IAAI,GAAG,CAAC;KAC9B;IACD,YAAY,CAAC,QAAQ,IAAI,GAAG,UAAU,EAAE,CAAC;IACzC,OAAO,YAAY,CAAC,QAAQ,EAAE,CAAC;AACjC,CAAC;AAdD,4CAcC"}
|
||||
18
lib/external-queries.test.js
generated
18
lib/external-queries.test.js
generated
|
|
@ -94,4 +94,22 @@ ava_1.default("checkoutExternalQueries", async (t) => {
|
|||
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
|
||||
});
|
||||
});
|
||||
ava_1.default("buildCheckoutURL", (t) => {
|
||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
||||
url: "https://github.com",
|
||||
externalRepoAuth: undefined,
|
||||
}), "https://github.com/foo/bar");
|
||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
||||
url: "https://github.example.com/",
|
||||
externalRepoAuth: undefined,
|
||||
}), "https://github.example.com/foo/bar");
|
||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
||||
url: "https://github.com",
|
||||
externalRepoAuth: "abc",
|
||||
}), "https://x-access-token:abc@github.com/foo/bar");
|
||||
t.deepEqual(externalQueries.buildCheckoutURL("foo/bar", {
|
||||
url: "https://github.example.com/",
|
||||
externalRepoAuth: "abc",
|
||||
}), "https://x-access-token:abc@github.example.com/foo/bar");
|
||||
});
|
||||
//# sourceMappingURL=external-queries.test.js.map
|
||||
|
|
@ -1 +1 @@
|
|||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EA
CR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"external-queries.test.js","sourceRoot":"","sources":["../src/external-queries.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,yEAA2D;AAC3D,kEAAoD;AACpD,8CAAuB;AAEvB,oEAAsD;AACtD,uCAA4C;AAC5C,mDAA6C;AAC7C,6CAA+B;AAE/B,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,yBAAyB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC1C,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QACrC,kDAAkD;QAClD,mFAAmF;QACnF,gDAAgD;QAChD,wCAAwC;QACxC,8EAA8E;QAC9E,MAAM,eAAe,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;QAC3D,MAAM,QAAQ,GAAG,WAAW,CAAC;QAC7B,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE,QAAQ,CAAC,CAAC;QACtD,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;QAE/C,oDAAoD;QACpD,oCAAoC;QACpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,KAAK,WAAW,OAAiB;YAC9C,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,OAAO,GAAG;gBACR,aAAa,UAAU,EAAE;gBACzB,eAAe,QAAQ,EAAE;gBACzB,GAAG,OAAO;aACX,CAAC;YACF,OAAO,CAAC,GAAG,CAAC,gBAAgB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;YACjD,IAAI;gBACF,MAAM,IAAI,UAAU,CAAC,UAAU,CAC7B,MAAM,SAAS,CAAC,SAAS,CAAC,KAAK,CAAC,EAChC,OAAO,EACP;oBACE,MAAM,EAAE,IAAI;oBACZ,SAAS,EAAE;wBACT,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;wBACD,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;4BACf,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;wBAC5B,CAAC;qBACF;iBACF,CACF,CAAC,IAAI,EAAE,CAAC;aACV;YAAC,OAAO,CAAC,EAAE;gBACV,OAAO,CAAC,GAAG,CAAC,uBAAuB,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;gBACxD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7B,MAAM,CAAC,CAAC;aACT;YACD,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;QACvB,CAAC,CAAC;QAEF,EAAE,CAAC,SAAS,CAAC,QAAQ,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC5C,MAAM,MAAM,CAAC,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC;QACjC,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,YAAY,EAAE,iBAAiB,CAAC,CAAC,CAAC;QAC1D,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,WAAW,EAAE,WAAW,CAAC,CAAC,CAAC;QACnD,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,gBAAgB,EAAE,OAAO,CAAC,CAAC,CAAC;QAEpD,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAE1C,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,EAAE,WAAW,CAAC,CAAC;QACxD,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;QACxC,MAAM,MAAM,CAAC,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3B,MAAM,MAAM,CAAC,CAAC,QAAQ,EAAE,IAAI,EAAE,SAAS,CAAC,CAAC,CAAC;QAC1C,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC;QAEvD,8DAA8D;QAC9D,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACpD,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EACR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QAEpE,oEAAoE;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAChE,MAAM,eAAe,CAAC,0BAA0B,CAC9C,QAAQ,EA
CR,UAAU,EACV,EAAE,GAAG,EAAE,UAAU,eAAe,EAAE,EAAE,gBAAgB,EAAE,EAAE,EAAE,EAC1D,MAAM,EACN,yBAAe,CAAC,IAAI,CAAC,CACtB,CAAC;QACF,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC;QAC/D,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;QACpE,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC;IACvE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,EAAE;IAC7B,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,4BAA4B,CAC7B,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,EACF,oCAAoC,CACrC,CAAC;IAEF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,oBAAoB;QACzB,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,+CAA+C,CAChD,CAAC;IACF,CAAC,CAAC,SAAS,CACT,eAAe,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC1C,GAAG,EAAE,6BAA6B;QAClC,gBAAgB,EAAE,KAAK;KACxB,CAAC,EACF,uDAAuD,CACxD,CAAC;AACJ,CAAC,CAAC,CAAC"}
|
||||
14
lib/init-action.js
generated
14
lib/init-action.js
generated
|
|
@ -49,7 +49,6 @@ async function sendSuccessStatusReport(startedAt, config, toolsVersion) {
|
|||
await actionsUtil.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
var _a;
|
||||
const startedAt = new Date();
|
||||
const logger = logging_1.getActionsLogger();
|
||||
let config;
|
||||
|
|
@ -57,7 +56,7 @@ async function run() {
|
|||
let toolsVersion;
|
||||
const apiDetails = {
|
||||
auth: actionsUtil.getRequiredInput("token"),
|
||||
externalRepoAuth: (_a = actionsUtil.getOptionalInput("external-repository-token"), (_a !== null && _a !== void 0 ? _a : actionsUtil.getRequiredInput("token"))),
|
||||
externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
|
||||
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
|
||||
};
|
||||
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
|
||||
|
|
@ -66,15 +65,8 @@ async function run() {
|
|||
}
|
||||
try {
|
||||
actionsUtil.prepareLocalRunEnvironment();
|
||||
const workflowErrors = await actionsUtil.getWorkflowErrors();
|
||||
// we do not want to worry users if linting is failing
|
||||
// but we do want to send a status report containing this error code
|
||||
// below
|
||||
const userWorkflowErrors = workflowErrors.filter((o) => o.code !== "LintFailed");
|
||||
if (userWorkflowErrors.length > 0) {
|
||||
core.warning(actionsUtil.formatWorkflowErrors(userWorkflowErrors));
|
||||
}
|
||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, actionsUtil.formatWorkflowCause(workflowErrors))))) {
|
||||
const workflowErrors = await actionsUtil.validateWorkflow();
|
||||
if (!(await actionsUtil.sendStatusReport(await actionsUtil.createStatusReportBase("init", "starting", startedAt, workflowErrors)))) {
|
||||
return;
|
||||
}
|
||||
const initCodeQLResult = await init_1.initCodeQL(actionsUtil.getOptionalInput("tools"), apiDetails, actionsUtil.getRequiredEnvParam("RUNNER_TEMP"), actionsUtil.getRequiredEnvParam("RUNNER_TOOL_CACHE"), "actions", logger);
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
3
lib/runner.js
generated
3
lib/runner.js
generated
|
|
@ -96,7 +96,6 @@ program
|
|||
// Remove this if commander.js starts supporting hidden options.
|
||||
.allowUnknownOption()
|
||||
.action(async (cmd) => {
|
||||
var _a;
|
||||
const logger = logging_1.getRunnerLogger(cmd.debug);
|
||||
try {
|
||||
const tempDir = getTempDir(cmd.tempDir);
|
||||
|
|
@ -107,7 +106,7 @@ program
|
|||
fs.mkdirSync(tempDir, { recursive: true });
|
||||
const apiDetails = {
|
||||
auth: cmd.githubAuth,
|
||||
externalRepoAuth: (_a = cmd.externalRepositoryToken, (_a !== null && _a !== void 0 ? _a : cmd.githubAuth)),
|
||||
externalRepoAuth: cmd.externalRepositoryToken,
|
||||
url: util_1.parseGithubUrl(cmd.githubUrl),
|
||||
};
|
||||
const gitHubVersion = await util_1.getGitHubVersion(apiDetails);
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
|
|
@ -90,44 +90,44 @@ test("prepareEnvironment() when a local run", (t) => {
|
|||
t.deepEqual(process.env.CODEQL_ACTION_ANALYSIS_KEY, "LOCAL-RUN:UNKNOWN-JOB");
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on is empty", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({ on: {} });
|
||||
test("getWorkflowErrors() when on is empty", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
||||
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({ on: ["push"] });
|
||||
test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
||||
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({ on: ["pull_request"] });
|
||||
test("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
||||
|
||||
t.deepEqual(
|
||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook])
|
||||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push is valid", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
});
|
||||
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request", "schedule"],
|
||||
});
|
||||
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"], paths: ["test/*"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
|
|
@ -139,16 +139,16 @@ test("validateWorkflow() when on.push should not have a path", (t) => {
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||
});
|
||||
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||
});
|
||||
|
||||
|
|
@ -157,16 +157,16 @@ test("validateWorkflow() when on.pull_requests is a string", (t) => {
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||
});
|
||||
|
||||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
on:
|
||||
push:
|
||||
|
|
@ -177,8 +177,8 @@ on:
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["feature"] },
|
||||
|
|
@ -190,8 +190,8 @@ test("validateWorkflow() when on.push is mismatched", (t) => {
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main", "feature"] },
|
||||
pull_request: { branches: ["main"] },
|
||||
|
|
@ -201,8 +201,8 @@ test("validateWorkflow() when on.push is not mismatched", (t) => {
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["main"] },
|
||||
pull_request: { branches: ["main", "feature"] },
|
||||
|
|
@ -214,10 +214,10 @@ test("validateWorkflow() when on.push is mismatched for pull_request", (t) => {
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() for a range of malformed workflows", (t) => {
|
||||
test("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: 1,
|
||||
pull_request: 1,
|
||||
|
|
@ -229,7 +229,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
} as any),
|
||||
[actionsutil.WorkflowErrors.MissingHooks]
|
||||
|
|
@ -238,7 +238,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: 1,
|
||||
} as any),
|
||||
|
|
@ -248,7 +248,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: [1],
|
||||
} as any),
|
||||
|
|
@ -258,7 +258,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { 1: 1 },
|
||||
} as any),
|
||||
|
|
@ -268,7 +268,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: 1 },
|
||||
} as any),
|
||||
|
|
@ -278,7 +278,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [1] },
|
||||
} as any),
|
||||
|
|
@ -288,7 +288,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: 1 } },
|
||||
} as any),
|
||||
|
|
@ -298,7 +298,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||
} as any),
|
||||
|
|
@ -308,7 +308,7 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: 1,
|
||||
jobs: { test: [undefined] },
|
||||
} as any),
|
||||
|
|
@ -316,11 +316,11 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
)
|
||||
);
|
||||
|
||||
t.deepEqual(...errorCodes(actionsutil.validateWorkflow(1 as any), []));
|
||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1 as any), []));
|
||||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow({
|
||||
actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: {
|
||||
branches: 1,
|
||||
|
|
@ -335,8 +335,8 @@ test("validateWorkflow() for a range of malformed workflows", (t) => {
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
|
|
@ -351,8 +351,8 @@ on:
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.pull_request for wildcard branches", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/*"] },
|
||||
pull_request: { branches: "feature/moose" },
|
||||
|
|
@ -362,8 +362,8 @@ test("validateWorkflow() when on.pull_request for wildcard branches", (t) => {
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: {
|
||||
push: { branches: ["feature/moose"] },
|
||||
pull_request: { branches: "feature/*" },
|
||||
|
|
@ -375,10 +375,10 @@ test("validateWorkflow() when on.pull_request for mismatched wildcard branches",
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when HEAD^2 is checked out", (t) => {
|
||||
test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
|
||||
const errors = actionsutil.validateWorkflow({
|
||||
const errors = actionsutil.getWorkflowErrors({
|
||||
on: ["push", "pull_request"],
|
||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||
});
|
||||
|
|
@ -446,8 +446,8 @@ test("patternIsSuperset()", (t) => {
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() when branches contain dots", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
test("getWorkflowErrors() when branches contain dots", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
on:
|
||||
push:
|
||||
|
|
@ -461,8 +461,8 @@ test("validateWorkflow() when branches contain dots", (t) => {
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on.push has a trailing comma", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
|
|
@ -477,10 +477,10 @@ on:
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test";
|
||||
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
|
|
@ -508,10 +508,10 @@ jobs:
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||
process.env.GITHUB_JOB = "test3";
|
||||
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
|
|
@ -537,8 +537,8 @@ jobs:
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() when on is missing", (t) => {
|
||||
const errors = actionsutil.validateWorkflow(
|
||||
test("getWorkflowErrors() when on is missing", (t) => {
|
||||
const errors = actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
`)
|
||||
|
|
@ -547,10 +547,10 @@ name: "CodeQL"
|
|||
t.deepEqual(...errorCodes(errors, []));
|
||||
});
|
||||
|
||||
test("validateWorkflow() with a different on setup", (t) => {
|
||||
test("getWorkflowErrors() with a different on setup", (t) => {
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow(
|
||||
actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on: "workflow_dispatch"
|
||||
|
|
@ -562,7 +562,7 @@ on: "workflow_dispatch"
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow(
|
||||
actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on: [workflow_dispatch]
|
||||
|
|
@ -574,7 +574,7 @@ on: [workflow_dispatch]
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow(
|
||||
actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
|
|
@ -586,10 +586,10 @@ on:
|
|||
);
|
||||
});
|
||||
|
||||
test("validateWorkflow() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow(
|
||||
actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on:
|
||||
|
|
@ -603,7 +603,7 @@ on:
|
|||
|
||||
t.deepEqual(
|
||||
...errorCodes(
|
||||
actionsutil.validateWorkflow(
|
||||
actionsutil.getWorkflowErrors(
|
||||
yaml.safeLoad(`
|
||||
name: "CodeQL"
|
||||
on: ["push"]
|
||||
|
|
|
|||
|
|
@@ -188,15 +188,15 @@ export interface CodedError {
message: string;
code: string;
}
function toCodedErrors(errors: {
[key: string]: string;
}): { [key: string]: CodedError } {
function toCodedErrors<T>(errors: T): Record<keyof T, CodedError> {
return Object.entries(errors).reduce((acc, [key, value]) => {
acc[key] = { message: value, code: key };
return acc;
}, {} as ReturnType<typeof toCodedErrors>);
}, {} as Record<keyof T, CodedError>);
}
// code to send back via status report
// message to add as a warning annotation to the run
export const WorkflowErrors = toCodedErrors({
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
MissingHooks: `Please specify on.push and on.pull_request hooks so that Code Scanning can compare pull requests against the state of the base branch.`,

@@ -205,10 +205,9 @@ export const WorkflowErrors = toCodedErrors({
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
LintFailed: `Unable to lint workflow for CodeQL.`,
});
export function validateWorkflow(doc: Workflow): CodedError[] {
export function getWorkflowErrors(doc: Workflow): CodedError[] {
const errors: CodedError[] = [];
const jobName = process.env.GITHUB_JOB;
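A quick illustration of what the generic toCodedErrors<T> buys over the old index-signature version: the literal keys of the input object survive into the result type, so every WorkflowErrors.<Name> access is checked by the compiler. The snippet below is a self-contained sketch and not part of the diff; ExampleErrors is a made-up name, and the constraint T extends Record<string, string> is added here only to keep the sketch compiling under strict settings.

interface CodedError {
  message: string;
  code: string;
}

// Sketch of the generic helper: keys of the input object become keys of the result.
function toCodedErrorsSketch<T extends Record<string, string>>(
  errors: T
): Record<keyof T, CodedError> {
  const result = {} as Record<keyof T, CodedError>;
  for (const key of Object.keys(errors) as Array<keyof T & string>) {
    result[key] = { message: errors[key], code: key };
  }
  return result;
}

// Hypothetical usage: the compiler now knows the exact property names.
const ExampleErrors = toCodedErrorsSketch({
  MissingHooks: "Please specify on.push and on.pull_request hooks.",
});
console.log(ExampleErrors.MissingHooks.code); // "MissingHooks"
// ExampleErrors.MissingHook  <- would now be a compile-time error (typo caught)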
@@ -317,18 +316,31 @@ export function validateWorkflow(doc: Workflow): CodedError[] {
return errors;
}
export async function getWorkflowErrors(): Promise<CodedError[]> {
export async function validateWorkflow(): Promise<undefined | string> {
let workflow: Workflow;
try {
const workflow = await getWorkflow();
if (workflow === undefined) {
return [];
}
return validateWorkflow(workflow);
workflow = await getWorkflow();
} catch (e) {
return [WorkflowErrors.LintFailed];
return `error: getWorkflow() failed: ${e.toString()}`;
}
let workflowErrors: CodedError[];
try {
workflowErrors = getWorkflowErrors(workflow);
} catch (e) {
return `error: getWorkflowErrors() failed: ${e.toString()}`;
}
if (workflowErrors.length > 0) {
let message: string;
try {
message = formatWorkflowErrors(workflowErrors);
} catch (e) {
return `error: formatWorkflowErrors() failed: ${e.toString()}`;
}
core.warning(message);
}
return `warning: ${formatWorkflowCause(workflowErrors)}`;
}
export function formatWorkflowErrors(errors: CodedError[]): string {
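For context, the reworked validateWorkflow() collapses every outcome into an optional string: undefined when there is nothing to report, otherwise a single "error:" / "warning:"-prefixed message that can be forwarded to the status report. A minimal sketch of how a caller might consume that shape; the reportCause helper below is hypothetical and not part of the action:

import * as core from "@actions/core";

// Hypothetical consumer of a Promise<undefined | string> validator.
async function reportCause(
  validate: () => Promise<undefined | string>
): Promise<string | undefined> {
  const cause = await validate();
  if (cause !== undefined) {
    // The string already carries its own "error:" or "warning:" prefix.
    core.debug(`workflow validation cause: ${cause}`);
  }
  return cause; // forwarded unchanged to the status report
}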
@@ -346,19 +358,14 @@ export function formatWorkflowCause(errors: CodedError[]): undefined | string {
return errors.map((e) => e.code).join(",");
}
export async function getWorkflow(): Promise<Workflow | undefined> {
export async function getWorkflow(): Promise<Workflow> {
const relativePath = await getWorkflowPath();
const absolutePath = path.join(
getRequiredEnvParam("GITHUB_WORKSPACE"),
relativePath
);
try {
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
} catch (e) {
core.warning(`Could not read workflow: ${e.toString()}`);
return undefined;
}
return yaml.safeLoad(fs.readFileSync(absolutePath, "utf-8"));
}
/**
@@ -20,7 +20,7 @@ export interface GitHubApiDetails {
}
export interface GitHubApiExternalRepoDetails {
externalRepoAuth: string;
externalRepoAuth: string | undefined;
url: string;
}
@@ -726,10 +726,7 @@ test("Invalid queries in workflow file handled correctly", async (t) => {
// This function just needs to be type-correct; it doesn't need to do anything,
// since we're deliberately passing in invalid data
const codeQL = setCodeQL({
async resolveQueries(
_queries: string[],
_extraSearchPath: string | undefined
) {
async resolveQueries() {
return {
byLanguage: {
javascript: {},
@@ -108,3 +108,35 @@ test("checkoutExternalQueries", async (t) => {
t.false(fs.existsSync(path.join(tmpDir, repoName, commit2Sha, "b")));
});
});
test("buildCheckoutURL", (t) => {
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: undefined,
}),
"https://github.com/foo/bar"
);
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: undefined,
}),
"https://github.example.com/foo/bar"
);
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.com",
externalRepoAuth: "abc",
}),
"https://x-access-token:abc@github.com/foo/bar"
);
t.deepEqual(
externalQueries.buildCheckoutURL("foo/bar", {
url: "https://github.example.com/",
externalRepoAuth: "abc",
}),
"https://x-access-token:abc@github.example.com/foo/bar"
);
});
@@ -29,13 +29,10 @@ export async function checkoutExternalRepository(
}
if (!fs.existsSync(checkoutLocation)) {
const repoCloneURL = new URL(apiDetails.url);
repoCloneURL.username = "x-access-token";
repoCloneURL.password = apiDetails.externalRepoAuth;
repoCloneURL.pathname += `/${repository}`;
const repoCloneURL = buildCheckoutURL(repository, apiDetails);
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [
"clone",
repoCloneURL.toString(),
repoCloneURL,
checkoutLocation,
]).exec();
await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), [

@@ -48,3 +45,19 @@ export async function checkoutExternalRepository(
return checkoutLocation;
}
export function buildCheckoutURL(
repository: string,
apiDetails: GitHubApiExternalRepoDetails
): string {
const repoCloneURL = new URL(apiDetails.url);
if (apiDetails.externalRepoAuth !== undefined) {
repoCloneURL.username = "x-access-token";
repoCloneURL.password = apiDetails.externalRepoAuth;
}
if (!repoCloneURL.pathname.endsWith("/")) {
repoCloneURL.pathname += "/";
}
repoCloneURL.pathname += `${repository}`;
return repoCloneURL.toString();
}
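A standalone sketch of the URL-building behaviour this hunk introduces, runnable on its own with Node's WHATWG URL. buildCheckoutURLExample is a stand-in name rather than the function above; it shows why the trailing-slash check matters (a base URL whose path lacks a trailing slash would otherwise have the repository glued onto its last path segment) and that credentials are only embedded when a token is present:

import { URL } from "url";

// Stand-in for buildCheckoutURL, assuming the same inputs as the diff above.
function buildCheckoutURLExample(
  repository: string,
  url: string,
  externalRepoAuth?: string
): string {
  const repoCloneURL = new URL(url);
  if (externalRepoAuth !== undefined) {
    repoCloneURL.username = "x-access-token";
    repoCloneURL.password = externalRepoAuth;
  }
  if (!repoCloneURL.pathname.endsWith("/")) {
    repoCloneURL.pathname += "/";
  }
  repoCloneURL.pathname += repository;
  return repoCloneURL.toString();
}

console.log(buildCheckoutURLExample("foo/bar", "https://github.com"));
// -> https://github.com/foo/bar
console.log(buildCheckoutURLExample("foo/bar", "https://ghe.example.com/prefix", "abc"));
// -> https://x-access-token:abc@ghe.example.com/prefix/foo/bar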
@@ -96,9 +96,7 @@ async function run() {
const apiDetails = {
auth: actionsUtil.getRequiredInput("token"),
externalRepoAuth:
actionsUtil.getOptionalInput("external-repository-token") ??
actionsUtil.getRequiredInput("token"),
externalRepoAuth: actionsUtil.getOptionalInput("external-repository-token"),
url: actionsUtil.getRequiredEnvParam("GITHUB_SERVER_URL"),
};

@@ -110,18 +108,7 @@ async function run() {
try {
actionsUtil.prepareLocalRunEnvironment();
const workflowErrors = await actionsUtil.getWorkflowErrors();
// we do not want to worry users if linting is failing
// but we do want to send a status report containing this error code
// below
const userWorkflowErrors = workflowErrors.filter(
(o) => o.code !== "LintFailed"
);
if (userWorkflowErrors.length > 0) {
core.warning(actionsUtil.formatWorkflowErrors(userWorkflowErrors));
}
const workflowErrors = await actionsUtil.validateWorkflow();
if (
!(await actionsUtil.sendStatusReport(

@@ -129,7 +116,7 @@ async function run() {
"init",
"starting",
startedAt,
actionsUtil.formatWorkflowCause(workflowErrors)
workflowErrors
)
))
) {
@@ -155,7 +155,7 @@ program
const apiDetails = {
auth: cmd.githubAuth,
externalRepoAuth: cmd.externalRepositoryToken ?? cmd.githubAuth,
externalRepoAuth: cmd.externalRepositoryToken,
url: parseGithubUrl(cmd.githubUrl),
};
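In both the action and the runner the external-repository token is now passed through as-is instead of falling back to the main token, so externalRepoAuth can legitimately be undefined and the clone URL only carries credentials when a token was actually supplied. A tiny illustrative sketch of that flow; makeApiDetails and its inputs are made-up names, not the real action inputs:

// Sketch only: how the optional token now flows into the API details.
function makeApiDetails(inputs: { token: string; externalRepositoryToken?: string }) {
  return {
    auth: inputs.token,
    // No "?? inputs.token" fallback any more: undefined means "no external auth".
    externalRepoAuth: inputs.externalRepositoryToken,
    url: "https://github.example.com",
  };
}

console.log(makeApiDetails({ token: "main-token" }).externalRepoAuth); // undefined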
@@ -3,7 +3,7 @@ description: 'Upload the analysis results'
author: 'GitHub'
inputs:
sarif_file:
description: The SARIF file or directory of SARIF files to be uploaded.
description: The SARIF file or directory of SARIF files to be uploaded. Each upload should contain a maximum of 1000 results, any additional results are ignored.
required: false
default: '../results'
checkout_path: