Merge pull request #1428 from github/update-v2.1.36-2b971a70
Merge main into releases/v2
This commit is contained in:
commit
a669cc5936
92 changed files with 3079 additions and 1792 deletions
21
.github/dependabot.yml
vendored
21
.github/dependabot.yml
vendored
|
|
@ -1,20 +1,17 @@
|
||||||
version: 2
|
version: 2
|
||||||
updates:
|
updates:
|
||||||
- package-ecosystem: "npm"
|
- package-ecosystem: npm
|
||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: weekly
|
||||||
day: "thursday" # Gives us a working day to merge this before our typical release
|
|
||||||
labels:
|
labels:
|
||||||
- "Update dependencies"
|
- Update dependencies
|
||||||
ignore:
|
ignore:
|
||||||
- dependency-name: "*"
|
- dependency-name: "*"
|
||||||
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
|
update-types:
|
||||||
- package-ecosystem: "npm"
|
- version-update:semver-minor
|
||||||
directory: "/runner"
|
- version-update:semver-patch
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: weekly
|
||||||
day: "thursday" # Gives us a working day to merge this before our typical release
|
|
||||||
ignore:
|
|
||||||
- dependency-name: "*"
|
|
||||||
update-types: ["version-update:semver-minor", "version-update:semver-patch"]
|
|
||||||
|
|
|
||||||
4
.github/workflows/__export-file-baseline-information.yml
generated
vendored
4
.github/workflows/__export-file-baseline-information.yml
generated
vendored
|
|
@ -42,11 +42,11 @@ jobs:
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
with:
|
with:
|
||||||
version: ${{ matrix.version }}
|
version: ${{ matrix.version }}
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
|
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
|
||||||
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
|
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
|
||||||
with:
|
with:
|
||||||
swift-version: '5.7'
|
swift-version: 5.7.0
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
languages: javascript
|
languages: javascript
|
||||||
|
|
|
||||||
8
.github/workflows/__multi-language-autodetect.yml
generated
vendored
8
.github/workflows/__multi-language-autodetect.yml
generated
vendored
|
|
@ -45,10 +45,6 @@ jobs:
|
||||||
version: latest
|
version: latest
|
||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: latest
|
version: latest
|
||||||
- os: ubuntu-latest
|
|
||||||
version: nightly-latest
|
|
||||||
- os: macos-latest
|
|
||||||
version: nightly-latest
|
|
||||||
name: Multi-language repository
|
name: Multi-language repository
|
||||||
timeout-minutes: 45
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
@ -65,11 +61,11 @@ jobs:
|
||||||
uses: actions/setup-go@v3
|
uses: actions/setup-go@v3
|
||||||
with:
|
with:
|
||||||
go-version: ^1.13.1
|
go-version: ^1.13.1
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||||
with:
|
with:
|
||||||
swift-version: '5.7'
|
swift-version: 5.7.0
|
||||||
|
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
|
|
|
||||||
72
.github/workflows/__submit-sarif-failure.yml
generated
vendored
Normal file
72
.github/workflows/__submit-sarif-failure.yml
generated
vendored
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|
# Warning: This file is generated automatically, and should not be modified.
|
||||||
|
# Instead, please modify the template in the pr-checks directory and run:
|
||||||
|
# pip install ruamel.yaml && python3 sync.py
|
||||||
|
# to regenerate this file.
|
||||||
|
|
||||||
|
name: PR Check - Submit SARIF after failure
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
GO111MODULE: auto
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- releases/v1
|
||||||
|
- releases/v2
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- synchronize
|
||||||
|
- reopened
|
||||||
|
- ready_for_review
|
||||||
|
workflow_dispatch: {}
|
||||||
|
jobs:
|
||||||
|
submit-sarif-failure:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: latest
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: cached
|
||||||
|
- os: ubuntu-latest
|
||||||
|
version: nightly-latest
|
||||||
|
name: Submit SARIF after failure
|
||||||
|
timeout-minutes: 45
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- name: Check out repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Prepare test
|
||||||
|
id: prepare-test
|
||||||
|
uses: ./.github/prepare-test
|
||||||
|
with:
|
||||||
|
version: ${{ matrix.version }}
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: ./init
|
||||||
|
with:
|
||||||
|
languages: javascript
|
||||||
|
- name: Fail
|
||||||
|
# We want this job to pass if the Action correctly uploads the SARIF file for
|
||||||
|
# the failed run.
|
||||||
|
# Setting this step to continue on error means that it is marked as completing
|
||||||
|
# successfully, so will not fail the job.
|
||||||
|
continue-on-error: true
|
||||||
|
run: exit 1
|
||||||
|
- uses: ./analyze
|
||||||
|
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
|
||||||
|
# above, we manually disable it with an `if` condition.
|
||||||
|
if: false
|
||||||
|
with:
|
||||||
|
category: /test-codeql-version:${{ matrix.version }}
|
||||||
|
env:
|
||||||
|
# Internal-only environment variable used to indicate that the post-init Action
|
||||||
|
# should expect to upload a SARIF file for the failed run.
|
||||||
|
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
|
||||||
|
# Make sure the uploading SARIF files feature is enabled.
|
||||||
|
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
|
||||||
|
# Upload the failed SARIF file as an integration test of the API endpoint.
|
||||||
|
CODEQL_ACTION_TEST_MODE: false
|
||||||
|
# Mark telemetry for this workflow so it can be treated separately.
|
||||||
|
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
|
||||||
|
|
||||||
4
.github/workflows/__swift-autobuild.yml
generated
vendored
4
.github/workflows/__swift-autobuild.yml
generated
vendored
|
|
@ -42,11 +42,11 @@ jobs:
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
with:
|
with:
|
||||||
version: ${{ matrix.version }}
|
version: ${{ matrix.version }}
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||||
with:
|
with:
|
||||||
swift-version: '5.7'
|
swift-version: 5.7.0
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
languages: swift
|
languages: swift
|
||||||
|
|
|
||||||
8
.github/workflows/__swift-custom-build.yml
generated
vendored
8
.github/workflows/__swift-custom-build.yml
generated
vendored
|
|
@ -33,10 +33,6 @@ jobs:
|
||||||
version: cached
|
version: cached
|
||||||
- os: macos-latest
|
- os: macos-latest
|
||||||
version: cached
|
version: cached
|
||||||
- os: ubuntu-latest
|
|
||||||
version: nightly-latest
|
|
||||||
- os: macos-latest
|
|
||||||
version: nightly-latest
|
|
||||||
name: Swift analysis using a custom build command
|
name: Swift analysis using a custom build command
|
||||||
timeout-minutes: 45
|
timeout-minutes: 45
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
@ -48,11 +44,11 @@ jobs:
|
||||||
uses: ./.github/prepare-test
|
uses: ./.github/prepare-test
|
||||||
with:
|
with:
|
||||||
version: ${{ matrix.version }}
|
version: ${{ matrix.version }}
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||||
with:
|
with:
|
||||||
swift-version: '5.7'
|
swift-version: 5.7.0
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
languages: swift
|
languages: swift
|
||||||
|
|
|
||||||
2
.github/workflows/pr-checks.yml
vendored
2
.github/workflows/pr-checks.yml
vendored
|
|
@ -88,7 +88,7 @@ jobs:
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v3
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.8
|
python-version: 3.8
|
||||||
|
|
||||||
|
|
|
||||||
2
.github/workflows/python-deps.yml
vendored
2
.github/workflows/python-deps.yml
vendored
|
|
@ -156,7 +156,7 @@ jobs:
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- uses: actions/setup-python@v3
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python_version }}
|
python-version: ${{ matrix.python_version }}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ fi
|
||||||
|
|
||||||
if [ "$#" -eq 1 ]; then
|
if [ "$#" -eq 1 ]; then
|
||||||
# If we were passed an argument, use that as the SHA
|
# If we were passed an argument, use that as the SHA
|
||||||
GITHUB_SHA="$0"
|
GITHUB_SHA="$1"
|
||||||
elif [ "$#" -gt 1 ]; then
|
elif [ "$#" -gt 1 ]; then
|
||||||
echo "Usage: $0 [SHA]"
|
echo "Usage: $0 [SHA]"
|
||||||
echo "Update the required checks based on the SHA, or main."
|
echo "Update the required checks based on the SHA, or main."
|
||||||
|
|
@ -23,7 +23,7 @@ fi
|
||||||
echo "Getting checks for $GITHUB_SHA"
|
echo "Getting checks for $GITHUB_SHA"
|
||||||
|
|
||||||
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
|
# Ignore any checks with "https://", CodeQL, LGTM, and Update checks.
|
||||||
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
|
CHECKS="$(gh api repos/github/codeql-action/commits/"${GITHUB_SHA}"/check-runs --paginate | jq --slurp --compact-output --raw-output '[.[].check_runs | .[].name | select(contains("https://") or . == "CodeQL" or . == "LGTM.com" or . == "check-expected-release-files" or contains("Update") or contains("update") or contains("test-setup-python-scripts") | not)] | unique | sort')"
|
||||||
|
|
||||||
echo "$CHECKS" | jq
|
echo "$CHECKS" | jq
|
||||||
|
|
||||||
|
|
|
||||||
2
.github/workflows/update-release-branch.yml
vendored
2
.github/workflows/update-release-branch.yml
vendored
|
|
@ -29,7 +29,7 @@ jobs:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Set up Python
|
- name: Set up Python
|
||||||
uses: actions/setup-python@v3
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: 3.8
|
python-version: 3.8
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Setup Python
|
- name: Setup Python
|
||||||
uses: actions/setup-python@v3
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.7"
|
python-version: "3.7"
|
||||||
- name: Checkout CodeQL Action
|
- name: Checkout CodeQL Action
|
||||||
|
|
@ -35,7 +35,7 @@ jobs:
|
||||||
env:
|
env:
|
||||||
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
|
ENTERPRISE_RELEASES_PATH: ${{ github.workspace }}/enterprise-releases/
|
||||||
- name: Commit Changes
|
- name: Commit Changes
|
||||||
uses: peter-evans/create-pull-request@c7f493a8000b8aeb17a1332e326ba76b57cb83eb # v3.4.1
|
uses: peter-evans/create-pull-request@2b011faafdcbc9ceb11414d64d0573f37c774b04 # v4.2.3
|
||||||
with:
|
with:
|
||||||
commit-message: Update supported GitHub Enterprise Server versions.
|
commit-message: Update supported GitHub Enterprise Server versions.
|
||||||
title: Update supported GitHub Enterprise Server versions.
|
title: Update supported GitHub Enterprise Server versions.
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,11 @@
|
||||||
# CodeQL Action Changelog
|
# CodeQL Action Changelog
|
||||||
|
|
||||||
|
## 2.1.36 - 08 Dec 2022
|
||||||
|
|
||||||
|
- Update default CodeQL bundle version to 2.11.5. [#1412](https://github.com/github/codeql-action/pull/1412)
|
||||||
|
- Add a step that tries to upload a SARIF file for the workflow run when that workflow run fails. This will help better surface failed code scanning workflow runs. [#1393](https://github.com/github/codeql-action/pull/1393)
|
||||||
|
- Python automatic dependency installation will no longer consider dependecy code installed in venv as user-written, for projects using Poetry that specify `virtualenvs.in-project = true` in their `poetry.toml`. [#1419](https://github.com/github/codeql-action/pull/1419).
|
||||||
|
|
||||||
## 2.1.35 - 01 Dec 2022
|
## 2.1.35 - 01 Dec 2022
|
||||||
|
|
||||||
No user facing changes.
|
No user facing changes.
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,7 @@ inputs:
|
||||||
upload:
|
upload:
|
||||||
description: Upload the SARIF file to Code Scanning
|
description: Upload the SARIF file to Code Scanning
|
||||||
required: false
|
required: false
|
||||||
|
# If changing this, make sure to update workflow.ts accordingly.
|
||||||
default: "true"
|
default: "true"
|
||||||
cleanup-level:
|
cleanup-level:
|
||||||
description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
|
description: "Level of cleanup to perform on CodeQL databases at the end of the analyze step. This should either be 'none' to skip cleanup, or be a valid argument for the --mode flag of the CodeQL CLI command 'codeql database cleanup' as documented at https://codeql.github.com/docs/codeql-cli/manual/database-cleanup"
|
||||||
|
|
@ -44,6 +45,7 @@ inputs:
|
||||||
checkout_path:
|
checkout_path:
|
||||||
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
|
description: "The path at which the analyzed repository was checked out. Used to relativize any absolute paths in the uploaded SARIF file."
|
||||||
required: false
|
required: false
|
||||||
|
# If changing this, make sure to update workflow.ts accordingly.
|
||||||
default: ${{ github.workspace }}
|
default: ${{ github.workspace }}
|
||||||
ref:
|
ref:
|
||||||
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
|
description: "The ref where results will be uploaded. If not provided, the Action will use the GITHUB_REF environment variable. If provided, the sha input must be provided as well. This input is not available in pull requests from forks."
|
||||||
|
|
|
||||||
233
lib/actions-util.js
generated
233
lib/actions-util.js
generated
|
|
@ -19,17 +19,17 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
exports.printDebugLogs = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.workflowEventName = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
/**
|
/**
|
||||||
|
|
@ -145,225 +145,6 @@ const determineMergeBaseCommitOid = async function () {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
|
||||||
function isObject(o) {
|
|
||||||
return o !== null && typeof o === "object";
|
|
||||||
}
|
|
||||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
|
||||||
function escapeRegExp(string) {
|
|
||||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
|
||||||
}
|
|
||||||
function patternToRegExp(value) {
|
|
||||||
return new RegExp(`^${value
|
|
||||||
.toString()
|
|
||||||
.split(GLOB_PATTERN)
|
|
||||||
.reduce(function (arr, cur) {
|
|
||||||
if (cur === "**") {
|
|
||||||
arr.push(".*?");
|
|
||||||
}
|
|
||||||
else if (cur === "*") {
|
|
||||||
arr.push("[^/]*?");
|
|
||||||
}
|
|
||||||
else if (cur) {
|
|
||||||
arr.push(escapeRegExp(cur));
|
|
||||||
}
|
|
||||||
return arr;
|
|
||||||
}, [])
|
|
||||||
.join("")}$`);
|
|
||||||
}
|
|
||||||
// this function should return true if patternA is a superset of patternB
|
|
||||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
|
||||||
function patternIsSuperset(patternA, patternB) {
|
|
||||||
return patternToRegExp(patternA).test(patternB);
|
|
||||||
}
|
|
||||||
exports.patternIsSuperset = patternIsSuperset;
|
|
||||||
function branchesToArray(branches) {
|
|
||||||
if (typeof branches === "string") {
|
|
||||||
return [branches];
|
|
||||||
}
|
|
||||||
if (Array.isArray(branches)) {
|
|
||||||
if (branches.length === 0) {
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
return branches;
|
|
||||||
}
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
function toCodedErrors(errors) {
|
|
||||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
|
||||||
acc[key] = { message: value, code: key };
|
|
||||||
return acc;
|
|
||||||
}, {});
|
|
||||||
}
|
|
||||||
// code to send back via status report
|
|
||||||
// message to add as a warning annotation to the run
|
|
||||||
exports.WorkflowErrors = toCodedErrors({
|
|
||||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
|
||||||
});
|
|
||||||
function getWorkflowErrors(doc) {
|
|
||||||
var _a, _b, _c, _d, _e;
|
|
||||||
const errors = [];
|
|
||||||
const jobName = process.env.GITHUB_JOB;
|
|
||||||
if (jobName) {
|
|
||||||
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
|
||||||
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
|
||||||
if (Array.isArray(steps)) {
|
|
||||||
for (const step of steps) {
|
|
||||||
// this was advice that we used to give in the README
|
|
||||||
// we actually want to run the analysis on the merge commit
|
|
||||||
// to produce results that are more inline with expectations
|
|
||||||
// (i.e: this is what will happen if you merge this PR)
|
|
||||||
// and avoid some race conditions
|
|
||||||
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
|
||||||
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let missingPush = false;
|
|
||||||
if (doc.on === undefined) {
|
|
||||||
// this is not a valid config
|
|
||||||
}
|
|
||||||
else if (typeof doc.on === "string") {
|
|
||||||
if (doc.on === "pull_request") {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (Array.isArray(doc.on)) {
|
|
||||||
const hasPush = doc.on.includes("push");
|
|
||||||
const hasPullRequest = doc.on.includes("pull_request");
|
|
||||||
if (hasPullRequest && !hasPush) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (isObject(doc.on)) {
|
|
||||||
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
|
||||||
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
|
||||||
if (!hasPush && hasPullRequest) {
|
|
||||||
missingPush = true;
|
|
||||||
}
|
|
||||||
if (hasPush && hasPullRequest) {
|
|
||||||
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
|
||||||
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
|
||||||
// if they didn't change any files
|
|
||||||
// currently we cannot go back through the history and find the most recent baseline
|
|
||||||
if (Array.isArray(paths) && paths.length > 0) {
|
|
||||||
errors.push(exports.WorkflowErrors.PathsSpecified);
|
|
||||||
}
|
|
||||||
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
|
||||||
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
|
||||||
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// if doc.on.pull_request is null that means 'all branches'
|
|
||||||
// if doc.on.pull_request is undefined that means 'off'
|
|
||||||
// we only want to check for mismatched branches if pull_request is on.
|
|
||||||
if (doc.on.pull_request !== undefined) {
|
|
||||||
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
|
||||||
if (push !== "**") {
|
|
||||||
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
|
||||||
if (pull_request !== "**") {
|
|
||||||
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
|
||||||
if (difference.length > 0) {
|
|
||||||
// there are branches in pull_request that may not have a baseline
|
|
||||||
// because we are not building them on push
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (push.length > 0) {
|
|
||||||
// push is set up to run on a subset of branches
|
|
||||||
// and you could open a PR against a branch with no baseline
|
|
||||||
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (missingPush) {
|
|
||||||
errors.push(exports.WorkflowErrors.MissingPushHook);
|
|
||||||
}
|
|
||||||
return errors;
|
|
||||||
}
|
|
||||||
exports.getWorkflowErrors = getWorkflowErrors;
|
|
||||||
async function validateWorkflow() {
|
|
||||||
let workflow;
|
|
||||||
try {
|
|
||||||
workflow = await getWorkflow();
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: getWorkflow() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
let workflowErrors;
|
|
||||||
try {
|
|
||||||
workflowErrors = getWorkflowErrors(workflow);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
if (workflowErrors.length > 0) {
|
|
||||||
let message;
|
|
||||||
try {
|
|
||||||
message = formatWorkflowErrors(workflowErrors);
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
core.warning(message);
|
|
||||||
}
|
|
||||||
return formatWorkflowCause(workflowErrors);
|
|
||||||
}
|
|
||||||
exports.validateWorkflow = validateWorkflow;
|
|
||||||
function formatWorkflowErrors(errors) {
|
|
||||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
|
||||||
const errorsList = errors.map((e) => e.message).join(" ");
|
|
||||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
|
||||||
}
|
|
||||||
exports.formatWorkflowErrors = formatWorkflowErrors;
|
|
||||||
function formatWorkflowCause(errors) {
|
|
||||||
if (errors.length === 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return errors.map((e) => e.code).join(",");
|
|
||||||
}
|
|
||||||
exports.formatWorkflowCause = formatWorkflowCause;
|
|
||||||
async function getWorkflow() {
|
|
||||||
const relativePath = await getWorkflowPath();
|
|
||||||
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
|
||||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
|
||||||
}
|
|
||||||
exports.getWorkflow = getWorkflow;
|
|
||||||
/**
|
|
||||||
* Get the path of the currently executing workflow.
|
|
||||||
*/
|
|
||||||
async function getWorkflowPath() {
|
|
||||||
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
|
||||||
const owner = repo_nwo[0];
|
|
||||||
const repo = repo_nwo[1];
|
|
||||||
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
|
||||||
const apiClient = api.getApiClient();
|
|
||||||
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
run_id,
|
|
||||||
});
|
|
||||||
const workflowUrl = runsResponse.data.workflow_url;
|
|
||||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
|
||||||
return workflowResponse.data.path;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Get the workflow run ID.
|
|
||||||
*/
|
|
||||||
function getWorkflowRunID() {
|
|
||||||
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
|
||||||
if (Number.isNaN(workflowRunID)) {
|
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
|
||||||
}
|
|
||||||
return workflowRunID;
|
|
||||||
}
|
|
||||||
exports.getWorkflowRunID = getWorkflowRunID;
|
|
||||||
/**
|
/**
|
||||||
* Get the analysis key parameter for the current job.
|
* Get the analysis key parameter for the current job.
|
||||||
*
|
*
|
||||||
|
|
@ -377,7 +158,7 @@ async function getAnalysisKey() {
|
||||||
if (analysisKey !== undefined) {
|
if (analysisKey !== undefined) {
|
||||||
return analysisKey;
|
return analysisKey;
|
||||||
}
|
}
|
||||||
const workflowPath = await getWorkflowPath();
|
const workflowPath = await (0, workflow_1.getWorkflowPath)();
|
||||||
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||||
analysisKey = `${workflowPath}:${jobName}`;
|
analysisKey = `${workflowPath}:${jobName}`;
|
||||||
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
core.exportVariable(analysisKeyEnvVar, analysisKey);
|
||||||
|
|
@ -392,10 +173,10 @@ async function getAutomationID() {
|
||||||
exports.getAutomationID = getAutomationID;
|
exports.getAutomationID = getAutomationID;
|
||||||
function computeAutomationID(analysis_key, environment) {
|
function computeAutomationID(analysis_key, environment) {
|
||||||
let automationID = `${analysis_key}/`;
|
let automationID = `${analysis_key}/`;
|
||||||
// the id has to be deterministic so we sort the fields
|
const matrix = (0, util_1.parseMatrixInput)(environment);
|
||||||
if (environment !== undefined && environment !== "null") {
|
if (matrix !== undefined) {
|
||||||
const environmentObject = JSON.parse(environment);
|
// the id has to be deterministic so we sort the fields
|
||||||
for (const entry of Object.entries(environmentObject).sort()) {
|
for (const entry of Object.entries(matrix).sort()) {
|
||||||
if (typeof entry[1] === "string") {
|
if (typeof entry[1] === "string") {
|
||||||
automationID += `${entry[0]}:${entry[1]}/`;
|
automationID += `${entry[0]}:${entry[1]}/`;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
331
lib/actions-util.test.js
generated
331
lib/actions-util.test.js
generated
|
|
@ -25,14 +25,10 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
const actionsutil = __importStar(require("./actions-util"));
|
const actionsutil = __importStar(require("./actions-util"));
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
function errorCodes(actual, expected) {
|
|
||||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
|
||||||
}
|
|
||||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
(0, ava_1.default)("getRef() throws on the empty string", async (t) => {
|
||||||
process.env["GITHUB_REF"] = "";
|
process.env["GITHUB_REF"] = "";
|
||||||
|
|
@ -143,333 +139,6 @@ function errorCodes(actual, expected) {
|
||||||
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
actualAutomationID = actionsutil.computeAutomationID(".github/workflows/codeql-analysis.yml:analyze", undefined);
|
||||||
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
t.deepEqual(actualAutomationID, ".github/workflows/codeql-analysis.yml:analyze/");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request", "schedule"],
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main", "feature"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["main", "feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: 1,
|
|
||||||
pull_request: 1,
|
|
||||||
},
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: 1,
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: [1],
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { 1: 1 },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: 1 },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [1] },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: 1 } },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [undefined] },
|
|
||||||
}), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
pull_request: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}), []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["main"]
|
|
||||||
pull_request:
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/*"] },
|
|
||||||
pull_request: { branches: "feature/moose" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/moose"] },
|
|
||||||
pull_request: { branches: "feature/*" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
|
||||||
});
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([]);
|
|
||||||
t.deepEqual(message, undefined);
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
|
||||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*"));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [4.1, master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [4.1, master]
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master, ]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead]));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test3";
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
`));
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: "workflow_dispatch"
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: [workflow_dispatch]
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
workflow_dispatch: {}
|
|
||||||
`)), []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
`)), []));
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: ["push"]
|
|
||||||
`)), []));
|
|
||||||
});
|
|
||||||
(0, ava_1.default)("initializeEnvironment", (t) => {
|
(0, ava_1.default)("initializeEnvironment", (t) => {
|
||||||
(0, util_1.initializeEnvironment)("1.2.3");
|
(0, util_1.initializeEnvironment)("1.2.3");
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
6
lib/analyze-action.js
generated
6
lib/analyze-action.js
generated
|
|
@ -39,6 +39,7 @@ const feature_flags_1 = require("./feature-flags");
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const logging_1 = require("./logging");
|
const logging_1 = require("./logging");
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
|
const shared_environment_1 = require("./shared-environment");
|
||||||
const trap_caching_1 = require("./trap-caching");
|
const trap_caching_1 = require("./trap-caching");
|
||||||
const upload_lib = __importStar(require("./upload-lib"));
|
const upload_lib = __importStar(require("./upload-lib"));
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
|
|
@ -176,8 +177,9 @@ async function run() {
|
||||||
}
|
}
|
||||||
core.setOutput("db-locations", dbLocations);
|
core.setOutput("db-locations", dbLocations);
|
||||||
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
||||||
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
|
uploadResult = await upload_lib.uploadFromActions(outputDir, actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), logger);
|
||||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||||
|
core.exportVariable(shared_environment_1.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF, "true");
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
logger.info("Not uploading results");
|
logger.info("Not uploading results");
|
||||||
|
|
@ -208,7 +210,6 @@ async function run() {
|
||||||
hasBadExpectErrorInput()) {
|
hasBadExpectErrorInput()) {
|
||||||
core.setFailed(error.message);
|
core.setFailed(error.message);
|
||||||
}
|
}
|
||||||
console.log(error);
|
|
||||||
if (error instanceof analyze_1.CodeQLAnalysisError) {
|
if (error instanceof analyze_1.CodeQLAnalysisError) {
|
||||||
const stats = { ...error.queriesStatusReport };
|
const stats = { ...error.queriesStatusReport };
|
||||||
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
await sendStatusReport(startedAt, config, stats, error, trapCacheUploadTime, dbCreationTimings, didUploadTrapCaches, logger);
|
||||||
|
|
@ -238,7 +239,6 @@ async function runWrapper() {
|
||||||
}
|
}
|
||||||
catch (error) {
|
catch (error) {
|
||||||
core.setFailed(`analyze action failed: ${error}`);
|
core.setFailed(`analyze action failed: ${error}`);
|
||||||
console.log(error);
|
|
||||||
}
|
}
|
||||||
await (0, util_1.checkForTimeout)();
|
await (0, util_1.checkForTimeout)();
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
2
lib/analyze.js
generated
2
lib/analyze.js
generated
|
|
@ -207,7 +207,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
|
||||||
return statusReport;
|
return statusReport;
|
||||||
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
|
async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
|
||||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, featureEnablement);
|
return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId);
|
||||||
}
|
}
|
||||||
async function runPrintLinesOfCode(language) {
|
async function runPrintLinesOfCode(language) {
|
||||||
const databasePath = util.getCodeQLDatabasePath(config, language);
|
const databasePath = util.getCodeQLDatabasePath(config, language);
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
36
lib/codeql.js
generated
36
lib/codeql.js
generated
|
|
@ -36,7 +36,6 @@ const actions_util_1 = require("./actions-util");
|
||||||
const api = __importStar(require("./api-client"));
|
const api = __importStar(require("./api-client"));
|
||||||
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
const defaults = __importStar(require("./defaults.json")); // Referenced from codeql-action-sync-tool!
|
||||||
const error_matcher_1 = require("./error-matcher");
|
const error_matcher_1 = require("./error-matcher");
|
||||||
const feature_flags_1 = require("./feature-flags");
|
|
||||||
const languages_1 = require("./languages");
|
const languages_1 = require("./languages");
|
||||||
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
const trap_caching_1 = require("./trap-caching");
|
const trap_caching_1 = require("./trap-caching");
|
||||||
|
|
@ -77,6 +76,7 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
||||||
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
exports.CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
||||||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||||
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
exports.CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||||
|
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
|
||||||
/**
|
/**
|
||||||
* This variable controls using the new style of tracing from the CodeQL
|
* This variable controls using the new style of tracing from the CodeQL
|
||||||
* CLI. In particular, with versions above this we will use both indirect
|
* CLI. In particular, with versions above this we will use both indirect
|
||||||
|
|
@ -371,6 +371,7 @@ function setCodeQL(partialCodeql) {
|
||||||
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
|
databaseRunQueries: resolveFunction(partialCodeql, "databaseRunQueries"),
|
||||||
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
|
databaseInterpretResults: resolveFunction(partialCodeql, "databaseInterpretResults"),
|
||||||
databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
|
databasePrintBaseline: resolveFunction(partialCodeql, "databasePrintBaseline"),
|
||||||
|
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
|
||||||
};
|
};
|
||||||
return cachedCodeQL;
|
return cachedCodeQL;
|
||||||
}
|
}
|
||||||
|
|
@ -666,9 +667,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||||
if (querySuitePath) {
|
if (querySuitePath) {
|
||||||
codeqlArgs.push(querySuitePath);
|
codeqlArgs.push(querySuitePath);
|
||||||
}
|
}
|
||||||
await runTool(cmd, codeqlArgs);
|
await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||||
},
|
},
|
||||||
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, featureEnablement) {
|
async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId) {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
"database",
|
"database",
|
||||||
"interpret-results",
|
"interpret-results",
|
||||||
|
|
@ -687,7 +688,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||||
if (automationDetailsId !== undefined) {
|
if (automationDetailsId !== undefined) {
|
||||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||||
}
|
}
|
||||||
if (await featureEnablement.getValue(feature_flags_1.Feature.FileBaselineInformationEnabled, this)) {
|
if (await util.codeQlVersionAbove(this, CODEQL_VERSION_FILE_BASELINE_INFORMATION)) {
|
||||||
codeqlArgs.push("--sarif-add-baseline-file-info");
|
codeqlArgs.push("--sarif-add-baseline-file-info");
|
||||||
}
|
}
|
||||||
codeqlArgs.push(databasePath);
|
codeqlArgs.push(databasePath);
|
||||||
|
|
@ -695,7 +696,8 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||||
codeqlArgs.push(...querySuitePaths);
|
codeqlArgs.push(...querySuitePaths);
|
||||||
}
|
}
|
||||||
// capture stdout, which contains analysis summaries
|
// capture stdout, which contains analysis summaries
|
||||||
return await runTool(cmd, codeqlArgs);
|
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)(cmd, codeqlArgs, error_matcher_1.errorMatchers);
|
||||||
|
return returnState.stdout;
|
||||||
},
|
},
|
||||||
async databasePrintBaseline(databasePath) {
|
async databasePrintBaseline(databasePath) {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
|
|
@ -771,6 +773,19 @@ async function getCodeQLForCmd(cmd, checkVersion) {
|
||||||
];
|
];
|
||||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||||
},
|
},
|
||||||
|
async diagnosticsExport(sarifFile, automationDetailsId) {
|
||||||
|
const args = [
|
||||||
|
"diagnostics",
|
||||||
|
"export",
|
||||||
|
"--format=sarif-latest",
|
||||||
|
`--output=${sarifFile}`,
|
||||||
|
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||||
|
];
|
||||||
|
if (automationDetailsId !== undefined) {
|
||||||
|
args.push("--sarif-category", automationDetailsId);
|
||||||
|
}
|
||||||
|
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||||
|
},
|
||||||
};
|
};
|
||||||
// To ensure that status reports include the CodeQL CLI version wherever
|
// To ensure that status reports include the CodeQL CLI version wherever
|
||||||
// possible, we want to call getVersion(), which populates the version value
|
// possible, we want to call getVersion(), which populates the version value
|
||||||
|
|
@ -846,11 +861,16 @@ async function runTool(cmd, args = []) {
|
||||||
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
||||||
listeners: {
|
listeners: {
|
||||||
stdout: (data) => {
|
stdout: (data) => {
|
||||||
output += data.toString();
|
output += data.toString("utf8");
|
||||||
},
|
},
|
||||||
stderr: (data) => {
|
stderr: (data) => {
|
||||||
const toRead = Math.min(maxErrorSize - error.length, data.length);
|
let readStartIndex = 0;
|
||||||
error += data.toString("utf8", 0, toRead);
|
// If the error is too large, then we only take the last 20,000 characters
|
||||||
|
if (data.length - maxErrorSize > 0) {
|
||||||
|
// Eg: if we have 20,000 the start index should be 2.
|
||||||
|
readStartIndex = data.length - maxErrorSize + 1;
|
||||||
|
}
|
||||||
|
error += data.toString("utf8", readStartIndex);
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
ignoreReturnCode: true,
|
ignoreReturnCode: true,
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
33
lib/codeql.test.js
generated
33
lib/codeql.test.js
generated
|
|
@ -27,6 +27,7 @@ const fs = __importStar(require("fs"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||||
const toolcache = __importStar(require("@actions/tool-cache"));
|
const toolcache = __importStar(require("@actions/tool-cache"));
|
||||||
|
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const del_1 = __importDefault(require("del"));
|
const del_1 = __importDefault(require("del"));
|
||||||
const yaml = __importStar(require("js-yaml"));
|
const yaml = __importStar(require("js-yaml"));
|
||||||
|
|
@ -308,14 +309,18 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
// safeWhich throws because of the test CodeQL object.
|
||||||
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be absent, but it is present");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
// safeWhich throws because of the test CodeQL object.
|
||||||
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"), "--sarif-add-query-help should be present, but it is absent");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
(0, ava_1.default)("databaseInitCluster() without injected codescanning config", async (t) => {
|
||||||
|
|
@ -323,6 +328,8 @@ for (const [isFeatureEnabled, toolsInput, shouldToolcacheBeBypassed,] of TOOLCAC
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
||||||
|
// safeWhich throws because of the test CodeQL object.
|
||||||
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
const thisStubConfig = {
|
const thisStubConfig = {
|
||||||
...stubConfig,
|
...stubConfig,
|
||||||
tempDir,
|
tempDir,
|
||||||
|
|
@ -564,24 +571,22 @@ const injectedConfigMacro = ava_1.default.macro({
|
||||||
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
process.env["CODEQL_PASS_CONFIG_TO_CLI"] = origCODEQL_PASS_CONFIG_TO_CLI;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||||
// The version of CodeQL is checked separately to determine feature enablement, and does not
|
// safeWhich throws because of the test CodeQL object.
|
||||||
// otherwise impact this test, so set it to 0.0.0.
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.FileBaselineInformationEnabled]));
|
|
||||||
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
t.true(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info should be present, but it is absent");
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
|
(0, ava_1.default)("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||||
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
|
// safeWhich throws because of the test CodeQL object.
|
||||||
// affect this test.
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", (0, testing_utils_1.createFeatures)([]));
|
|
||||||
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
t.false(runnerConstructorStub.firstCall.args[1].includes("--sarif-add-baseline-file-info"), "--sarif-add-baseline-file-info must be absent, but it is present");
|
||||||
});
|
});
|
||||||
function stubToolRunnerConstructor() {
|
function stubToolRunnerConstructor() {
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
|
|
@ -1,3 +1,3 @@
|
||||||
{
|
{
|
||||||
"bundleVersion": "codeql-bundle-20221123"
|
"bundleVersion": "codeql-bundle-20221202"
|
||||||
}
|
}
|
||||||
|
|
|
||||||
4
lib/error-matcher.js
generated
4
lib/error-matcher.js
generated
|
|
@ -12,6 +12,10 @@ exports.namedMatchersForTesting = {
|
||||||
message: "No code found during the build. Please see:\n" +
|
message: "No code found during the build. Please see:\n" +
|
||||||
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
||||||
},
|
},
|
||||||
|
fatalError: {
|
||||||
|
outputRegex: new RegExp("A fatal error occurred"),
|
||||||
|
message: "A fatal error occurred.",
|
||||||
|
},
|
||||||
};
|
};
|
||||||
// we collapse the matches into an array for use in execErrorCatcher
|
// we collapse the matches into an array for use in execErrorCatcher
|
||||||
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
|
exports.errorMatchers = Object.values(exports.namedMatchersForTesting);
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
{"version":3,"file":"error-matcher.js","sourceRoot":"","sources":["../src/error-matcher.ts"],"names":[],"mappings":";;;AAQA,qCAAqC;AACxB,QAAA,uBAAuB,GAAoC;IACtE;;MAEE;IACF,iBAAiB,EAAE;QACjB,QAAQ,EAAE,EAAE;QACZ,WAAW,EAAE,IAAI,MAAM,CAAC,2CAA2C,CAAC;QACpE,OAAO,EACL,+CAA+C;YAC/C,yJAAyJ;KAC5J;IACD,UAAU,EAAE;QACV,WAAW,EAAE,IAAI,MAAM,CAAC,wBAAwB,CAAC;QACjD,OAAO,EAAE,yBAAyB;KACnC;CACF,CAAC;AAEF,oEAAoE;AACvD,QAAA,aAAa,GAAG,MAAM,CAAC,MAAM,CAAC,+BAAuB,CAAC,CAAC"}
|
||||||
3
lib/error-matcher.test.js
generated
3
lib/error-matcher.test.js
generated
|
|
@ -16,6 +16,9 @@ NB We test the regexes for all the matchers against example log output snippets.
|
||||||
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
|
2020-09-07T17:39:53.9251124Z [2020-09-07 17:39:53] [ERROR] Spawned process exited abnormally (code 255; tried to run: [/opt/hostedtoolcache/CodeQL/0.0.0-20200630/x64/codeql/javascript/tools/autobuild.sh])
|
||||||
`));
|
`));
|
||||||
});
|
});
|
||||||
|
(0, ava_1.default)("fatalError matches against example log output", async (t) => {
|
||||||
|
t.assert(testErrorMatcher("fatalError", "A fatal error occurred: Could not process query metadata for test-query.ql"));
|
||||||
|
});
|
||||||
function testErrorMatcher(matcherName, logSample) {
|
function testErrorMatcher(matcherName, logSample) {
|
||||||
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
|
if (!(matcherName in error_matcher_1.namedMatchersForTesting)) {
|
||||||
throw new Error(`Unknown matcher ${matcherName}`);
|
throw new Error(`Unknown matcher ${matcherName}`);
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
{"version":3,"file":"error-matcher.test.js","sourceRoot":"","sources":["../src/error-matcher.test.ts"],"names":[],"mappings":";;;;;AAAA,8CAAuB;AAEvB,mDAA0D;AAE1D;;EAEE;AAEF,IAAA,aAAI,EAAC,6DAA6D,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAC9E,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,mBAAmB,EACnB;;;;;GAKH,CACE,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,IAAA,aAAI,EAAC,+CAA+C,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IAChE,CAAC,CAAC,MAAM,CACN,gBAAgB,CACd,YAAY,EACZ,4EAA4E,CAC7E,CACF,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,SAAS,gBAAgB,CAAC,WAAmB,EAAE,SAAiB;IAC9D,IAAI,CAAC,CAAC,WAAW,IAAI,uCAAuB,CAAC,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,mBAAmB,WAAW,EAAE,CAAC,CAAC;KACnD;IACD,MAAM,KAAK,GAAG,uCAAuB,CAAC,WAAW,CAAC,CAAC,WAAW,CAAC;IAC/D,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,IAAI,KAAK,CAAC,uBAAuB,WAAW,kBAAkB,CAAC,CAAC;KACvE;IACD,OAAO,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AAC/B,CAAC"}
|
||||||
10
lib/feature-flags.js
generated
10
lib/feature-flags.js
generated
|
|
@ -30,9 +30,9 @@ var Feature;
|
||||||
Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
|
Feature["BypassToolcacheKotlinSwiftEnabled"] = "bypass_toolcache_kotlin_swift_enabled";
|
||||||
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
Feature["CliConfigFileEnabled"] = "cli_config_file_enabled";
|
||||||
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
|
Feature["DisableKotlinAnalysisEnabled"] = "disable_kotlin_analysis_enabled";
|
||||||
Feature["FileBaselineInformationEnabled"] = "file_baseline_information_enabled";
|
|
||||||
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
Feature["MlPoweredQueriesEnabled"] = "ml_powered_queries_enabled";
|
||||||
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
|
Feature["TrapCachingEnabled"] = "trap_caching_enabled";
|
||||||
|
Feature["UploadFailedSarifEnabled"] = "upload_failed_sarif_enabled";
|
||||||
})(Feature = exports.Feature || (exports.Feature = {}));
|
})(Feature = exports.Feature || (exports.Feature = {}));
|
||||||
exports.featureConfig = {
|
exports.featureConfig = {
|
||||||
[Feature.BypassToolcacheEnabled]: {
|
[Feature.BypassToolcacheEnabled]: {
|
||||||
|
|
@ -55,10 +55,6 @@ exports.featureConfig = {
|
||||||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||||
minimumVersion: "2.11.1",
|
minimumVersion: "2.11.1",
|
||||||
},
|
},
|
||||||
[Feature.FileBaselineInformationEnabled]: {
|
|
||||||
envVar: "CODEQL_FILE_BASELINE_INFORMATION",
|
|
||||||
minimumVersion: "2.11.3",
|
|
||||||
},
|
|
||||||
[Feature.MlPoweredQueriesEnabled]: {
|
[Feature.MlPoweredQueriesEnabled]: {
|
||||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||||
minimumVersion: "2.7.5",
|
minimumVersion: "2.7.5",
|
||||||
|
|
@ -67,6 +63,10 @@ exports.featureConfig = {
|
||||||
envVar: "CODEQL_TRAP_CACHING",
|
envVar: "CODEQL_TRAP_CACHING",
|
||||||
minimumVersion: undefined,
|
minimumVersion: undefined,
|
||||||
},
|
},
|
||||||
|
[Feature.UploadFailedSarifEnabled]: {
|
||||||
|
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
||||||
|
minimumVersion: "2.11.3",
|
||||||
|
},
|
||||||
};
|
};
|
||||||
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
exports.FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json";
|
||||||
/**
|
/**
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA4C;AAI5C,6CAA+B;AAM/B,IAAY,OAQX;AARD,WAAY,OAAO;IACjB,8DAAmD,CAAA;IACnD,sFAA2E,CAAA;IAC3E,2DAAgD,CAAA;IAChD,2EAAgE,CAAA;IAChE,+EAAoE,CAAA;IACpE,iEAAsD,CAAA;IACtD,sDAA2C,CAAA;AAC7C,CAAC,EARW,OAAO,GAAP,eAAO,KAAP,eAAO,QAQlB;AAEY,QAAA,aAAa,GAGtB;IACF,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;QAChC,MAAM,EAAE,yBAAyB;QACjC,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,iCAAiC,CAAC,EAAE;QAC3C,MAAM,EAAE,sCAAsC;QAC9C,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,4BAA4B,CAAC,EAAE;QACtC,MAAM,EAAE,gCAAgC;QACxC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE;QAC9B,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,8BAA8B,CAAC,EAAE;QACxC,MAAM,EAAE,kCAAkC;QAC1C,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,uBAAuB,CAAC,EAAE;QACjC,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,OAAO;KACxB;IACD,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;QAC5B,MAAM,EAAE,qBAAqB;QAC7B,cAAc,EAAE,SAAS;KAC1B;CACF,CAAC;AAUW,QAAA,uBAAuB,GAAG,2BAA2B,CAAC;AAEnE;;;;GAIG;AACH,MAAa,QAAQ;IAGnB,YACE,aAAiC,EACjC,aAA4B,EAC5B,OAAe,EACf,MAAc;QAEd,IAAI,CAAC,kBAAkB,GAAG,IAAI,kBAAkB,CAC9C,aAAa,EACb,aAAa,EACb,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,+BAAuB,CAAC,EAC3C,MAAM,CACP,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAgB,EAAE,MAAe;QAC9C,IAAI,CAAC,MAAM,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE;YACpD,MAAM,IAAI,KAAK,CACb,8DAA8D,OAAO,2CAA2C,CACjH,CAAC;SACH;QAED,oDAAoD;QACpD,IAAI,OAAO,KAAK,OAAO,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACrE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,MAAM,GAAG,CACb,OAAO,CAAC,GAAG,CAAC,qBAAa,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CACjD,CAAC,iBAAiB,EAAE,CAAC;QAEtB,sFAAsF;QACtF,IAAI,MAAM,KAAK,OAAO,EAAE;YACtB,OAAO,KAAK,CAAC;SACd;QAED,yEAAyE;QACzE,MAAM,cAAc,GAAG,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,CAAC;QAC7D,IAAI,MAAM,IAAI,cAAc,EAAE;YAC5B,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE;gBAC5D,
OAAO,KAAK,CAAC;aACd;SACF;QAED,8EAA8E;QAC9E,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QACD,gDAAgD;QAChD,OAAO,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzD,CAAC;CACF;AAjED,4BAiEC;AAED,MAAM,kBAAkB;IAGtB,YACmB,aAAiC,EACjC,aAA4B,EAC5B,gBAAwB,EACxB,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,kBAAa,GAAb,aAAa,CAAe;QAC5B,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAQ;QAE/B,IAAI;IACN,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAAgB;QAC7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,OAAO,4BAA4B,CACzE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC5C,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,YAAY,OAAO,uDAAuD,CAC3E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,CAAC,CAAC,iBAAiB,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,0CAA0C;QAC1C,IAAI,IAAI,CAAC,iBAAiB,KAAK,SAAS,EAAE;YACxC,OAAO,IAAI,CAAC,iBAAiB,CAAC;SAC/B;QAED,wEAAwE;QACxE,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC9C,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;YACnC,OAAO,SAAS,CAAC;SAClB;QAED,wCAAwC;QACxC,IAAI,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,EAAE,CAAC;QAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,WAAW,GAAG,EAAE,CAAC;SAClB;QAED,+BAA+B;QAC/B,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QAErC,+DAA+D;QAC/D,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExC,OAAO,WAAW,CAAC;IACrB,CAAC;IAEO,KAAK,CAAC,cAAc;QAG1B,IAAI;YACF,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;gBACxC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8BAA8B,IAAI,CAAC,gBAAgB,EAAE,CACtD,CAAC;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC,CAAC;aACnE;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,mCAAmC,CAC1G,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,KAAoC;QAEpC,IAAI;YACF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YACvE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAA
E;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,GAAG,CAC1E,CAAC;SACH;IACH,CAAC;IAEO,KAAK,CAAC,eAAe;QAC3B,iDAAiD;QACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,oEAAoE,CACrE,CAAC;YACF,OAAO,EAAE,CAAC;SACX;QACD,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAY,GAAE,CAAC,OAAO,CAC3C,8DAA8D,EAC9D;gBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;aAC9B,CACF,CAAC;YACF,OAAO,QAAQ,CAAC,IAAI,CAAC;SACtB;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;oBAC9F,oEAAoE;oBACpE,qFAAqF;oBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;aACH;iBAAM;gBACL,kFAAkF;gBAClF,8EAA8E;gBAC9E,2FAA2F;gBAC3F,eAAe;gBACf,MAAM,IAAI,KAAK,CACb,sEAAsE,CAAC,EAAE,CAC1E,CAAC;aACH;SACF;IACH,CAAC;CACF"}
|
{"version":3,"file":"feature-flags.js","sourceRoot":"","sources":["../src/feature-flags.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,6CAA4C;AAI5C,6CAA+B;AAM/B,IAAY,OAQX;AARD,WAAY,OAAO;IACjB,8DAAmD,CAAA;IACnD,sFAA2E,CAAA;IAC3E,2DAAgD,CAAA;IAChD,2EAAgE,CAAA;IAChE,iEAAsD,CAAA;IACtD,sDAA2C,CAAA;IAC3C,mEAAwD,CAAA;AAC1D,CAAC,EARW,OAAO,GAAP,eAAO,KAAP,eAAO,QAQlB;AAEY,QAAA,aAAa,GAGtB;IACF,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;QAChC,MAAM,EAAE,yBAAyB;QACjC,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,iCAAiC,CAAC,EAAE;QAC3C,MAAM,EAAE,sCAAsC;QAC9C,+EAA+E;QAC/E,iCAAiC;QACjC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,4BAA4B,CAAC,EAAE;QACtC,MAAM,EAAE,gCAAgC;QACxC,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE;QAC9B,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,QAAQ;KACzB;IACD,CAAC,OAAO,CAAC,uBAAuB,CAAC,EAAE;QACjC,MAAM,EAAE,2BAA2B;QACnC,cAAc,EAAE,OAAO;KACxB;IACD,CAAC,OAAO,CAAC,kBAAkB,CAAC,EAAE;QAC5B,MAAM,EAAE,qBAAqB;QAC7B,cAAc,EAAE,SAAS;KAC1B;IACD,CAAC,OAAO,CAAC,wBAAwB,CAAC,EAAE;QAClC,MAAM,EAAE,mCAAmC;QAC3C,cAAc,EAAE,QAAQ;KACzB;CACF,CAAC;AAUW,QAAA,uBAAuB,GAAG,2BAA2B,CAAC;AAEnE;;;;GAIG;AACH,MAAa,QAAQ;IAGnB,YACE,aAAiC,EACjC,aAA4B,EAC5B,OAAe,EACf,MAAc;QAEd,IAAI,CAAC,kBAAkB,GAAG,IAAI,kBAAkB,CAC9C,aAAa,EACb,aAAa,EACb,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,+BAAuB,CAAC,EAC3C,MAAM,CACP,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACH,KAAK,CAAC,QAAQ,CAAC,OAAgB,EAAE,MAAe;QAC9C,IAAI,CAAC,MAAM,IAAI,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE;YACpD,MAAM,IAAI,KAAK,CACb,8DAA8D,OAAO,2CAA2C,CACjH,CAAC;SACH;QAED,oDAAoD;QACpD,IAAI,OAAO,KAAK,OAAO,CAAC,sBAAsB,IAAI,IAAI,CAAC,YAAY,EAAE,EAAE;YACrE,OAAO,KAAK,CAAC;SACd;QAED,MAAM,MAAM,GAAG,CACb,OAAO,CAAC,GAAG,CAAC,qBAAa,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,IAAI,EAAE,CACjD,CAAC,iBAAiB,EAAE,CAAC;QAEtB,sFAAsF;QACtF,IAAI,MAAM,KAAK,OAAO,EAAE;YACtB,OAAO,KAAK,CAAC;SACd;QAED,yEAAyE;QACzE,MAAM,cAAc,GAAG,qBAAa,CAAC,OAAO,CAAC,CAAC,cAAc,CAAC;QAC7D,IAAI,MAAM,IAAI,cAAc,EAAE;YAC5B,IAAI,CAAC,CAAC,MAAM,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,cAAc,CAAC,CAAC,EAAE;gBAC5D,
OAAO,KAAK,CAAC;aACd;SACF;QAED,8EAA8E;QAC9E,IAAI,MAAM,KAAK,MAAM,EAAE;YACrB,OAAO,IAAI,CAAC;SACb;QACD,gDAAgD;QAChD,OAAO,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;IACzD,CAAC;CACF;AAjED,4BAiEC;AAED,MAAM,kBAAkB;IAGtB,YACmB,aAAiC,EACjC,aAA4B,EAC5B,gBAAwB,EACxB,MAAc;QAHd,kBAAa,GAAb,aAAa,CAAoB;QACjC,kBAAa,GAAb,aAAa,CAAe;QAC5B,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAQ;QAE/B,IAAI;IACN,CAAC;IAED,KAAK,CAAC,QAAQ,CAAC,OAAgB;QAC7B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC7C,IAAI,QAAQ,KAAK,SAAS,EAAE;YAC1B,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,qCAAqC,OAAO,4BAA4B,CACzE,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;QAC5C,IAAI,iBAAiB,KAAK,SAAS,EAAE;YACnC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,YAAY,OAAO,uDAAuD,CAC3E,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QACD,OAAO,CAAC,CAAC,iBAAiB,CAAC;IAC7B,CAAC;IAEO,KAAK,CAAC,cAAc;QAC1B,0CAA0C;QAC1C,IAAI,IAAI,CAAC,iBAAiB,KAAK,SAAS,EAAE;YACxC,OAAO,IAAI,CAAC,iBAAiB,CAAC;SAC/B;QAED,wEAAwE;QACxE,MAAM,SAAS,GAAG,MAAM,IAAI,CAAC,cAAc,EAAE,CAAC;QAC9C,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,IAAI,CAAC,iBAAiB,GAAG,SAAS,CAAC;YACnC,OAAO,SAAS,CAAC;SAClB;QAED,wCAAwC;QACxC,IAAI,WAAW,GAAG,MAAM,IAAI,CAAC,eAAe,EAAE,CAAC;QAC/C,IAAI,WAAW,KAAK,SAAS,EAAE;YAC7B,WAAW,GAAG,EAAE,CAAC;SAClB;QAED,+BAA+B;QAC/B,IAAI,CAAC,iBAAiB,GAAG,WAAW,CAAC;QAErC,+DAA+D;QAC/D,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAExC,OAAO,WAAW,CAAC;IACrB,CAAC;IAEO,KAAK,CAAC,cAAc;QAG1B,IAAI;YACF,IAAI,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;gBACxC,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,8BAA8B,IAAI,CAAC,gBAAgB,EAAE,CACtD,CAAC;gBACF,OAAO,IAAI,CAAC,KAAK,CAAC,EAAE,CAAC,YAAY,CAAC,IAAI,CAAC,gBAAgB,EAAE,MAAM,CAAC,CAAC,CAAC;aACnE;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,mCAAmC,CAC1G,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,KAAK,CAAC,eAAe,CAC3B,KAAoC;QAEpC,IAAI;YACF,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,gBAAgB,EAAE,CAAC,CAAC;YACvE,EAAE,CAAC,aAAa,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;SAChE;QAAC,OAAO,CAAC,EAA
E;YACV,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,2CAA2C,IAAI,CAAC,gBAAgB,KAAK,CAAC,GAAG,CAC1E,CAAC;SACH;IACH,CAAC;IAEO,KAAK,CAAC,eAAe;QAC3B,iDAAiD;QACjD,IAAI,IAAI,CAAC,aAAa,CAAC,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE;YACzD,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,oEAAoE,CACrE,CAAC;YACF,OAAO,EAAE,CAAC;SACX;QACD,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAY,GAAE,CAAC,OAAO,CAC3C,8DAA8D,EAC9D;gBACE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,KAAK;gBAC/B,IAAI,EAAE,IAAI,CAAC,aAAa,CAAC,IAAI;aAC9B,CACF,CAAC;YACF,OAAO,QAAQ,CAAC,IAAI,CAAC;SACtB;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3C,IAAI,CAAC,MAAM,CAAC,OAAO,CACjB,gGAAgG;oBAC9F,oEAAoE;oBACpE,qFAAqF;oBACrF,kFAAkF,CAAC,EAAE,CACxF,CAAC;aACH;iBAAM;gBACL,kFAAkF;gBAClF,8EAA8E;gBAC9E,2FAA2F;gBAC3F,eAAe;gBACf,MAAM,IAAI,KAAK,CACb,sEAAsE,CAAC,EAAE,CAC1E,CAAC;aACH;SACF;IACH,CAAC;CACF"}
|
||||||
59
lib/init-action-post-helper.js
generated
59
lib/init-action-post-helper.js
generated
|
|
@ -19,19 +19,68 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.run = void 0;
|
exports.run = exports.uploadFailedSarif = void 0;
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const codeql_1 = require("./codeql");
|
||||||
const config_utils_1 = require("./config-utils");
|
const config_utils_1 = require("./config-utils");
|
||||||
const logging_1 = require("./logging");
|
const feature_flags_1 = require("./feature-flags");
|
||||||
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs) {
|
const shared_environment_1 = require("./shared-environment");
|
||||||
const logger = (0, logging_1.getActionsLogger)();
|
const uploadLib = __importStar(require("./upload-lib"));
|
||||||
|
const util_1 = require("./util");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
|
async function uploadFailedSarif(config, repositoryNwo, featureEnablement, logger) {
|
||||||
|
if (!config.codeQLCmd) {
|
||||||
|
logger.warning("CodeQL command not found. Unable to upload failed SARIF file.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||||
|
if (!(await featureEnablement.getValue(feature_flags_1.Feature.UploadFailedSarifEnabled, codeql))) {
|
||||||
|
logger.debug("Uploading failed SARIF is disabled.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const workflow = await (0, workflow_1.getWorkflow)();
|
||||||
|
const jobName = (0, util_1.getRequiredEnvParam)("GITHUB_JOB");
|
||||||
|
const matrix = (0, util_1.parseMatrixInput)(actionsUtil.getRequiredInput("matrix"));
|
||||||
|
if ((0, workflow_1.getUploadInputOrThrow)(workflow, jobName, matrix) !== "true" ||
|
||||||
|
(0, util_1.isInTestMode)()) {
|
||||||
|
logger.debug("Won't upload a failed SARIF file since SARIF upload is disabled.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const category = (0, workflow_1.getCategoryInputOrThrow)(workflow, jobName, matrix);
|
||||||
|
const checkoutPath = (0, workflow_1.getCheckoutPathInputOrThrow)(workflow, jobName, matrix);
|
||||||
|
const sarifFile = "../codeql-failed-run.sarif";
|
||||||
|
await codeql.diagnosticsExport(sarifFile, category);
|
||||||
|
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
||||||
|
const uploadResult = await uploadLib.uploadFromActions(sarifFile, checkoutPath, category, logger);
|
||||||
|
await uploadLib.waitForProcessing(repositoryNwo, uploadResult.sarifID, logger, { isUnsuccessfulExecution: true });
|
||||||
|
}
|
||||||
|
exports.uploadFailedSarif = uploadFailedSarif;
|
||||||
|
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, printDebugLogs, repositoryNwo, featureEnablement, logger) {
|
||||||
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
const config = await (0, config_utils_1.getConfig)(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
if (config === undefined) {
|
if (config === undefined) {
|
||||||
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
|
logger.warning("Debugging artifacts are unavailable since the 'init' Action failed before it could produce any.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Environment variable used to integration test uploading a SARIF file for failed runs
|
||||||
|
const expectFailedSarifUpload = process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true";
|
||||||
|
if (process.env[shared_environment_1.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF] !== "true") {
|
||||||
|
try {
|
||||||
|
await uploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
if (expectFailedSarifUpload) {
|
||||||
|
throw new Error("Expected to upload a SARIF file for the failed run, but encountered " +
|
||||||
|
`the following error: ${e}`);
|
||||||
|
}
|
||||||
|
logger.info(`Failed to upload a SARIF file for the failed run. Error: ${e}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (expectFailedSarifUpload) {
|
||||||
|
throw new Error("Expected to upload a SARIF file for the failed run, but didn't.");
|
||||||
}
|
}
|
||||||
// Upload appropriate Actions artifacts for debugging
|
// Upload appropriate Actions artifacts for debugging
|
||||||
if (config === null || config === void 0 ? void 0 : config.debugMode) {
|
if (config.debugMode) {
|
||||||
core.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
|
core.info("Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts...");
|
||||||
await uploadDatabaseBundleDebugArtifact(config, logger);
|
await uploadDatabaseBundleDebugArtifact(config, logger);
|
||||||
await uploadLogsDebugArtifact(config);
|
await uploadLogsDebugArtifact(config);
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAEtC,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB;IAExB,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAxBD,kBAwBC"}
|
{"version":3,"file":"init-action-post-helper.js","sourceRoot":"","sources":["../src/init-action-post-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,qCAAqC;AACrC,iDAAmD;AACnD,mDAA6D;AAG7D,6DAA8E;AAC9E,wDAA0C;AAC1C,iCAA6E;AAC7E,yCAKoB;AAEb,KAAK,UAAU,iBAAiB,CACrC,MAAc,EACd,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;QACrB,MAAM,CAAC,OAAO,CACZ,+DAA+D,CAChE,CAAC;QACF,OAAO;KACR;IACD,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAS,EAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IACjD,IACE,CAAC,CAAC,MAAM,iBAAiB,CAAC,QAAQ,CAChC,uBAAO,CAAC,wBAAwB,EAChC,MAAM,CACP,CAAC,EACF;QACA,MAAM,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAC;QACpD,OAAO;KACR;IACD,MAAM,QAAQ,GAAG,MAAM,IAAA,sBAAW,GAAE,CAAC;IACrC,MAAM,OAAO,GAAG,IAAA,0BAAmB,EAAC,YAAY,CAAC,CAAC;IAClD,MAAM,MAAM,GAAG,IAAA,uBAAgB,EAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC,CAAC;IACxE,IACE,IAAA,gCAAqB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,KAAK,MAAM;QAC3D,IAAA,mBAAY,GAAE,EACd;QACA,MAAM,CAAC,KAAK,CACV,kEAAkE,CACnE,CAAC;QACF,OAAO;KACR;IACD,MAAM,QAAQ,GAAG,IAAA,kCAAuB,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACpE,MAAM,YAAY,GAAG,IAAA,sCAA2B,EAAC,QAAQ,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IAE5E,MAAM,SAAS,GAAG,4BAA4B,CAAC;IAC/C,MAAM,MAAM,CAAC,iBAAiB,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAEpD,IAAI,CAAC,IAAI,CAAC,+BAA+B,SAAS,EAAE,CAAC,CAAC;IACtD,MAAM,YAAY,GAAG,MAAM,SAAS,CAAC,iBAAiB,CACpD,SAAS,EACT,YAAY,EACZ,QAAQ,EACR,MAAM,CACP,CAAC;IACF,MAAM,SAAS,CAAC,iBAAiB,CAC/B,aAAa,EACb,YAAY,CAAC,OAAO,EACpB,MAAM,EACN,EAAE,uBAAuB,EAAE,IAAI,EAAE,CAClC,CAAC;AACJ,CAAC;AArDD,8CAqDC;AAEM,KAAK,UAAU,GAAG,CACvB,iCAA2C,EAC3C,uBAAiC,EACjC,cAAwB,EACxB,aAA4B,EAC5B,iBAAoC,EACpC,MAAc;IAEd,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,CAAC,OAAO,CACZ,iGAAiG,CAClG,CAAC;QACF,OAAO;KACR;IAED,uFAAuF;IACvF,MAAM,uBAAuB,GAC3B,OAAO,CAAC,GAAG,CAAC,0CAA0C,CAAC,KAAK,MAAM,CAAC;IAErE,IAAI,OAAO,CAAC,GAAG,CAAC,2DAAsC,CAAC,KAAK,MAAM,EAAE;QAClE,IAAI;YACF,MAAM,iBAAiB,CAAC,MAAM,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,CAAC,CAAC;SAC3E;QAA
C,OAAO,CAAC,EAAE;YACV,IAAI,uBAAuB,EAAE;gBAC3B,MAAM,IAAI,KAAK,CACb,sEAAsE;oBACpE,wBAAwB,CAAC,EAAE,CAC9B,CAAC;aACH;YACD,MAAM,CAAC,IAAI,CACT,4DAA4D,CAAC,EAAE,CAChE,CAAC;SACH;KACF;SAAM,IAAI,uBAAuB,EAAE;QAClC,MAAM,IAAI,KAAK,CACb,iEAAiE,CAClE,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,CAAC,SAAS,EAAE;QACpB,IAAI,CAAC,IAAI,CACP,mGAAmG,CACpG,CAAC;QACF,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QAEtC,MAAM,cAAc,CAAC,MAAM,CAAC,CAAC;KAC9B;AACH,CAAC;AAlDD,kBAkDC"}
|
||||||
93
lib/init-action-post-helper.test.js
generated
93
lib/init-action-post-helper.test.js
generated
|
|
@ -24,13 +24,21 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const ava_1 = __importDefault(require("ava"));
|
const ava_1 = __importDefault(require("ava"));
|
||||||
const sinon = __importStar(require("sinon"));
|
const sinon = __importStar(require("sinon"));
|
||||||
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const codeql = __importStar(require("./codeql"));
|
||||||
const configUtils = __importStar(require("./config-utils"));
|
const configUtils = __importStar(require("./config-utils"));
|
||||||
|
const feature_flags_1 = require("./feature-flags");
|
||||||
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
|
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
|
||||||
|
const logging_1 = require("./logging");
|
||||||
|
const repository_1 = require("./repository");
|
||||||
const testing_utils_1 = require("./testing-utils");
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
const uploadLib = __importStar(require("./upload-lib"));
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
|
const workflow = __importStar(require("./workflow"));
|
||||||
(0, testing_utils_1.setupTests)(ava_1.default);
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
(0, ava_1.default)("post: init action with debug mode off", async (t) => {
|
(0, ava_1.default)("post: init action with debug mode off", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
const gitHubVersion = {
|
const gitHubVersion = {
|
||||||
type: util.GitHubVariant.DOTCOM,
|
type: util.GitHubVariant.DOTCOM,
|
||||||
|
|
@ -44,7 +52,7 @@ const util = __importStar(require("./util"));
|
||||||
const uploadDatabaseBundleSpy = sinon.spy();
|
const uploadDatabaseBundleSpy = sinon.spy();
|
||||||
const uploadLogsSpy = sinon.spy();
|
const uploadLogsSpy = sinon.spy();
|
||||||
const printDebugLogsSpy = sinon.spy();
|
const printDebugLogsSpy = sinon.spy();
|
||||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
|
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||||
t.assert(uploadDatabaseBundleSpy.notCalled);
|
t.assert(uploadDatabaseBundleSpy.notCalled);
|
||||||
t.assert(uploadLogsSpy.notCalled);
|
t.assert(uploadLogsSpy.notCalled);
|
||||||
t.assert(printDebugLogsSpy.notCalled);
|
t.assert(printDebugLogsSpy.notCalled);
|
||||||
|
|
@ -52,6 +60,7 @@ const util = __importStar(require("./util"));
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("post: init action with debug mode on", async (t) => {
|
(0, ava_1.default)("post: init action with debug mode on", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
const gitHubVersion = {
|
const gitHubVersion = {
|
||||||
type: util.GitHubVariant.DOTCOM,
|
type: util.GitHubVariant.DOTCOM,
|
||||||
|
|
@ -65,10 +74,90 @@ const util = __importStar(require("./util"));
|
||||||
const uploadDatabaseBundleSpy = sinon.spy();
|
const uploadDatabaseBundleSpy = sinon.spy();
|
||||||
const uploadLogsSpy = sinon.spy();
|
const uploadLogsSpy = sinon.spy();
|
||||||
const printDebugLogsSpy = sinon.spy();
|
const printDebugLogsSpy = sinon.spy();
|
||||||
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy);
|
await initActionPostHelper.run(uploadDatabaseBundleSpy, uploadLogsSpy, printDebugLogsSpy, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([]), (0, logging_1.getRunnerLogger)(true));
|
||||||
t.assert(uploadDatabaseBundleSpy.called);
|
t.assert(uploadDatabaseBundleSpy.called);
|
||||||
t.assert(uploadLogsSpy.called);
|
t.assert(uploadLogsSpy.called);
|
||||||
t.assert(printDebugLogsSpy.called);
|
t.assert(printDebugLogsSpy.called);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
(0, ava_1.default)("uploads failed SARIF run for typical workflow", async (t) => {
|
||||||
|
const actionsWorkflow = createTestWorkflow([
|
||||||
|
{
|
||||||
|
name: "Checkout repository",
|
||||||
|
uses: "actions/checkout@v3",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Initialize CodeQL",
|
||||||
|
uses: "github/codeql-action/init@v2",
|
||||||
|
with: {
|
||||||
|
languages: "javascript",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Perform CodeQL Analysis",
|
||||||
|
uses: "github/codeql-action/analyze@v2",
|
||||||
|
with: {
|
||||||
|
category: "my-category",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
|
||||||
|
const actionsWorkflow = createTestWorkflow([
|
||||||
|
{
|
||||||
|
name: "Checkout repository",
|
||||||
|
uses: "actions/checkout@v3",
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
await t.throwsAsync(async () => await testFailedSarifUpload(t, actionsWorkflow));
|
||||||
|
});
|
||||||
|
function createTestWorkflow(steps) {
|
||||||
|
return {
|
||||||
|
name: "CodeQL",
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: ["main"],
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: ["main"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
jobs: {
|
||||||
|
analyze: {
|
||||||
|
name: "CodeQL Analysis",
|
||||||
|
"runs-on": "ubuntu-latest",
|
||||||
|
steps,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
async function testFailedSarifUpload(t, actionsWorkflow, { category } = {}) {
|
||||||
|
const config = {
|
||||||
|
codeQLCmd: "codeql",
|
||||||
|
debugMode: true,
|
||||||
|
languages: [],
|
||||||
|
packs: [],
|
||||||
|
};
|
||||||
|
const messages = [];
|
||||||
|
process.env["GITHUB_JOB"] = "analyze";
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
process.env["GITHUB_WORKSPACE"] =
|
||||||
|
"/home/runner/work/codeql-action/codeql-action";
|
||||||
|
sinon.stub(actionsUtil, "getRequiredInput").withArgs("matrix").returns("{}");
|
||||||
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
|
||||||
|
const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
|
||||||
|
sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
|
||||||
|
const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
|
||||||
|
uploadFromActions.resolves({ sarifID: "42" });
|
||||||
|
const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
|
||||||
|
await initActionPostHelper.uploadFailedSarif(config, (0, repository_1.parseRepositoryNwo)("github/codeql-action"), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.UploadFailedSarifEnabled]), (0, testing_utils_1.getRecordingLogger)(messages));
|
||||||
|
t.deepEqual(messages, []);
|
||||||
|
t.true(diagnosticsExportStub.calledOnceWith(sinon.match.string, category), `Actual args were: ${diagnosticsExportStub.args}`);
|
||||||
|
t.true(uploadFromActions.calledOnceWith(sinon.match.string, sinon.match.string, category, sinon.match.any), `Actual args were: ${uploadFromActions.args}`);
|
||||||
|
t.true(waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
||||||
|
isUnsuccessfulExecution: true,
|
||||||
|
}));
|
||||||
|
}
|
||||||
//# sourceMappingURL=init-action-post-helper.test.js.map
|
//# sourceMappingURL=init-action-post-helper.test.js.map
|
||||||
File diff suppressed because one or more lines are too long
12
lib/init-action-post.js
generated
12
lib/init-action-post.js
generated
|
|
@ -26,11 +26,21 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const core = __importStar(require("@actions/core"));
|
const core = __importStar(require("@actions/core"));
|
||||||
const actionsUtil = __importStar(require("./actions-util"));
|
const actionsUtil = __importStar(require("./actions-util"));
|
||||||
|
const api_client_1 = require("./api-client");
|
||||||
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||||
|
const feature_flags_1 = require("./feature-flags");
|
||||||
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
|
const initActionPostHelper = __importStar(require("./init-action-post-helper"));
|
||||||
|
const logging_1 = require("./logging");
|
||||||
|
const repository_1 = require("./repository");
|
||||||
|
const util_1 = require("./util");
|
||||||
async function runWrapper() {
|
async function runWrapper() {
|
||||||
try {
|
try {
|
||||||
await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs);
|
const logger = (0, logging_1.getActionsLogger)();
|
||||||
|
const gitHubVersion = await (0, api_client_1.getGitHubVersion)();
|
||||||
|
(0, util_1.checkGitHubVersionInRange)(gitHubVersion, logger);
|
||||||
|
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||||
|
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
|
||||||
|
await initActionPostHelper.run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, actionsUtil.printDebugLogs, repositoryNwo, features, logger);
|
||||||
}
|
}
|
||||||
catch (error) {
|
catch (error) {
|
||||||
core.setFailed(`init post-action step failed: ${error}`);
|
core.setFailed(`init post-action step failed: ${error}`);
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,kEAAoD;AACpD,gFAAkE;AAElE,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,oBAAoB,CAAC,GAAG,CAC5B,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,CAC3B,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,iCAAiC,KAAK,EAAE,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,6CAAgD;AAChD,kEAAoD;AACpD,mDAA2C;AAC3C,gFAAkE;AAClE,uCAA6C;AAC7C,6CAAkD;AAClD,iCAAwE;AAExE,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;QAClC,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAEjD,MAAM,aAAa,GAAG,IAAA,+BAAkB,EACtC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CACzC,CAAC;QACF,MAAM,QAAQ,GAAG,IAAI,wBAAQ,CAC3B,aAAa,EACb,aAAa,EACb,WAAW,CAAC,qBAAqB,EAAE,EACnC,MAAM,CACP,CAAC;QAEF,MAAM,oBAAoB,CAAC,GAAG,CAC5B,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,WAAW,CAAC,cAAc,EAC1B,aAAa,EACb,QAAQ,EACR,MAAM,CACP,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,iCAAiC,KAAK,EAAE,CAAC,CAAC;QACzD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||||
3
lib/init-action.js
generated
3
lib/init-action.js
generated
|
|
@ -31,6 +31,7 @@ const logging_1 = require("./logging");
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const trap_caching_1 = require("./trap-caching");
|
const trap_caching_1 = require("./trap-caching");
|
||||||
const util_1 = require("./util");
|
const util_1 = require("./util");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
async function sendSuccessStatusReport(startedAt, config, toolsVersion, logger) {
|
async function sendSuccessStatusReport(startedAt, config, toolsVersion, logger) {
|
||||||
|
|
@ -90,7 +91,7 @@ async function run() {
|
||||||
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
const repositoryNwo = (0, repository_1.parseRepositoryNwo)((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY"));
|
||||||
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, (0, actions_util_1.getTemporaryDirectory)(), logger);
|
||||||
try {
|
try {
|
||||||
const workflowErrors = await (0, actions_util_1.validateWorkflow)();
|
const workflowErrors = await (0, workflow_1.validateWorkflow)();
|
||||||
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
if (!(await (0, actions_util_1.sendStatusReport)(await (0, actions_util_1.createStatusReportBase)("init", "starting", startedAt, workflowErrors)))) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
24
lib/shared-environment.js
generated
24
lib/shared-environment.js
generated
|
|
@ -1,14 +1,22 @@
|
||||||
"use strict";
|
"use strict";
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_WORKFLOW_STARTED_AT = exports.ODASA_TRACER_CONFIGURATION = void 0;
|
exports.ODASA_TRACER_CONFIGURATION = exports.CODEQL_WORKFLOW_STARTED_AT = exports.CODEQL_ACTION_TEST_MODE = exports.CODEQL_ACTION_TESTING_ENVIRONMENT = exports.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF = void 0;
|
||||||
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
|
/**
|
||||||
// The time at which the first action (normally init) started executing.
|
* This environment variable is set to true when the `analyze` Action
|
||||||
// If a workflow invokes a different action without first invoking the init
|
* successfully uploads a SARIF file. It does NOT indicate whether the
|
||||||
// action (i.e. the upload action is being used by a third-party integrator)
|
* SARIF file was processed successfully.
|
||||||
// then this variable will be assigned the start time of the action invoked
|
*/
|
||||||
// rather that the init action.
|
exports.CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF = "CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF";
|
||||||
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
|
||||||
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT";
|
exports.CODEQL_ACTION_TESTING_ENVIRONMENT = "CODEQL_ACTION_TESTING_ENVIRONMENT";
|
||||||
/** Used to disable uploading SARIF results or status reports to the GitHub API */
|
/** Used to disable uploading SARIF results or status reports to the GitHub API */
|
||||||
exports.CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
|
exports.CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
|
||||||
|
/**
|
||||||
|
* The time at which the first action (normally init) started executing.
|
||||||
|
* If a workflow invokes a different action without first invoking the init
|
||||||
|
* action (i.e. the upload action is being used by a third-party integrator)
|
||||||
|
* then this variable will be assigned the start time of the action invoked
|
||||||
|
* rather that the init action.
|
||||||
|
*/
|
||||||
|
exports.CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
||||||
|
exports.ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
|
||||||
//# sourceMappingURL=shared-environment.js.map
|
//# sourceMappingURL=shared-environment.js.map
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC"}
|
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;;AAAA;;;;GAIG;AACU,QAAA,sCAAsC,GACjD,wCAAwC,CAAC;AAE9B,QAAA,iCAAiC,GAC5C,mCAAmC,CAAC;AAEtC,kFAAkF;AACrE,QAAA,uBAAuB,GAAG,yBAAyB,CAAC;AAEjE;;;;;;GAMG;AACU,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAE1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC"}
|
||||||
40
lib/toolrunner-error-catcher.js
generated
40
lib/toolrunner-error-catcher.js
generated
|
|
@ -31,7 +31,7 @@ const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||||
* @param args optional arguments for tool. Escaping is handled by the lib.
|
* @param args optional arguments for tool. Escaping is handled by the lib.
|
||||||
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
|
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
|
||||||
* @param options optional exec options. See ExecOptions
|
* @param options optional exec options. See ExecOptions
|
||||||
* @returns Promise<number> exit code
|
* @returns ReturnState exit code and stdout output, if applicable
|
||||||
*/
|
*/
|
||||||
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
||||||
var _a, _b;
|
var _a, _b;
|
||||||
|
|
@ -54,40 +54,36 @@ async function toolrunnerErrorCatcher(commandLine, args, matchers, options) {
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
||||||
let returnState;
|
let exitCode;
|
||||||
try {
|
try {
|
||||||
returnState = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
|
exitCode = await new toolrunner.ToolRunner(await safeWhich.safeWhich(commandLine), args, {
|
||||||
...options,
|
...options,
|
||||||
listeners,
|
listeners,
|
||||||
ignoreReturnCode: true, // so we can check for specific codes using the matchers
|
ignoreReturnCode: true, // so we can check for specific codes using the matchers
|
||||||
}).exec();
|
}).exec();
|
||||||
}
|
// if there is a zero return code then we do not apply the matchers
|
||||||
catch (e) {
|
if (exitCode === 0)
|
||||||
returnState = e instanceof Error ? e : new Error(String(e));
|
return { exitCode, stdout };
|
||||||
}
|
if (matchers) {
|
||||||
// if there is a zero return code then we do not apply the matchers
|
for (const matcher of matchers) {
|
||||||
if (returnState === 0)
|
if (matcher.exitCode === exitCode ||
|
||||||
return returnState;
|
((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) ||
|
||||||
if (matchers) {
|
((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
|
||||||
for (const matcher of matchers) {
|
throw new Error(matcher.message);
|
||||||
if (matcher.exitCode === returnState ||
|
}
|
||||||
((_a = matcher.outputRegex) === null || _a === void 0 ? void 0 : _a.test(stderr)) ||
|
|
||||||
((_b = matcher.outputRegex) === null || _b === void 0 ? void 0 : _b.test(stdout))) {
|
|
||||||
throw new Error(matcher.message);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
if (typeof returnState === "number") {
|
|
||||||
// only if we were instructed to ignore the return code do we ever return it non-zero
|
// only if we were instructed to ignore the return code do we ever return it non-zero
|
||||||
if (options === null || options === void 0 ? void 0 : options.ignoreReturnCode) {
|
if (options === null || options === void 0 ? void 0 : options.ignoreReturnCode) {
|
||||||
return returnState;
|
return { exitCode, stdout };
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
throw new Error(`The process '${commandLine}' failed with exit code ${returnState}`);
|
throw new Error(`The process '${commandLine}' failed with exit code ${exitCode}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
catch (e) {
|
||||||
throw returnState;
|
const error = e instanceof Error ? e : new Error(String(e));
|
||||||
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
|
exports.toolrunnerErrorCatcher = toolrunnerErrorCatcher;
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AAIpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,WAA2B,CAAC;IAChC,IAAI;QACF,WAAW,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CAC3C,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;KACV;IAAC,OAAO,CAAC,EAAE;QACV,WAAW,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;KAC7D;IAED,mEAAmE;IACnE,IAAI,WAAW,KAAK,CAAC;QAAE,OAAO,WAAW,CAAC;IAE1C,IAAI,QAAQ,EAAE;QACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,WAAW;iBAChC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;iBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;gBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAClC;SACF;KACF;IAED,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnC,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,WAAW,CAAC;SACpB;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,WAAW,EAAE,CACpE,CAAC;SACH;KACF;SAAM;QACL,MAAM,WAAW,CAAC;KACnB;AACH,CAAC;AAnED,wDAmEC"}
|
{"version":3,"file":"toolrunner-error-catcher.js","sourceRoot":"","sources":["../src/toolrunner-error-catcher.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AACA,yEAA2D;AAC3D,kEAAoD;AASpD;;;;;;;;;;GAUG;AACI,KAAK,UAAU,sBAAsB,CAC1C,WAAmB,EACnB,IAAe,EACf,QAAyB,EACzB,OAAwB;;IAExB,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,MAAM,SAAS,GAAG;QAChB,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;QACD,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;;YACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC1B,IAAI,CAAA,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS,0CAAE,MAAM,MAAK,SAAS,EAAE;gBAC5C,OAAO,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;aAChC;QACH,CAAC;KACF,CAAC;IAEF,0GAA0G;IAC1G,IAAI,QAAgB,CAAC;IACrB,IAAI;QACF,QAAQ,GAAG,MAAM,IAAI,UAAU,CAAC,UAAU,CACxC,MAAM,SAAS,CAAC,SAAS,CAAC,WAAW,CAAC,EACtC,IAAI,EACJ;YACE,GAAG,OAAO;YACV,SAAS;YACT,gBAAgB,EAAE,IAAI,EAAE,wDAAwD;SACjF,CACF,CAAC,IAAI,EAAE,CAAC;QAET,mEAAmE;QACnE,IAAI,QAAQ,KAAK,CAAC;YAAE,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;QAEhD,IAAI,QAAQ,EAAE;YACZ,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;gBAC9B,IACE,OAAO,CAAC,QAAQ,KAAK,QAAQ;qBAC7B,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA;qBACjC,MAAA,OAAO,CAAC,WAAW,0CAAE,IAAI,CAAC,MAAM,CAAC,CAAA,EACjC;oBACA,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAClC;aACF;SACF;QAED,qFAAqF;QACrF,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;YAC7B,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC;SAC7B;aAAM;YACL,MAAM,IAAI,KAAK,CACb,gBAAgB,WAAW,2BAA2B,QAAQ,EAAE,CACjE,CAAC;SACH;KACF;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,KAAK,GAAG,CAAC,YAAY,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5D,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AAhED,wDAgEC"}
|
||||||
13
lib/toolrunner-error-catcher.test.js
generated
13
lib/toolrunner-error-catcher.test.js
generated
|
|
@ -33,7 +33,8 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
|
{ exitCode: 123, outputRegex: new RegExp("foo bar"), message: "error!!!" },
|
||||||
];
|
];
|
||||||
t.deepEqual(await exec.exec("node", testArgs), 0);
|
t.deepEqual(await exec.exec("node", testArgs), 0);
|
||||||
t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers), 0);
|
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, matchers);
|
||||||
|
t.deepEqual(returnState.exitCode, 0);
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("regex matchers are applied to stdout for non-zero exit code", async (t) => {
|
(0, ava_1.default)("regex matchers are applied to stdout for non-zero exit code", async (t) => {
|
||||||
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
|
const testArgs = buildDummyArgs("foo bar\\nblort qux", "", "", 1);
|
||||||
|
|
@ -116,9 +117,10 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
(0, ava_1.default)("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
|
(0, ava_1.default)("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
|
||||||
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
|
const testArgs = buildDummyArgs("standard output", "error output", "", 199);
|
||||||
await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
|
await t.throwsAsync((0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], { ignoreReturnCode: false }), { instanceOf: Error });
|
||||||
t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
||||||
ignoreReturnCode: true,
|
ignoreReturnCode: true,
|
||||||
}), 199);
|
});
|
||||||
|
t.deepEqual(returnState.exitCode, 199);
|
||||||
});
|
});
|
||||||
(0, ava_1.default)("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
(0, ava_1.default)("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
||||||
const stdoutExpected = "standard output";
|
const stdoutExpected = "standard output";
|
||||||
|
|
@ -134,9 +136,10 @@ const toolrunner_error_catcher_1 = require("./toolrunner-error-catcher");
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
|
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
|
||||||
t.deepEqual(await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
const returnState = await (0, toolrunner_error_catcher_1.toolrunnerErrorCatcher)("node", testArgs, [], {
|
||||||
listeners,
|
listeners,
|
||||||
}), 0);
|
});
|
||||||
|
t.deepEqual(returnState.exitCode, 0);
|
||||||
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
|
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
|
||||||
t.deepEqual(stderrActual, `${stderrExpected}\n`);
|
t.deepEqual(stderrActual, `${stderrExpected}\n`);
|
||||||
});
|
});
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
124
lib/upload-lib.js
generated
124
lib/upload-lib.js
generated
|
|
@ -36,6 +36,7 @@ const fingerprints = __importStar(require("./fingerprints"));
|
||||||
const repository_1 = require("./repository");
|
const repository_1 = require("./repository");
|
||||||
const sharedEnv = __importStar(require("./shared-environment"));
|
const sharedEnv = __importStar(require("./shared-environment"));
|
||||||
const util = __importStar(require("./util"));
|
const util = __importStar(require("./util"));
|
||||||
|
const workflow = __importStar(require("./workflow"));
|
||||||
// Takes a list of paths to sarif files and combines them together,
|
// Takes a list of paths to sarif files and combines them together,
|
||||||
// returning the contents of the combined sarif file.
|
// returning the contents of the combined sarif file.
|
||||||
function combineSarifFiles(sarifFiles) {
|
function combineSarifFiles(sarifFiles) {
|
||||||
|
|
@ -127,9 +128,8 @@ function findSarifFilesInDir(sarifPath) {
|
||||||
exports.findSarifFilesInDir = findSarifFilesInDir;
|
exports.findSarifFilesInDir = findSarifFilesInDir;
|
||||||
// Uploads a single sarif file or a directory of sarif files
|
// Uploads a single sarif file or a directory of sarif files
|
||||||
// depending on what the path happens to refer to.
|
// depending on what the path happens to refer to.
|
||||||
// Returns true iff the upload occurred and succeeded
|
async function uploadFromActions(sarifPath, checkoutPath, category, logger) {
|
||||||
async function uploadFromActions(sarifPath, logger) {
|
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(checkoutPath), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), category, util.getRequiredEnvParam("GITHUB_WORKFLOW"), workflow.getWorkflowRunID(), checkoutPath, actionsUtil.getRequiredInput("matrix"), logger);
|
||||||
return await uploadFiles(getSarifFilePaths(sarifPath), (0, repository_1.parseRepositoryNwo)(util.getRequiredEnvParam("GITHUB_REPOSITORY")), await actionsUtil.getCommitOid(actionsUtil.getRequiredInput("checkout_path")), await actionsUtil.getRef(), await actionsUtil.getAnalysisKey(), actionsUtil.getOptionalInput("category"), util.getRequiredEnvParam("GITHUB_WORKFLOW"), actionsUtil.getWorkflowRunID(), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getRequiredInput("matrix"), logger);
|
|
||||||
}
|
}
|
||||||
exports.uploadFromActions = uploadFromActions;
|
exports.uploadFromActions = uploadFromActions;
|
||||||
function getSarifFilePaths(sarifPath) {
|
function getSarifFilePaths(sarifPath) {
|
||||||
|
|
@ -269,48 +269,92 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
|
||||||
}
|
}
|
||||||
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
||||||
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
||||||
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
|
/**
|
||||||
async function waitForProcessing(repositoryNwo, sarifID, logger) {
|
* Waits until either the analysis is successfully processed, a processing error
|
||||||
|
* is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
|
||||||
|
*
|
||||||
|
* If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
|
||||||
|
* processing does not produce a single error mentioning the unsuccessful
|
||||||
|
* execution.
|
||||||
|
*/
|
||||||
|
async function waitForProcessing(repositoryNwo, sarifID, logger, options = {
|
||||||
|
isUnsuccessfulExecution: false,
|
||||||
|
}) {
|
||||||
logger.startGroup("Waiting for processing to finish");
|
logger.startGroup("Waiting for processing to finish");
|
||||||
const client = api.getApiClient();
|
try {
|
||||||
const statusCheckingStarted = Date.now();
|
const client = api.getApiClient();
|
||||||
// eslint-disable-next-line no-constant-condition
|
const statusCheckingStarted = Date.now();
|
||||||
while (true) {
|
// eslint-disable-next-line no-constant-condition
|
||||||
if (Date.now() >
|
while (true) {
|
||||||
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
|
if (Date.now() >
|
||||||
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS) {
|
||||||
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
||||||
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
|
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
||||||
break;
|
logger.warning("Timed out waiting for analysis to finish processing. Continuing.");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
let response = undefined;
|
||||||
|
try {
|
||||||
|
response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
|
||||||
|
owner: repositoryNwo.owner,
|
||||||
|
repo: repositoryNwo.repo,
|
||||||
|
sarif_id: sarifID,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
const status = response.data.processing_status;
|
||||||
|
logger.info(`Analysis upload status is ${status}.`);
|
||||||
|
if (status === "pending") {
|
||||||
|
logger.debug("Analysis processing is still pending...");
|
||||||
|
}
|
||||||
|
else if (options.isUnsuccessfulExecution) {
|
||||||
|
// We expect a specific processing error for unsuccessful executions, so
|
||||||
|
// handle these separately.
|
||||||
|
handleProcessingResultForUnsuccessfulExecution(response, status, logger);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
else if (status === "complete") {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
else if (status === "failed") {
|
||||||
|
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
util.assertNever(status);
|
||||||
|
}
|
||||||
|
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
||||||
}
|
}
|
||||||
let response = undefined;
|
|
||||||
try {
|
|
||||||
response = await client.request("GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id", {
|
|
||||||
owner: repositoryNwo.owner,
|
|
||||||
repo: repositoryNwo.repo,
|
|
||||||
sarif_id: sarifID,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
catch (e) {
|
|
||||||
logger.warning(`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
const status = response.data.processing_status;
|
|
||||||
logger.info(`Analysis upload status is ${status}.`);
|
|
||||||
if (status === "complete") {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
else if (status === "pending") {
|
|
||||||
logger.debug("Analysis processing is still pending...");
|
|
||||||
}
|
|
||||||
else if (status === "failed") {
|
|
||||||
throw new Error(`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`);
|
|
||||||
}
|
|
||||||
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
|
||||||
}
|
}
|
||||||
logger.endGroup();
|
finally {
|
||||||
|
logger.endGroup();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
exports.waitForProcessing = waitForProcessing;
|
exports.waitForProcessing = waitForProcessing;
|
||||||
|
/**
|
||||||
|
* Checks the processing result for an unsuccessful execution. Throws if the
|
||||||
|
* result is not a failure with a single "unsuccessful execution" error.
|
||||||
|
*/
|
||||||
|
function handleProcessingResultForUnsuccessfulExecution(response, status, logger) {
|
||||||
|
if (status === "failed" &&
|
||||||
|
Array.isArray(response.data.errors) &&
|
||||||
|
response.data.errors.length === 1 &&
|
||||||
|
response.data.errors[0].toString().startsWith("unsuccessful execution")) {
|
||||||
|
logger.debug("Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
|
||||||
|
'"unsuccessful execution" error, and no other errors.');
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
const shortMessage = "Failed to upload a SARIF file for the unsuccessful execution. Code scanning status " +
|
||||||
|
"information for the repository may be out of date as a result.";
|
||||||
|
const longMessage = shortMessage + status === "failed"
|
||||||
|
? ` Processing errors: ${response.data.errors}`
|
||||||
|
: ' Encountered no processing errors, but expected to receive an "unsuccessful execution" error.';
|
||||||
|
logger.debug(longMessage);
|
||||||
|
throw new Error(shortMessage);
|
||||||
|
}
|
||||||
|
}
|
||||||
function validateUniqueCategory(sarif) {
|
function validateUniqueCategory(sarif) {
|
||||||
var _a, _b, _c;
|
var _a, _b, _c;
|
||||||
// duplicate categories are allowed in the same sarif file
|
// duplicate categories are allowed in the same sarif file
|
||||||
|
|
|
||||||
File diff suppressed because one or more lines are too long
2
lib/upload-sarif-action.js
generated
2
lib/upload-sarif-action.js
generated
|
|
@ -43,7 +43,7 @@ async function run() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), (0, logging_1.getActionsLogger)());
|
const uploadResult = await upload_lib.uploadFromActions(actionsUtil.getRequiredInput("sarif_file"), actionsUtil.getRequiredInput("checkout_path"), actionsUtil.getOptionalInput("category"), (0, logging_1.getActionsLogger)());
|
||||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||||
// We don't upload results in test mode, so don't wait for processing
|
// We don't upload results in test mode, so don't wait for processing
|
||||||
if ((0, util_1.isInTestMode)()) {
|
if ((0, util_1.isInTestMode)()) {
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC
,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
{"version":3,"file":"upload-sarif-action.js","sourceRoot":"","sources":["../src/upload-sarif-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,4DAA8C;AAC9C,uCAA6C;AAC7C,6CAAkD;AAClD,yDAA2C;AAC3C,iCAKgB;AAEhB,8CAA8C;AAC9C,MAAM,GAAG,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAMvC,KAAK,UAAU,uBAAuB,CACpC,SAAe,EACf,WAA0C;IAE1C,MAAM,gBAAgB,GAAG,MAAM,WAAW,CAAC,sBAAsB,CAC/D,cAAc,EACd,SAAS,EACT,SAAS,CACV,CAAC;IACF,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAG,WAAW;KACf,CAAC;IACF,MAAM,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AACnD,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAA,4BAAqB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACnC,MAAM,IAAA,yBAAkB,EAAC,GAAG,CAAC,OAAO,CAAC,CAAC;IACtC,IACE,CAAC,CAAC,MAAM,WAAW,CAAC,gBAAgB,CAClC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,UAAU,EACV,SAAS,CACV,CACF,CAAC,EACF;QACA,OAAO;KACR;IAED,IAAI;QACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,iBAAiB,CACrD,WAAW,CAAC,gBAAgB,CAAC,YAAY,CAAC,EAC1C,WAAW,CAAC,gBAAgB,CAAC,eAAe,CAAC,EAC7C,WAAW,CAAC,gBAAgB,CAAC,UAAU,CAAC,EACxC,IAAA,0BAAgB,GAAE,CACnB,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,UAAU,EAAE,YAAY,CAAC,OAAO,CAAC,CAAC;QAEjD,qEAAqE;QACrE,IAAI,IAAA,mBAAY,GAAE,EAAE;YAClB,IAAI,CAAC,KAAK,CAAC,mDAAmD,CAAC,CAAC;SACjE;aAAM,IAAI,WAAW,CAAC,gBAAgB,CAAC,qBAAqB,CAAC,KAAK,MAAM,EAAE;YACzE,MAAM,UAAU,CAAC,iBAAiB,CAChC,IAAA,+BAAkB,EAAC,IAAA,0BAAmB,EAAC,mBAAmB,CAAC,CAAC,EAC5D,YAAY,CAAC,OAAO,EACpB,IAAA,0BAAgB,GAAE,CACnB,CAAC;SACH;QACD,MAAM,uBAAuB,CAAC,SAAS,EAAE,YAAY,CAAC,YAAY,CAAC,CAAC;KACrE;IAAC,OAAO,KAAK,EAAE;QACd,MAAM,OAAO,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACvE,MAAM,KAAK,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QACnE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC;QACxB,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACnB,MAAM,WAAW,CAAC,gBAAgB,CAChC,MAAM,WAAW,CAAC,sBAAsB,CACtC,cAAc,EACd,WAAW,CAAC,gBAAgB,CAAC,KAAK,CAAC,EACnC,SAAS,EACT,OAAO,EACP,KAAK,CACN,CACF,CAAC;QACF,OAAO;KACR;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,EAAE,
CAAC;KACb;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,sCAAsC,KAAK,EAAE,CAAC,CAAC;QAC9D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
|
||||||
9
lib/util.js
generated
9
lib/util.js
generated
|
|
@ -22,7 +22,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
exports.parseMatrixInput = exports.shouldBypassToolcache = exports.isHostedRunner = exports.checkForTimeout = exports.withTimeout = exports.tryGetFolderBytes = exports.listFolder = exports.doesDirectoryExist = exports.logCodeScanningConfigInCli = exports.useCodeScanningConfigInCli = exports.isInTestMode = exports.checkActionVersion = exports.getMlPoweredJsQueriesStatus = exports.getMlPoweredJsQueriesPack = exports.ML_POWERED_JS_QUERIES_PACK_NAME = exports.isGoodVersion = exports.delay = exports.bundleDb = exports.codeQlVersionAbove = exports.getCachedCodeQlVersion = exports.cacheCodeQlVersion = exports.isHTTPError = exports.UserError = exports.HTTPError = exports.getRequiredEnvParam = exports.enrichEnvironment = exports.initializeEnvironment = exports.EnvVar = exports.assertNever = exports.apiVersionInRange = exports.DisallowedAPIVersionReason = exports.checkGitHubVersionInRange = exports.getGitHubVersion = exports.GitHubVariant = exports.parseGitHubUrl = exports.getCodeQLDatabasePath = exports.getThreadsFlag = exports.getThreadsFlagValue = exports.getAddSnippetsFlag = exports.getMemoryFlag = exports.getMemoryFlagValue = exports.withTmpDir = exports.getToolNames = exports.getExtraOptionsEnvParam = exports.DID_AUTOBUILD_GO_ENV_VAR_NAME = exports.DEFAULT_DEBUG_DATABASE_NAME = exports.DEFAULT_DEBUG_ARTIFACT_NAME = exports.GITHUB_DOTCOM_URL = void 0;
|
||||||
const fs = __importStar(require("fs"));
|
const fs = __importStar(require("fs"));
|
||||||
const os = __importStar(require("os"));
|
const os = __importStar(require("os"));
|
||||||
const path = __importStar(require("path"));
|
const path = __importStar(require("path"));
|
||||||
|
|
@ -750,4 +750,11 @@ async function shouldBypassToolcache(featuresEnablement, codeqlUrl, languagesInp
|
||||||
return bypass;
|
return bypass;
|
||||||
}
|
}
|
||||||
exports.shouldBypassToolcache = shouldBypassToolcache;
|
exports.shouldBypassToolcache = shouldBypassToolcache;
|
||||||
|
function parseMatrixInput(matrixInput) {
|
||||||
|
if (matrixInput === undefined || matrixInput === "null") {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return JSON.parse(matrixInput);
|
||||||
|
}
|
||||||
|
exports.parseMatrixInput = parseMatrixInput;
|
||||||
//# sourceMappingURL=util.js.map
|
//# sourceMappingURL=util.js.map
|
||||||
File diff suppressed because one or more lines are too long
350
lib/workflow.js
generated
Normal file
350
lib/workflow.js
generated
Normal file
|
|
@ -0,0 +1,350 @@
|
||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
exports.getCheckoutPathInputOrThrow = exports.getUploadInputOrThrow = exports.getCategoryInputOrThrow = exports.getWorkflowRunID = exports.getWorkflowPath = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = void 0;
|
||||||
|
const fs = __importStar(require("fs"));
|
||||||
|
const path = __importStar(require("path"));
|
||||||
|
const core = __importStar(require("@actions/core"));
|
||||||
|
const yaml = __importStar(require("js-yaml"));
|
||||||
|
const api = __importStar(require("./api-client"));
|
||||||
|
const util_1 = require("./util");
|
||||||
|
function isObject(o) {
|
||||||
|
return o !== null && typeof o === "object";
|
||||||
|
}
|
||||||
|
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||||
|
function escapeRegExp(string) {
|
||||||
|
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||||
|
}
|
||||||
|
function patternToRegExp(value) {
|
||||||
|
return new RegExp(`^${value
|
||||||
|
.toString()
|
||||||
|
.split(GLOB_PATTERN)
|
||||||
|
.reduce(function (arr, cur) {
|
||||||
|
if (cur === "**") {
|
||||||
|
arr.push(".*?");
|
||||||
|
}
|
||||||
|
else if (cur === "*") {
|
||||||
|
arr.push("[^/]*?");
|
||||||
|
}
|
||||||
|
else if (cur) {
|
||||||
|
arr.push(escapeRegExp(cur));
|
||||||
|
}
|
||||||
|
return arr;
|
||||||
|
}, [])
|
||||||
|
.join("")}$`);
|
||||||
|
}
|
||||||
|
// this function should return true if patternA is a superset of patternB
|
||||||
|
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||||
|
function patternIsSuperset(patternA, patternB) {
|
||||||
|
return patternToRegExp(patternA).test(patternB);
|
||||||
|
}
|
||||||
|
exports.patternIsSuperset = patternIsSuperset;
|
||||||
|
function branchesToArray(branches) {
|
||||||
|
if (typeof branches === "string") {
|
||||||
|
return [branches];
|
||||||
|
}
|
||||||
|
if (Array.isArray(branches)) {
|
||||||
|
if (branches.length === 0) {
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
return branches;
|
||||||
|
}
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
function toCodedErrors(errors) {
|
||||||
|
return Object.entries(errors).reduce((acc, [code, message]) => {
|
||||||
|
acc[code] = { message, code };
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
// code to send back via status report
|
||||||
|
// message to add as a warning annotation to the run
|
||||||
|
exports.WorkflowErrors = toCodedErrors({
|
||||||
|
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
|
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
|
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
|
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
|
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||||
|
});
|
||||||
|
function getWorkflowErrors(doc) {
|
||||||
|
var _a, _b, _c, _d, _e;
|
||||||
|
const errors = [];
|
||||||
|
const jobName = process.env.GITHUB_JOB;
|
||||||
|
if (jobName) {
|
||||||
|
const job = (_a = doc === null || doc === void 0 ? void 0 : doc.jobs) === null || _a === void 0 ? void 0 : _a[jobName];
|
||||||
|
const steps = job === null || job === void 0 ? void 0 : job.steps;
|
||||||
|
if (Array.isArray(steps)) {
|
||||||
|
for (const step of steps) {
|
||||||
|
// this was advice that we used to give in the README
|
||||||
|
// we actually want to run the analysis on the merge commit
|
||||||
|
// to produce results that are more inline with expectations
|
||||||
|
// (i.e: this is what will happen if you merge this PR)
|
||||||
|
// and avoid some race conditions
|
||||||
|
if ((step === null || step === void 0 ? void 0 : step.run) === "git checkout HEAD^2") {
|
||||||
|
errors.push(exports.WorkflowErrors.CheckoutWrongHead);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let missingPush = false;
|
||||||
|
if (doc.on === undefined) {
|
||||||
|
// this is not a valid config
|
||||||
|
}
|
||||||
|
else if (typeof doc.on === "string") {
|
||||||
|
if (doc.on === "pull_request") {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (Array.isArray(doc.on)) {
|
||||||
|
const hasPush = doc.on.includes("push");
|
||||||
|
const hasPullRequest = doc.on.includes("pull_request");
|
||||||
|
if (hasPullRequest && !hasPush) {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (isObject(doc.on)) {
|
||||||
|
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||||
|
const hasPullRequest = Object.prototype.hasOwnProperty.call(doc.on, "pull_request");
|
||||||
|
if (!hasPush && hasPullRequest) {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
if (hasPush && hasPullRequest) {
|
||||||
|
const paths = (_b = doc.on.push) === null || _b === void 0 ? void 0 : _b.paths;
|
||||||
|
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||||
|
// if they didn't change any files
|
||||||
|
// currently we cannot go back through the history and find the most recent baseline
|
||||||
|
if (Array.isArray(paths) && paths.length > 0) {
|
||||||
|
errors.push(exports.WorkflowErrors.PathsSpecified);
|
||||||
|
}
|
||||||
|
const pathsIgnore = (_c = doc.on.push) === null || _c === void 0 ? void 0 : _c["paths-ignore"];
|
||||||
|
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||||
|
errors.push(exports.WorkflowErrors.PathsIgnoreSpecified);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// if doc.on.pull_request is null that means 'all branches'
|
||||||
|
// if doc.on.pull_request is undefined that means 'off'
|
||||||
|
// we only want to check for mismatched branches if pull_request is on.
|
||||||
|
if (doc.on.pull_request !== undefined) {
|
||||||
|
const push = branchesToArray((_d = doc.on.push) === null || _d === void 0 ? void 0 : _d.branches);
|
||||||
|
if (push !== "**") {
|
||||||
|
const pull_request = branchesToArray((_e = doc.on.pull_request) === null || _e === void 0 ? void 0 : _e.branches);
|
||||||
|
if (pull_request !== "**") {
|
||||||
|
const difference = pull_request.filter((value) => !push.some((o) => patternIsSuperset(o, value)));
|
||||||
|
if (difference.length > 0) {
|
||||||
|
// there are branches in pull_request that may not have a baseline
|
||||||
|
// because we are not building them on push
|
||||||
|
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (push.length > 0) {
|
||||||
|
// push is set up to run on a subset of branches
|
||||||
|
// and you could open a PR against a branch with no baseline
|
||||||
|
errors.push(exports.WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (missingPush) {
|
||||||
|
errors.push(exports.WorkflowErrors.MissingPushHook);
|
||||||
|
}
|
||||||
|
return errors;
|
||||||
|
}
|
||||||
|
exports.getWorkflowErrors = getWorkflowErrors;
|
||||||
|
async function validateWorkflow() {
|
||||||
|
let workflow;
|
||||||
|
try {
|
||||||
|
workflow = await getWorkflow();
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: getWorkflow() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
let workflowErrors;
|
||||||
|
try {
|
||||||
|
workflowErrors = getWorkflowErrors(workflow);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
if (workflowErrors.length > 0) {
|
||||||
|
let message;
|
||||||
|
try {
|
||||||
|
message = formatWorkflowErrors(workflowErrors);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
core.warning(message);
|
||||||
|
}
|
||||||
|
return formatWorkflowCause(workflowErrors);
|
||||||
|
}
|
||||||
|
exports.validateWorkflow = validateWorkflow;
|
||||||
|
function formatWorkflowErrors(errors) {
|
||||||
|
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||||
|
const errorsList = errors.map((e) => e.message).join(" ");
|
||||||
|
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||||
|
}
|
||||||
|
exports.formatWorkflowErrors = formatWorkflowErrors;
|
||||||
|
function formatWorkflowCause(errors) {
|
||||||
|
if (errors.length === 0) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return errors.map((e) => e.code).join(",");
|
||||||
|
}
|
||||||
|
exports.formatWorkflowCause = formatWorkflowCause;
|
||||||
|
async function getWorkflow() {
|
||||||
|
const relativePath = await getWorkflowPath();
|
||||||
|
const absolutePath = path.join((0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE"), relativePath);
|
||||||
|
return yaml.load(fs.readFileSync(absolutePath, "utf-8"));
|
||||||
|
}
|
||||||
|
exports.getWorkflow = getWorkflow;
|
||||||
|
/**
|
||||||
|
* Get the path of the currently executing workflow.
|
||||||
|
*/
|
||||||
|
async function getWorkflowPath() {
|
||||||
|
const repo_nwo = (0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY").split("/");
|
||||||
|
const owner = repo_nwo[0];
|
||||||
|
const repo = repo_nwo[1];
|
||||||
|
const run_id = Number((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"));
|
||||||
|
const apiClient = api.getApiClient();
|
||||||
|
const runsResponse = await apiClient.request("GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true", {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
run_id,
|
||||||
|
});
|
||||||
|
const workflowUrl = runsResponse.data.workflow_url;
|
||||||
|
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||||
|
return workflowResponse.data.path;
|
||||||
|
}
|
||||||
|
exports.getWorkflowPath = getWorkflowPath;
|
||||||
|
/**
|
||||||
|
* Get the workflow run ID.
|
||||||
|
*/
|
||||||
|
function getWorkflowRunID() {
|
||||||
|
const workflowRunID = parseInt((0, util_1.getRequiredEnvParam)("GITHUB_RUN_ID"), 10);
|
||||||
|
if (Number.isNaN(workflowRunID)) {
|
||||||
|
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||||
|
}
|
||||||
|
return workflowRunID;
|
||||||
|
}
|
||||||
|
exports.getWorkflowRunID = getWorkflowRunID;
|
||||||
|
function getStepsCallingAction(job, actionName) {
|
||||||
|
const steps = job.steps;
|
||||||
|
if (!Array.isArray(steps)) {
|
||||||
|
throw new Error(`Could not get steps calling ${actionName} since job.steps was not an array.`);
|
||||||
|
}
|
||||||
|
return steps.filter((step) => { var _a; return (_a = step.uses) === null || _a === void 0 ? void 0 : _a.includes(actionName); });
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the value of a particular input with which
|
||||||
|
* an Action in the workflow would be invoked.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the value of the input, or undefined if no such input is passed to the Action
|
||||||
|
* @throws an error if the value of the input could not be determined, or we could not
|
||||||
|
* determine that no such input is passed to the Action.
|
||||||
|
*/
|
||||||
|
function getInputOrThrow(workflow, jobName, actionName, inputName, matrixVars) {
|
||||||
|
var _a;
|
||||||
|
const preamble = `Could not get ${inputName} input to ${actionName} since`;
|
||||||
|
if (!workflow.jobs) {
|
||||||
|
throw new Error(`${preamble} the workflow has no jobs.`);
|
||||||
|
}
|
||||||
|
if (!workflow.jobs[jobName]) {
|
||||||
|
throw new Error(`${preamble} the workflow has no job named ${jobName}.`);
|
||||||
|
}
|
||||||
|
const stepsCallingAction = getStepsCallingAction(workflow.jobs[jobName], actionName);
|
||||||
|
if (stepsCallingAction.length === 0) {
|
||||||
|
throw new Error(`${preamble} the ${jobName} job does not call ${actionName}.`);
|
||||||
|
}
|
||||||
|
else if (stepsCallingAction.length > 1) {
|
||||||
|
throw new Error(`${preamble} the ${jobName} job calls ${actionName} multiple times.`);
|
||||||
|
}
|
||||||
|
let input = (_a = stepsCallingAction[0].with) === null || _a === void 0 ? void 0 : _a[inputName];
|
||||||
|
if (input !== undefined && matrixVars !== undefined) {
|
||||||
|
// Make a basic attempt to substitute matrix variables
|
||||||
|
// First normalize by removing whitespace
|
||||||
|
input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
|
||||||
|
for (const [key, value] of Object.entries(matrixVars)) {
|
||||||
|
input = input.replace(`\${{matrix.${key}}}`, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (input !== undefined && input.includes("${{")) {
|
||||||
|
throw new Error(`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`);
|
||||||
|
}
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Get the expected name of the analyze Action.
|
||||||
|
*
|
||||||
|
* This allows us to test workflow parsing functionality as a CodeQL Action PR check.
|
||||||
|
*/
|
||||||
|
function getAnalyzeActionName() {
|
||||||
|
if ((0, util_1.getRequiredEnvParam)("GITHUB_REPOSITORY") === "github/codeql-action") {
|
||||||
|
return "./analyze";
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return "github/codeql-action/analyze";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the category input for the particular job,
|
||||||
|
* given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the category input, or undefined if the category input is not defined
|
||||||
|
* @throws an error if the category input could not be determined
|
||||||
|
*/
|
||||||
|
function getCategoryInputOrThrow(workflow, jobName, matrixVars) {
|
||||||
|
return getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "category", matrixVars);
|
||||||
|
}
|
||||||
|
exports.getCategoryInputOrThrow = getCategoryInputOrThrow;
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the upload input for the particular job,
|
||||||
|
* given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the upload input
|
||||||
|
* @throws an error if the upload input could not be determined
|
||||||
|
*/
|
||||||
|
function getUploadInputOrThrow(workflow, jobName, matrixVars) {
|
||||||
|
return (getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "upload", matrixVars) || "true" // if unspecified, upload defaults to true
|
||||||
|
);
|
||||||
|
}
|
||||||
|
exports.getUploadInputOrThrow = getUploadInputOrThrow;
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the checkout_path input for the
|
||||||
|
* particular job, given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the checkout_path input
|
||||||
|
* @throws an error if the checkout_path input could not be determined
|
||||||
|
*/
|
||||||
|
function getCheckoutPathInputOrThrow(workflow, jobName, matrixVars) {
|
||||||
|
return (getInputOrThrow(workflow, jobName, getAnalyzeActionName(), "checkout_path", matrixVars) || (0, util_1.getRequiredEnvParam)("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
exports.getCheckoutPathInputOrThrow = getCheckoutPathInputOrThrow;
|
||||||
|
//# sourceMappingURL=workflow.js.map
|
||||||
1
lib/workflow.js.map
Normal file
1
lib/workflow.js.map
Normal file
File diff suppressed because one or more lines are too long
463
lib/workflow.test.js
generated
Normal file
463
lib/workflow.test.js
generated
Normal file
|
|
@ -0,0 +1,463 @@
|
||||||
|
"use strict";
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const ava_1 = __importDefault(require("ava"));
|
||||||
|
const yaml = __importStar(require("js-yaml"));
|
||||||
|
const testing_utils_1 = require("./testing-utils");
|
||||||
|
const workflow_1 = require("./workflow");
|
||||||
|
function errorCodes(actual, expected) {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
|
(0, testing_utils_1.setupTests)(ava_1.default);
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on is empty", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({ on: {} });
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({ on: ["push"] });
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({ on: ["pull_request"] });
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MissingPushHook]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is valid", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: ["push", "pull_request", "schedule"],
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"], paths: ["test/*"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.PathsSpecified]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main", "feature"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["main", "feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: 1,
|
||||||
|
pull_request: 1,
|
||||||
|
},
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: 1,
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: [1],
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { 1: 1 },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: 1 },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [1] },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: 1 } },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [undefined] },
|
||||||
|
}), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(1), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}), []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
pull_request:
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/*"] },
|
||||||
|
pull_request: { branches: "feature/moose" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/moose"] },
|
||||||
|
pull_request: { branches: "feature/*" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||||
|
});
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowErrors() when there is one error", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowErrors)([workflow_1.WorkflowErrors.CheckoutWrongHead]);
|
||||||
|
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowErrors)([
|
||||||
|
workflow_1.WorkflowErrors.CheckoutWrongHead,
|
||||||
|
workflow_1.WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowCause() with no errors", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowCause)([]);
|
||||||
|
t.deepEqual(message, undefined);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("formatWorkflowCause()", (t) => {
|
||||||
|
const message = (0, workflow_1.formatWorkflowCause)([
|
||||||
|
workflow_1.WorkflowErrors.CheckoutWrongHead,
|
||||||
|
workflow_1.WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||||
|
t.deepEqual((0, workflow_1.formatWorkflowCause)([]), undefined);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("patternIsSuperset()", (t) => {
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("main-*", "main"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("*", "*"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("*", "main-*"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("main-*", "*"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("main-*", "main"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("main", "main"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("*", "feature/*"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("**", "feature/*"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("feature-*", "**"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/c", "a/**/d"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/c", "a/**"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**", "a/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**/c", "a/main-**/c"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/d/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("a/main-**/c", "a/**/c"));
|
||||||
|
t.true((0, workflow_1.patternIsSuperset)("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||||
|
t.false((0, workflow_1.patternIsSuperset)("/robin/moose/release/goose", "/robin/*/release/*"));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when branches contain dots", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [4.1, master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [4.1, master]
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master, ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, [workflow_1.WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test3";
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() when on is missing", (t) => {
|
||||||
|
const errors = (0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
`));
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: "workflow_dispatch"
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: [workflow_dispatch]
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
`)), []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
`)), []));
|
||||||
|
t.deepEqual(...errorCodes((0, workflow_1.getWorkflowErrors)(yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: ["push"]
|
||||||
|
`)), []));
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
`), "analysis", {}), "some-category");
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
`), "analysis", {}), undefined);
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build foo
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: foo-category
|
||||||
|
bar:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build bar
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: bar-category
|
||||||
|
`), "bar", {}), "bar-category");
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is((0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
language: [javascript, python]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
language: \${{ matrix.language }}
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:\${{ matrix.language }}"
|
||||||
|
`), "analysis", { language: "javascript" }), "/language:javascript");
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "\${{ github.workflow }}"
|
||||||
|
`), "analysis", {}), {
|
||||||
|
message: "Could not get category input to github/codeql-action/analyze since it contained " +
|
||||||
|
"an unrecognized dynamic value.",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
(0, ava_1.default)("getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.throws(() => (0, workflow_1.getCategoryInputOrThrow)(yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: another-category
|
||||||
|
`), "analysis", {}), {
|
||||||
|
message: "Could not get category input to github/codeql-action/analyze since the analysis job " +
|
||||||
|
"calls github/codeql-action/analyze multiple times.",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
//# sourceMappingURL=workflow.test.js.map
|
||||||
1
lib/workflow.test.js.map
Normal file
1
lib/workflow.test.js.map
Normal file
File diff suppressed because one or more lines are too long
2
node_modules/.package-lock.json
generated
vendored
2
node_modules/.package-lock.json
generated
vendored
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.35",
|
"version": "2.1.36",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
|
|
|
||||||
4
package-lock.json
generated
4
package-lock.json
generated
|
|
@ -1,12 +1,12 @@
|
||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.35",
|
"version": "2.1.36",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.35",
|
"version": "2.1.36",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@actions/artifact": "^1.1.0",
|
"@actions/artifact": "^1.1.0",
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "codeql",
|
"name": "codeql",
|
||||||
"version": "2.1.35",
|
"version": "2.1.36",
|
||||||
"private": true,
|
"private": true,
|
||||||
"description": "CodeQL action",
|
"description": "CodeQL action",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,11 @@ name: "Export file baseline information"
|
||||||
description: "Tests that file baseline information is exported when the feature is enabled"
|
description: "Tests that file baseline information is exported when the feature is enabled"
|
||||||
versions: ["nightly-latest"]
|
versions: ["nightly-latest"]
|
||||||
steps:
|
steps:
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
|
# Windows doesn't support Swift, and only macOS latest and nightly-latest support Swift 5.7.1.
|
||||||
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
|
if: runner.os == 'Linux' || (runner.os == 'macOS' && matrix.version == 'cached')
|
||||||
with:
|
with:
|
||||||
swift-version: "5.7"
|
swift-version: "5.7.0"
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
languages: javascript
|
languages: javascript
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,16 @@
|
||||||
name: "Multi-language repository"
|
name: "Multi-language repository"
|
||||||
description: "An end-to-end integration test of a multi-language repository using automatic language detection"
|
description: "An end-to-end integration test of a multi-language repository using automatic language detection"
|
||||||
|
# Temporarily exclude nightly-latest to unblock release
|
||||||
|
versions: ["stable-20211005", "stable-20220120", "stable-20220401", "cached", "latest"]
|
||||||
operatingSystems: ["ubuntu", "macos"]
|
operatingSystems: ["ubuntu", "macos"]
|
||||||
env:
|
env:
|
||||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true" # Remove when Swift is GA.
|
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true" # Remove when Swift is GA.
|
||||||
steps:
|
steps:
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||||
with:
|
with:
|
||||||
swift-version: "5.7"
|
swift-version: "5.7.0"
|
||||||
|
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
|
|
|
||||||
34
pr-checks/checks/submit-sarif-failure.yml
Normal file
34
pr-checks/checks/submit-sarif-failure.yml
Normal file
|
|
@ -0,0 +1,34 @@
|
||||||
|
name: Submit SARIF after failure
|
||||||
|
description: Check that a SARIF file is submitted for the workflow run if it fails
|
||||||
|
versions: ["latest", "cached", "nightly-latest"]
|
||||||
|
operatingSystems: ["ubuntu"]
|
||||||
|
|
||||||
|
env:
|
||||||
|
# Internal-only environment variable used to indicate that the post-init Action
|
||||||
|
# should expect to upload a SARIF file for the failed run.
|
||||||
|
CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF: true
|
||||||
|
# Make sure the uploading SARIF files feature is enabled.
|
||||||
|
CODEQL_ACTION_UPLOAD_FAILED_SARIF: true
|
||||||
|
# Upload the failed SARIF file as an integration test of the API endpoint.
|
||||||
|
CODEQL_ACTION_TEST_MODE: false
|
||||||
|
# Mark telemetry for this workflow so it can be treated separately.
|
||||||
|
CODEQL_ACTION_TESTING_ENVIRONMENT: codeql-action-pr-checks
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: ./init
|
||||||
|
with:
|
||||||
|
languages: javascript
|
||||||
|
- name: Fail
|
||||||
|
# We want this job to pass if the Action correctly uploads the SARIF file for
|
||||||
|
# the failed run.
|
||||||
|
# Setting this step to continue on error means that it is marked as completing
|
||||||
|
# successfully, so will not fail the job.
|
||||||
|
continue-on-error: true
|
||||||
|
run: exit 1
|
||||||
|
- uses: ./analyze
|
||||||
|
# In a real workflow, this step wouldn't run. Since we used `continue-on-error`
|
||||||
|
# above, we manually disable it with an `if` condition.
|
||||||
|
if: false
|
||||||
|
with:
|
||||||
|
category: "/test-codeql-version:${{ matrix.version }}"
|
||||||
|
|
@ -6,11 +6,11 @@ operatingSystems: ["macos"]
|
||||||
env:
|
env:
|
||||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true"
|
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true"
|
||||||
steps:
|
steps:
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||||
with:
|
with:
|
||||||
swift-version: "5.7"
|
swift-version: "5.7.0"
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
languages: swift
|
languages: swift
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,17 @@
|
||||||
name: "Swift analysis using a custom build command"
|
name: "Swift analysis using a custom build command"
|
||||||
description: "Tests creation of a Swift database using custom build"
|
description: "Tests creation of a Swift database using custom build"
|
||||||
versions: ["latest", "cached", "nightly-latest"]
|
# Temporarily exclude nightly-latest to unblock release
|
||||||
|
versions: ["latest", "cached"]
|
||||||
operatingSystems: ["ubuntu", "macos"]
|
operatingSystems: ["ubuntu", "macos"]
|
||||||
env:
|
env:
|
||||||
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true"
|
CODEQL_ENABLE_EXPERIMENTAL_FEATURES_SWIFT: "true"
|
||||||
DOTNET_GENERATE_ASPNET_CERTIFICATE: "false"
|
DOTNET_GENERATE_ASPNET_CERTIFICATE: "false"
|
||||||
steps:
|
steps:
|
||||||
- uses: swift-actions/setup-swift@5cdaa9161ad1f55ae39a5ea1784ef96de72f95d9
|
- uses: swift-actions/setup-swift@194625b58a582570f61cc707c3b558086c26b723
|
||||||
# Only macOS latest and nightly-latest support Swift 5.7.1
|
# Only macOS latest and nightly-latest support Swift 5.7.1
|
||||||
if: runner.os == 'Linux' || matrix.version == 'cached'
|
if: runner.os == 'Linux' || matrix.version == 'cached'
|
||||||
with:
|
with:
|
||||||
swift-version: "5.7"
|
swift-version: "5.7.0"
|
||||||
- uses: ./../action/init
|
- uses: ./../action/init
|
||||||
with:
|
with:
|
||||||
languages: swift
|
languages: swift
|
||||||
|
|
|
||||||
|
|
@ -115,7 +115,8 @@ for file in os.listdir('checks'):
|
||||||
checkJob[key] = checkSpecification[key]
|
checkJob[key] = checkSpecification[key]
|
||||||
|
|
||||||
checkJob['env'] = checkJob.get('env', {})
|
checkJob['env'] = checkJob.get('env', {})
|
||||||
checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True
|
if 'CODEQL_ACTION_TEST_MODE' not in checkJob['env']:
|
||||||
|
checkJob['env']['CODEQL_ACTION_TEST_MODE'] = True
|
||||||
checkName = file[:len(file) - 4]
|
checkName = file[:len(file) - 4]
|
||||||
|
|
||||||
with open(f"../.github/workflows/__{checkName}.yml", 'w') as output_stream:
|
with open(f"../.github/workflows/__{checkName}.yml", 'w') as output_stream:
|
||||||
|
|
|
||||||
|
|
@ -33,10 +33,17 @@ def _check_output(command, extra_env={}):
|
||||||
|
|
||||||
def install_packages_with_poetry():
|
def install_packages_with_poetry():
|
||||||
|
|
||||||
# To handle poetry 1.2, which started to use keyring interaction MUCH more, we need
|
extra_poetry_env = {
|
||||||
# add a workaround. See
|
# To handle poetry 1.2, which started to use keyring interaction MUCH more, we need
|
||||||
# https://github.com/python-poetry/poetry/issues/2692#issuecomment-1235683370
|
# add a workaround. See
|
||||||
extra_poetry_env = {"PYTHON_KEYRING_BACKEND": "keyring.backends.null.Keyring"}
|
# https://github.com/python-poetry/poetry/issues/2692#issuecomment-1235683370
|
||||||
|
"PYTHON_KEYRING_BACKEND": "keyring.backends.null.Keyring",
|
||||||
|
# Projects that specify `virtualenvs.in-project = true` in their poetry.toml
|
||||||
|
# would get the venv created inside the repo directory, which would cause CodeQL
|
||||||
|
# to consider it as user-written code. We don't want this to happen. see
|
||||||
|
# https://python-poetry.org/docs/configuration/#virtualenvsin-project
|
||||||
|
"POETRY_VIRTUALENVS_IN_PROJECT": "False",
|
||||||
|
}
|
||||||
|
|
||||||
command = [sys.executable, '-m', 'poetry']
|
command = [sys.executable, '-m', 'poetry']
|
||||||
if sys.platform.startswith('win32'):
|
if sys.platform.startswith('win32'):
|
||||||
|
|
|
||||||
29
python-setup/tests/pipenv/python-3.8/Pipfile.lock
generated
29
python-setup/tests/pipenv/python-3.8/Pipfile.lock
generated
|
|
@ -1,7 +1,7 @@
|
||||||
{
|
{
|
||||||
"_meta": {
|
"_meta": {
|
||||||
"hash": {
|
"hash": {
|
||||||
"sha256": "cbd95e61b2b1e90be5379b643dfef7e88a1a4359e9a76803c2ca09b0caa83525"
|
"sha256": "acbc8c4e7f2f98f1059b2a93d581ef43f4aa0c9741e64e6253adff8e35fbd99e"
|
||||||
},
|
},
|
||||||
"pipfile-spec": 6,
|
"pipfile-spec": 6,
|
||||||
"requires": {
|
"requires": {
|
||||||
|
|
@ -18,26 +18,27 @@
|
||||||
"default": {
|
"default": {
|
||||||
"certifi": {
|
"certifi": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872",
|
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
|
||||||
"sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"
|
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
|
||||||
],
|
],
|
||||||
"version": "==2021.10.8"
|
"index": "pypi",
|
||||||
|
"version": "==2022.12.7"
|
||||||
},
|
},
|
||||||
"charset-normalizer": {
|
"charset-normalizer": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0",
|
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
|
||||||
"sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"
|
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3'",
|
"markers": "python_version >= '3'",
|
||||||
"version": "==2.0.7"
|
"version": "==2.0.12"
|
||||||
},
|
},
|
||||||
"idna": {
|
"idna": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff",
|
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
|
||||||
"sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
|
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3'",
|
"markers": "python_version >= '3'",
|
||||||
"version": "==3.3"
|
"version": "==3.4"
|
||||||
},
|
},
|
||||||
"requests": {
|
"requests": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
|
|
@ -49,11 +50,11 @@
|
||||||
},
|
},
|
||||||
"urllib3": {
|
"urllib3": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece",
|
"sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc",
|
||||||
"sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"
|
"sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
|
||||||
"version": "==1.26.7"
|
"version": "==1.26.13"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"develop": {}
|
"develop": {}
|
||||||
|
|
|
||||||
29
python-setup/tests/pipenv/requests-3/Pipfile.lock
generated
29
python-setup/tests/pipenv/requests-3/Pipfile.lock
generated
|
|
@ -1,7 +1,7 @@
|
||||||
{
|
{
|
||||||
"_meta": {
|
"_meta": {
|
||||||
"hash": {
|
"hash": {
|
||||||
"sha256": "9e4895d52c597b97d49cf69d314224f6738668271d51f71534063f42b565e85c"
|
"sha256": "70e8bf6bc774f5ca177467cab4e67d4264d0536857993326abc13ff43063bec0"
|
||||||
},
|
},
|
||||||
"pipfile-spec": 6,
|
"pipfile-spec": 6,
|
||||||
"requires": {},
|
"requires": {},
|
||||||
|
|
@ -16,26 +16,27 @@
|
||||||
"default": {
|
"default": {
|
||||||
"certifi": {
|
"certifi": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872",
|
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3",
|
||||||
"sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"
|
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"
|
||||||
],
|
],
|
||||||
"version": "==2021.10.8"
|
"index": "pypi",
|
||||||
|
"version": "==2022.12.7"
|
||||||
},
|
},
|
||||||
"charset-normalizer": {
|
"charset-normalizer": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0",
|
"sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597",
|
||||||
"sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"
|
"sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3'",
|
"markers": "python_version >= '3'",
|
||||||
"version": "==2.0.7"
|
"version": "==2.0.12"
|
||||||
},
|
},
|
||||||
"idna": {
|
"idna": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff",
|
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
|
||||||
"sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
|
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '3'",
|
"markers": "python_version >= '3'",
|
||||||
"version": "==3.3"
|
"version": "==3.4"
|
||||||
},
|
},
|
||||||
"requests": {
|
"requests": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
|
|
@ -47,11 +48,11 @@
|
||||||
},
|
},
|
||||||
"urllib3": {
|
"urllib3": {
|
||||||
"hashes": [
|
"hashes": [
|
||||||
"sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece",
|
"sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc",
|
||||||
"sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"
|
"sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"
|
||||||
],
|
],
|
||||||
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
|
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
|
||||||
"version": "==1.26.7"
|
"version": "==1.26.13"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"develop": {}
|
"develop": {}
|
||||||
|
|
|
||||||
16
python-setup/tests/poetry/requests-3/poetry.lock
generated
16
python-setup/tests/poetry/requests-3/poetry.lock
generated
|
|
@ -1,10 +1,10 @@
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "certifi"
|
name = "certifi"
|
||||||
version = "2021.10.8"
|
version = "2022.12.7"
|
||||||
description = "Python package for providing Mozilla's CA Bundle."
|
description = "Python package for providing Mozilla's CA Bundle."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "charset-normalizer"
|
name = "charset-normalizer"
|
||||||
|
|
@ -15,7 +15,7 @@ optional = false
|
||||||
python-versions = ">=3.5.0"
|
python-versions = ">=3.5.0"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
unicode_backport = ["unicodedata2"]
|
unicode-backport = ["unicodedata2"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "idna"
|
name = "idna"
|
||||||
|
|
@ -41,7 +41,7 @@ urllib3 = ">=1.21.1,<1.27"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
|
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
|
||||||
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
|
use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "urllib3"
|
name = "urllib3"
|
||||||
|
|
@ -53,18 +53,18 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
brotli = ["brotlipy (>=0.6.0)"]
|
brotli = ["brotlipy (>=0.6.0)"]
|
||||||
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
|
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
|
||||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "1.1"
|
lock-version = "1.1"
|
||||||
python-versions = "^3.6"
|
python-versions = "^3.6"
|
||||||
content-hash = "0688bcc269cb32eab2edeadcb342631e24cf30fd9ef54f8710010cc06cd523c5"
|
content-hash = "3186fede9fea5b617c0bcebda3034f2d889a3c4579d60dd45945772895a28b7d"
|
||||||
|
|
||||||
[metadata.files]
|
[metadata.files]
|
||||||
certifi = [
|
certifi = [
|
||||||
{file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
|
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||||
{file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
|
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||||
]
|
]
|
||||||
charset-normalizer = [
|
charset-normalizer = [
|
||||||
{file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
|
{file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"},
|
||||||
|
|
|
||||||
2
python-setup/tests/poetry/requests-3/poetry.toml
Normal file
2
python-setup/tests/poetry/requests-3/poetry.toml
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
[virtualenvs]
|
||||||
|
in-project = true
|
||||||
|
|
@ -2,20 +2,12 @@ import * as fs from "fs";
|
||||||
import * as path from "path";
|
import * as path from "path";
|
||||||
|
|
||||||
import test from "ava";
|
import test from "ava";
|
||||||
import * as yaml from "js-yaml";
|
|
||||||
import * as sinon from "sinon";
|
import * as sinon from "sinon";
|
||||||
|
|
||||||
import * as actionsutil from "./actions-util";
|
import * as actionsutil from "./actions-util";
|
||||||
import { setupActionsVars, setupTests } from "./testing-utils";
|
import { setupActionsVars, setupTests } from "./testing-utils";
|
||||||
import { initializeEnvironment, withTmpDir } from "./util";
|
import { initializeEnvironment, withTmpDir } from "./util";
|
||||||
|
|
||||||
function errorCodes(
|
|
||||||
actual: actionsutil.CodedError[],
|
|
||||||
expected: actionsutil.CodedError[]
|
|
||||||
): [string[], string[]] {
|
|
||||||
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
|
||||||
}
|
|
||||||
|
|
||||||
setupTests(test);
|
setupTests(test);
|
||||||
|
|
||||||
test("getRef() throws on the empty string", async (t) => {
|
test("getRef() throws on the empty string", async (t) => {
|
||||||
|
|
@ -188,536 +180,6 @@ test("computeAutomationID()", async (t) => {
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("getWorkflowErrors() when on is empty", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: {} });
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["push"] });
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({ on: ["pull_request"] });
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MissingPushHook])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is valid", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request", "schedule"],
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push should not have a path", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"], paths: ["test/*"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.PathsSpecified])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is a correct object", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
pull_request:
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main", "feature"] },
|
|
||||||
pull_request: { branches: ["main"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["main"] },
|
|
||||||
pull_request: { branches: ["main", "feature"] },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: 1,
|
|
||||||
pull_request: 1,
|
|
||||||
},
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: 1,
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: [1],
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { 1: 1 },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: 1 },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [1] },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: 1 } },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: 1,
|
|
||||||
jobs: { test: [undefined] },
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(actionsutil.getWorkflowErrors(1 as any), []));
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
pull_request: {
|
|
||||||
branches: 1,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} as any),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: ["main"]
|
|
||||||
pull_request:
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/*"] },
|
|
||||||
pull_request: { branches: "feature/moose" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: {
|
|
||||||
push: { branches: ["feature/moose"] },
|
|
||||||
pull_request: { branches: "feature/*" },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.MismatchedBranches])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
|
|
||||||
const errors = actionsutil.getWorkflowErrors({
|
|
||||||
on: ["push", "pull_request"],
|
|
||||||
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
|
||||||
});
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowErrors() when there is one error", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowErrors() when there are multiple errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowErrors([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowCause() with no errors", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([]);
|
|
||||||
|
|
||||||
t.deepEqual(message, undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("formatWorkflowCause()", (t) => {
|
|
||||||
const message = actionsutil.formatWorkflowCause([
|
|
||||||
actionsutil.WorkflowErrors.CheckoutWrongHead,
|
|
||||||
actionsutil.WorkflowErrors.PathsSpecified,
|
|
||||||
]);
|
|
||||||
|
|
||||||
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
|
||||||
t.deepEqual(actionsutil.formatWorkflowCause([]), undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("patternIsSuperset()", (t) => {
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("*", "main-*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("main-*", "main"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("main", "main"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("*", "feature/*"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("**", "feature/*"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("feature-*", "**"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**/d"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c", "a/**"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**", "a/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/c", "a/main-**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
|
||||||
t.true(actionsutil.patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
|
||||||
t.false(actionsutil.patternIsSuperset("a/main-**/c", "a/**/c"));
|
|
||||||
t.true(
|
|
||||||
actionsutil.patternIsSuperset(
|
|
||||||
"/robin/*/release/*",
|
|
||||||
"/robin/moose/release/goose"
|
|
||||||
)
|
|
||||||
);
|
|
||||||
t.false(
|
|
||||||
actionsutil.patternIsSuperset(
|
|
||||||
"/robin/moose/release/goose",
|
|
||||||
"/robin/*/release/*"
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when branches contain dots", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [4.1, master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [4.1, master]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master, ]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test";
|
|
||||||
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(errors, [actionsutil.WorkflowErrors.CheckoutWrongHead])
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
|
||||||
process.env.GITHUB_JOB = "test3";
|
|
||||||
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
pull_request:
|
|
||||||
# The branches below must be a subset of the branches above
|
|
||||||
branches: [master]
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test2:
|
|
||||||
steps:
|
|
||||||
- run: "git checkout HEAD^2"
|
|
||||||
|
|
||||||
test3:
|
|
||||||
steps: []
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() when on is missing", (t) => {
|
|
||||||
const errors = actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(...errorCodes(errors, []));
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() with a different on setup", (t) => {
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: "workflow_dispatch"
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: [workflow_dispatch]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
workflow_dispatch: {}
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [master]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(
|
|
||||||
...errorCodes(
|
|
||||||
actionsutil.getWorkflowErrors(
|
|
||||||
yaml.load(`
|
|
||||||
name: "CodeQL"
|
|
||||||
on: ["push"]
|
|
||||||
`) as actionsutil.Workflow
|
|
||||||
),
|
|
||||||
[]
|
|
||||||
)
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
test("initializeEnvironment", (t) => {
|
test("initializeEnvironment", (t) => {
|
||||||
initializeEnvironment("1.2.3");
|
initializeEnvironment("1.2.3");
|
||||||
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
t.deepEqual(process.env.CODEQL_ACTION_VERSION, "1.2.3");
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,6 @@ import * as path from "path";
|
||||||
import * as core from "@actions/core";
|
import * as core from "@actions/core";
|
||||||
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||||
import * as safeWhich from "@chrisgavin/safe-which";
|
import * as safeWhich from "@chrisgavin/safe-which";
|
||||||
import * as yaml from "js-yaml";
|
|
||||||
|
|
||||||
import * as api from "./api-client";
|
import * as api from "./api-client";
|
||||||
import { Config } from "./config-utils";
|
import { Config } from "./config-utils";
|
||||||
|
|
@ -18,8 +17,10 @@ import {
|
||||||
GITHUB_DOTCOM_URL,
|
GITHUB_DOTCOM_URL,
|
||||||
isHTTPError,
|
isHTTPError,
|
||||||
isInTestMode,
|
isInTestMode,
|
||||||
|
parseMatrixInput,
|
||||||
UserError,
|
UserError,
|
||||||
} from "./util";
|
} from "./util";
|
||||||
|
import { getWorkflowPath } from "./workflow";
|
||||||
|
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
|
|
@ -156,288 +157,6 @@ export const determineMergeBaseCommitOid = async function (): Promise<
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
interface WorkflowJobStep {
|
|
||||||
run: any;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface WorkflowJob {
|
|
||||||
steps?: WorkflowJobStep[];
|
|
||||||
}
|
|
||||||
|
|
||||||
interface WorkflowTrigger {
|
|
||||||
branches?: string[] | string;
|
|
||||||
paths?: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
// on: {} then push/pull_request are undefined
|
|
||||||
// on:
|
|
||||||
// push:
|
|
||||||
// pull_request:
|
|
||||||
// then push/pull_request are null
|
|
||||||
interface WorkflowTriggers {
|
|
||||||
push?: WorkflowTrigger | null;
|
|
||||||
pull_request?: WorkflowTrigger | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface Workflow {
|
|
||||||
jobs?: { [key: string]: WorkflowJob };
|
|
||||||
on?: string | string[] | WorkflowTriggers;
|
|
||||||
}
|
|
||||||
|
|
||||||
function isObject(o: unknown): o is object {
|
|
||||||
return o !== null && typeof o === "object";
|
|
||||||
}
|
|
||||||
|
|
||||||
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
|
||||||
|
|
||||||
function escapeRegExp(string) {
|
|
||||||
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
|
||||||
}
|
|
||||||
|
|
||||||
function patternToRegExp(value) {
|
|
||||||
return new RegExp(
|
|
||||||
`^${value
|
|
||||||
.toString()
|
|
||||||
.split(GLOB_PATTERN)
|
|
||||||
.reduce(function (arr, cur) {
|
|
||||||
if (cur === "**") {
|
|
||||||
arr.push(".*?");
|
|
||||||
} else if (cur === "*") {
|
|
||||||
arr.push("[^/]*?");
|
|
||||||
} else if (cur) {
|
|
||||||
arr.push(escapeRegExp(cur));
|
|
||||||
}
|
|
||||||
return arr;
|
|
||||||
}, [])
|
|
||||||
.join("")}$`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// this function should return true if patternA is a superset of patternB
|
|
||||||
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
|
||||||
export function patternIsSuperset(patternA: string, patternB: string): boolean {
|
|
||||||
return patternToRegExp(patternA).test(patternB);
|
|
||||||
}
|
|
||||||
|
|
||||||
function branchesToArray(branches?: string | null | string[]): string[] | "**" {
|
|
||||||
if (typeof branches === "string") {
|
|
||||||
return [branches];
|
|
||||||
}
|
|
||||||
if (Array.isArray(branches)) {
|
|
||||||
if (branches.length === 0) {
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
return branches;
|
|
||||||
}
|
|
||||||
return "**";
|
|
||||||
}
|
|
||||||
export interface CodedError {
|
|
||||||
message: string;
|
|
||||||
code: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function toCodedErrors<T>(errors: T): Record<keyof T, CodedError> {
|
|
||||||
return Object.entries(errors).reduce((acc, [key, value]) => {
|
|
||||||
acc[key] = { message: value, code: key };
|
|
||||||
return acc;
|
|
||||||
}, {} as Record<keyof T, CodedError>);
|
|
||||||
}
|
|
||||||
|
|
||||||
// code to send back via status report
|
|
||||||
// message to add as a warning annotation to the run
|
|
||||||
export const WorkflowErrors = toCodedErrors({
|
|
||||||
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
|
||||||
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
|
||||||
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
|
||||||
});
|
|
||||||
|
|
||||||
/**
 * Inspect a parsed workflow document for configurations that are known to
 * degrade Code Scanning results (missing baselines, stale advice, etc.).
 *
 * @param doc The parsed YAML of the currently executing workflow.
 * @returns All detected issues as coded errors; empty if none were found.
 */
export function getWorkflowErrors(doc: Workflow): CodedError[] {
  const errors: CodedError[] = [];

  // GITHUB_JOB identifies the job this action is running in, letting us
  // inspect only the relevant job's steps.
  const jobName = process.env.GITHUB_JOB;

  if (jobName) {
    const job = doc?.jobs?.[jobName];

    const steps = job?.steps;

    if (Array.isArray(steps)) {
      for (const step of steps) {
        // this was advice that we used to give in the README
        // we actually want to run the analysis on the merge commit
        // to produce results that are more inline with expectations
        // (i.e: this is what will happen if you merge this PR)
        // and avoid some race conditions
        if (step?.run === "git checkout HEAD^2") {
          errors.push(WorkflowErrors.CheckoutWrongHead);
          break;
        }
      }
    }
  }

  let missingPush = false;

  // `on` may be a single event string, an array of event names, or a mapping
  // from event name to configuration — handle each shape separately.
  if (doc.on === undefined) {
    // this is not a valid config
  } else if (typeof doc.on === "string") {
    if (doc.on === "pull_request") {
      missingPush = true;
    }
  } else if (Array.isArray(doc.on)) {
    const hasPush = doc.on.includes("push");
    const hasPullRequest = doc.on.includes("pull_request");
    if (hasPullRequest && !hasPush) {
      missingPush = true;
    }
  } else if (isObject(doc.on)) {
    // hasOwnProperty via the prototype guards against keys like "constructor".
    const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
    const hasPullRequest = Object.prototype.hasOwnProperty.call(
      doc.on,
      "pull_request"
    );

    if (!hasPush && hasPullRequest) {
      missingPush = true;
    }
    if (hasPush && hasPullRequest) {
      const paths = doc.on.push?.paths;
      // if you specify paths or paths-ignore you can end up with commits that have no baseline
      // if they didn't change any files
      // currently we cannot go back through the history and find the most recent baseline
      if (Array.isArray(paths) && paths.length > 0) {
        errors.push(WorkflowErrors.PathsSpecified);
      }
      const pathsIgnore = doc.on.push?.["paths-ignore"];
      if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
        errors.push(WorkflowErrors.PathsIgnoreSpecified);
      }
    }

    // if doc.on.pull_request is null that means 'all branches'
    // if doc.on.pull_request is undefined that means 'off'
    // we only want to check for mismatched branches if pull_request is on.
    if (doc.on.pull_request !== undefined) {
      const push = branchesToArray(doc.on.push?.branches);

      // branchesToArray returns "**" when every branch is covered, in which
      // case no pull_request branch can lack a push baseline.
      if (push !== "**") {
        const pull_request = branchesToArray(doc.on.pull_request?.branches);

        if (pull_request !== "**") {
          // Branches built on pull_request but matched by no push pattern.
          const difference = pull_request.filter(
            (value) => !push.some((o) => patternIsSuperset(o, value))
          );
          if (difference.length > 0) {
            // there are branches in pull_request that may not have a baseline
            // because we are not building them on push
            errors.push(WorkflowErrors.MismatchedBranches);
          }
        } else if (push.length > 0) {
          // push is set up to run on a subset of branches
          // and you could open a PR against a branch with no baseline
          errors.push(WorkflowErrors.MismatchedBranches);
        }
      }
    }
  }

  if (missingPush) {
    errors.push(WorkflowErrors.MissingPushHook);
  }

  return errors;
}
|
|
||||||
|
|
||||||
export async function validateWorkflow(): Promise<undefined | string> {
|
|
||||||
let workflow: Workflow;
|
|
||||||
try {
|
|
||||||
workflow = await getWorkflow();
|
|
||||||
} catch (e) {
|
|
||||||
return `error: getWorkflow() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
let workflowErrors: CodedError[];
|
|
||||||
try {
|
|
||||||
workflowErrors = getWorkflowErrors(workflow);
|
|
||||||
} catch (e) {
|
|
||||||
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (workflowErrors.length > 0) {
|
|
||||||
let message: string;
|
|
||||||
try {
|
|
||||||
message = formatWorkflowErrors(workflowErrors);
|
|
||||||
} catch (e) {
|
|
||||||
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
|
||||||
}
|
|
||||||
core.warning(message);
|
|
||||||
}
|
|
||||||
|
|
||||||
return formatWorkflowCause(workflowErrors);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function formatWorkflowErrors(errors: CodedError[]): string {
|
|
||||||
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
|
||||||
|
|
||||||
const errorsList = errors.map((e) => e.message).join(" ");
|
|
||||||
|
|
||||||
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function formatWorkflowCause(errors: CodedError[]): undefined | string {
|
|
||||||
if (errors.length === 0) {
|
|
||||||
return undefined;
|
|
||||||
}
|
|
||||||
return errors.map((e) => e.code).join(",");
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getWorkflow(): Promise<Workflow> {
|
|
||||||
const relativePath = await getWorkflowPath();
|
|
||||||
const absolutePath = path.join(
|
|
||||||
getRequiredEnvParam("GITHUB_WORKSPACE"),
|
|
||||||
relativePath
|
|
||||||
);
|
|
||||||
|
|
||||||
return yaml.load(fs.readFileSync(absolutePath, "utf-8")) as Workflow;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the path of the currently executing workflow.
|
|
||||||
*/
|
|
||||||
async function getWorkflowPath(): Promise<string> {
|
|
||||||
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
|
||||||
const owner = repo_nwo[0];
|
|
||||||
const repo = repo_nwo[1];
|
|
||||||
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
|
|
||||||
|
|
||||||
const apiClient = api.getApiClient();
|
|
||||||
const runsResponse = await apiClient.request(
|
|
||||||
"GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true",
|
|
||||||
{
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
run_id,
|
|
||||||
}
|
|
||||||
);
|
|
||||||
const workflowUrl = runsResponse.data.workflow_url;
|
|
||||||
|
|
||||||
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
|
||||||
|
|
||||||
return workflowResponse.data.path;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the workflow run ID.
|
|
||||||
*/
|
|
||||||
export function getWorkflowRunID(): number {
|
|
||||||
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
|
||||||
if (Number.isNaN(workflowRunID)) {
|
|
||||||
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
|
||||||
}
|
|
||||||
return workflowRunID;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the analysis key parameter for the current job.
|
* Get the analysis key parameter for the current job.
|
||||||
*
|
*
|
||||||
|
|
@ -474,10 +193,10 @@ export function computeAutomationID(
|
||||||
): string {
|
): string {
|
||||||
let automationID = `${analysis_key}/`;
|
let automationID = `${analysis_key}/`;
|
||||||
|
|
||||||
// the id has to be deterministic so we sort the fields
|
const matrix = parseMatrixInput(environment);
|
||||||
if (environment !== undefined && environment !== "null") {
|
if (matrix !== undefined) {
|
||||||
const environmentObject = JSON.parse(environment);
|
// the id has to be deterministic so we sort the fields
|
||||||
for (const entry of Object.entries(environmentObject).sort()) {
|
for (const entry of Object.entries(matrix).sort()) {
|
||||||
if (typeof entry[1] === "string") {
|
if (typeof entry[1] === "string") {
|
||||||
automationID += `${entry[0]}:${entry[1]}/`;
|
automationID += `${entry[0]}:${entry[1]}/`;
|
||||||
} else {
|
} else {
|
||||||
|
|
|
||||||
|
|
@ -24,6 +24,7 @@ import { Features } from "./feature-flags";
|
||||||
import { Language } from "./languages";
|
import { Language } from "./languages";
|
||||||
import { getActionsLogger, Logger } from "./logging";
|
import { getActionsLogger, Logger } from "./logging";
|
||||||
import { parseRepositoryNwo } from "./repository";
|
import { parseRepositoryNwo } from "./repository";
|
||||||
|
import { CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF } from "./shared-environment";
|
||||||
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
|
import { getTotalCacheSize, uploadTrapCaches } from "./trap-caching";
|
||||||
import * as upload_lib from "./upload-lib";
|
import * as upload_lib from "./upload-lib";
|
||||||
import { UploadResult } from "./upload-lib";
|
import { UploadResult } from "./upload-lib";
|
||||||
|
|
@ -271,8 +272,14 @@ async function run() {
|
||||||
core.setOutput("db-locations", dbLocations);
|
core.setOutput("db-locations", dbLocations);
|
||||||
|
|
||||||
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
if (runStats && actionsUtil.getRequiredInput("upload") === "true") {
|
||||||
uploadResult = await upload_lib.uploadFromActions(outputDir, logger);
|
uploadResult = await upload_lib.uploadFromActions(
|
||||||
|
outputDir,
|
||||||
|
actionsUtil.getRequiredInput("checkout_path"),
|
||||||
|
actionsUtil.getOptionalInput("category"),
|
||||||
|
logger
|
||||||
|
);
|
||||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||||
|
core.exportVariable(CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF, "true");
|
||||||
} else {
|
} else {
|
||||||
logger.info("Not uploading results");
|
logger.info("Not uploading results");
|
||||||
}
|
}
|
||||||
|
|
@ -315,8 +322,6 @@ async function run() {
|
||||||
core.setFailed(error.message);
|
core.setFailed(error.message);
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(error);
|
|
||||||
|
|
||||||
if (error instanceof CodeQLAnalysisError) {
|
if (error instanceof CodeQLAnalysisError) {
|
||||||
const stats = { ...error.queriesStatusReport };
|
const stats = { ...error.queriesStatusReport };
|
||||||
await sendStatusReport(
|
await sendStatusReport(
|
||||||
|
|
@ -391,7 +396,6 @@ async function runWrapper() {
|
||||||
await runPromise;
|
await runPromise;
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
core.setFailed(`analyze action failed: ${error}`);
|
core.setFailed(`analyze action failed: ${error}`);
|
||||||
console.log(error);
|
|
||||||
}
|
}
|
||||||
await checkForTimeout();
|
await checkForTimeout();
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -355,8 +355,7 @@ export async function runQueries(
|
||||||
addSnippetsFlag,
|
addSnippetsFlag,
|
||||||
threadsFlag,
|
threadsFlag,
|
||||||
enableDebugLogging ? "-vv" : "-v",
|
enableDebugLogging ? "-vv" : "-v",
|
||||||
automationDetailsId,
|
automationDetailsId
|
||||||
featureEnablement
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,7 @@ import * as path from "path";
|
||||||
|
|
||||||
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||||
import * as toolcache from "@actions/tool-cache";
|
import * as toolcache from "@actions/tool-cache";
|
||||||
|
import * as safeWhich from "@chrisgavin/safe-which";
|
||||||
import test, { ExecutionContext } from "ava";
|
import test, { ExecutionContext } from "ava";
|
||||||
import del from "del";
|
import del from "del";
|
||||||
import * as yaml from "js-yaml";
|
import * as yaml from "js-yaml";
|
||||||
|
|
@ -442,16 +443,9 @@ test("databaseInterpretResults() does not set --sarif-add-query-help for 2.7.0",
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.7.0");
|
||||||
await codeqlObject.databaseInterpretResults(
|
// safeWhich throws because of the test CodeQL object.
|
||||||
"",
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
[],
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"-v",
|
|
||||||
"",
|
|
||||||
createFeatures([])
|
|
||||||
);
|
|
||||||
t.false(
|
t.false(
|
||||||
runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"),
|
runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"),
|
||||||
"--sarif-add-query-help should be absent, but it is present"
|
"--sarif-add-query-help should be absent, but it is present"
|
||||||
|
|
@ -462,16 +456,9 @@ test("databaseInterpretResults() sets --sarif-add-query-help for 2.7.1", async (
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.7.1");
|
||||||
await codeqlObject.databaseInterpretResults(
|
// safeWhich throws because of the test CodeQL object.
|
||||||
"",
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
[],
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"-v",
|
|
||||||
"",
|
|
||||||
createFeatures([])
|
|
||||||
);
|
|
||||||
t.true(
|
t.true(
|
||||||
runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"),
|
runnerConstructorStub.firstCall.args[1].includes("--sarif-add-query-help"),
|
||||||
"--sarif-add-query-help should be present, but it is absent"
|
"--sarif-add-query-help should be present, but it is absent"
|
||||||
|
|
@ -483,6 +470,8 @@ test("databaseInitCluster() without injected codescanning config", async (t) =>
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
sinon.stub(codeqlObject, "getVersion").resolves("2.8.1");
|
||||||
|
// safeWhich throws because of the test CodeQL object.
|
||||||
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
|
|
||||||
const thisStubConfig: Config = {
|
const thisStubConfig: Config = {
|
||||||
...stubConfig,
|
...stubConfig,
|
||||||
|
|
@ -858,23 +847,13 @@ test("does not use injected config", async (t: ExecutionContext<unknown>) => {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
test("databaseInterpretResults() sets --sarif-add-baseline-file-info when feature enabled", async (t) => {
|
test("databaseInterpretResults() sets --sarif-add-baseline-file-info for 2.11.3", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
sinon.stub(codeqlObject, "getVersion").resolves("2.11.3");
|
||||||
// The version of CodeQL is checked separately to determine feature enablement, and does not
|
// safeWhich throws because of the test CodeQL object.
|
||||||
// otherwise impact this test, so set it to 0.0.0.
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
await codeqlObject.databaseInterpretResults(
|
|
||||||
"",
|
|
||||||
[],
|
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"-v",
|
|
||||||
"",
|
|
||||||
createFeatures([Feature.FileBaselineInformationEnabled])
|
|
||||||
);
|
|
||||||
t.true(
|
t.true(
|
||||||
runnerConstructorStub.firstCall.args[1].includes(
|
runnerConstructorStub.firstCall.args[1].includes(
|
||||||
"--sarif-add-baseline-file-info"
|
"--sarif-add-baseline-file-info"
|
||||||
|
|
@ -883,23 +862,13 @@ test("databaseInterpretResults() sets --sarif-add-baseline-file-info when featur
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("databaseInterpretResults() does not set --sarif-add-baseline-file-info if feature disabled", async (t) => {
|
test("databaseInterpretResults() does not set --sarif-add-baseline-file-info for 2.11.2", async (t) => {
|
||||||
const runnerConstructorStub = stubToolRunnerConstructor();
|
const runnerConstructorStub = stubToolRunnerConstructor();
|
||||||
const codeqlObject = await codeql.getCodeQLForTesting();
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
// We need to set a CodeQL version such that running `databaseInterpretResults` does not crash.
|
sinon.stub(codeqlObject, "getVersion").resolves("2.11.2");
|
||||||
// The version of CodeQL is checked upstream to determine feature enablement, so it does not
|
// safeWhich throws because of the test CodeQL object.
|
||||||
// affect this test.
|
sinon.stub(safeWhich, "safeWhich").resolves("");
|
||||||
sinon.stub(codeqlObject, "getVersion").resolves("0.0.0");
|
await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "");
|
||||||
await codeqlObject.databaseInterpretResults(
|
|
||||||
"",
|
|
||||||
[],
|
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"",
|
|
||||||
"-v",
|
|
||||||
"",
|
|
||||||
createFeatures([])
|
|
||||||
);
|
|
||||||
t.false(
|
t.false(
|
||||||
runnerConstructorStub.firstCall.args[1].includes(
|
runnerConstructorStub.firstCall.args[1].includes(
|
||||||
"--sarif-add-baseline-file-info"
|
"--sarif-add-baseline-file-info"
|
||||||
|
|
|
||||||
|
|
@ -15,7 +15,7 @@ import * as api from "./api-client";
|
||||||
import { Config } from "./config-utils";
|
import { Config } from "./config-utils";
|
||||||
import * as defaults from "./defaults.json"; // Referenced from codeql-action-sync-tool!
|
import * as defaults from "./defaults.json"; // Referenced from codeql-action-sync-tool!
|
||||||
import { errorMatchers } from "./error-matcher";
|
import { errorMatchers } from "./error-matcher";
|
||||||
import { Feature, FeatureEnablement } from "./feature-flags";
|
import { FeatureEnablement } from "./feature-flags";
|
||||||
import { isTracedLanguage, Language } from "./languages";
|
import { isTracedLanguage, Language } from "./languages";
|
||||||
import { Logger } from "./logging";
|
import { Logger } from "./logging";
|
||||||
import { toolrunnerErrorCatcher } from "./toolrunner-error-catcher";
|
import { toolrunnerErrorCatcher } from "./toolrunner-error-catcher";
|
||||||
|
|
@ -172,13 +172,19 @@ export interface CodeQL {
|
||||||
addSnippetsFlag: string,
|
addSnippetsFlag: string,
|
||||||
threadsFlag: string,
|
threadsFlag: string,
|
||||||
verbosityFlag: string | undefined,
|
verbosityFlag: string | undefined,
|
||||||
automationDetailsId: string | undefined,
|
automationDetailsId: string | undefined
|
||||||
featureEnablement: FeatureEnablement
|
|
||||||
): Promise<string>;
|
): Promise<string>;
|
||||||
/**
|
/**
|
||||||
* Run 'codeql database print-baseline'.
|
* Run 'codeql database print-baseline'.
|
||||||
*/
|
*/
|
||||||
databasePrintBaseline(databasePath: string): Promise<string>;
|
databasePrintBaseline(databasePath: string): Promise<string>;
|
||||||
|
/**
|
||||||
|
* Run 'codeql diagnostics export'.
|
||||||
|
*/
|
||||||
|
diagnosticsExport(
|
||||||
|
sarifFile: string,
|
||||||
|
automationDetailsId: string | undefined
|
||||||
|
): Promise<void>;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface ResolveLanguagesOutput {
|
export interface ResolveLanguagesOutput {
|
||||||
|
|
@ -250,6 +256,7 @@ const CODEQL_VERSION_LUA_TRACER_CONFIG = "2.10.0";
|
||||||
export const CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
export const CODEQL_VERSION_CONFIG_FILES = "2.10.1";
|
||||||
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
const CODEQL_VERSION_LUA_TRACING_GO_WINDOWS_FIXED = "2.10.4";
|
||||||
export const CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
export const CODEQL_VERSION_GHES_PACK_DOWNLOAD = "2.10.4";
|
||||||
|
const CODEQL_VERSION_FILE_BASELINE_INFORMATION = "2.11.3";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This variable controls using the new style of tracing from the CodeQL
|
* This variable controls using the new style of tracing from the CodeQL
|
||||||
|
|
@ -634,6 +641,7 @@ export function setCodeQL(partialCodeql: Partial<CodeQL>): CodeQL {
|
||||||
partialCodeql,
|
partialCodeql,
|
||||||
"databasePrintBaseline"
|
"databasePrintBaseline"
|
||||||
),
|
),
|
||||||
|
diagnosticsExport: resolveFunction(partialCodeql, "diagnosticsExport"),
|
||||||
};
|
};
|
||||||
return cachedCodeQL;
|
return cachedCodeQL;
|
||||||
}
|
}
|
||||||
|
|
@ -675,7 +683,7 @@ async function getCodeQLForCmd(
|
||||||
cmd: string,
|
cmd: string,
|
||||||
checkVersion: boolean
|
checkVersion: boolean
|
||||||
): Promise<CodeQL> {
|
): Promise<CodeQL> {
|
||||||
const codeql = {
|
const codeql: CodeQL = {
|
||||||
getPath() {
|
getPath() {
|
||||||
return cmd;
|
return cmd;
|
||||||
},
|
},
|
||||||
|
|
@ -1016,7 +1024,7 @@ async function getCodeQLForCmd(
|
||||||
if (querySuitePath) {
|
if (querySuitePath) {
|
||||||
codeqlArgs.push(querySuitePath);
|
codeqlArgs.push(querySuitePath);
|
||||||
}
|
}
|
||||||
await runTool(cmd, codeqlArgs);
|
await toolrunnerErrorCatcher(cmd, codeqlArgs, errorMatchers);
|
||||||
},
|
},
|
||||||
async databaseInterpretResults(
|
async databaseInterpretResults(
|
||||||
databasePath: string,
|
databasePath: string,
|
||||||
|
|
@ -1025,8 +1033,7 @@ async function getCodeQLForCmd(
|
||||||
addSnippetsFlag: string,
|
addSnippetsFlag: string,
|
||||||
threadsFlag: string,
|
threadsFlag: string,
|
||||||
verbosityFlag: string,
|
verbosityFlag: string,
|
||||||
automationDetailsId: string | undefined,
|
automationDetailsId: string | undefined
|
||||||
featureEnablement: FeatureEnablement
|
|
||||||
): Promise<string> {
|
): Promise<string> {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
"database",
|
"database",
|
||||||
|
|
@ -1047,9 +1054,9 @@ async function getCodeQLForCmd(
|
||||||
codeqlArgs.push("--sarif-category", automationDetailsId);
|
codeqlArgs.push("--sarif-category", automationDetailsId);
|
||||||
}
|
}
|
||||||
if (
|
if (
|
||||||
await featureEnablement.getValue(
|
await util.codeQlVersionAbove(
|
||||||
Feature.FileBaselineInformationEnabled,
|
this,
|
||||||
this
|
CODEQL_VERSION_FILE_BASELINE_INFORMATION
|
||||||
)
|
)
|
||||||
) {
|
) {
|
||||||
codeqlArgs.push("--sarif-add-baseline-file-info");
|
codeqlArgs.push("--sarif-add-baseline-file-info");
|
||||||
|
|
@ -1059,7 +1066,12 @@ async function getCodeQLForCmd(
|
||||||
codeqlArgs.push(...querySuitePaths);
|
codeqlArgs.push(...querySuitePaths);
|
||||||
}
|
}
|
||||||
// capture stdout, which contains analysis summaries
|
// capture stdout, which contains analysis summaries
|
||||||
return await runTool(cmd, codeqlArgs);
|
const returnState = await toolrunnerErrorCatcher(
|
||||||
|
cmd,
|
||||||
|
codeqlArgs,
|
||||||
|
errorMatchers
|
||||||
|
);
|
||||||
|
return returnState.stdout;
|
||||||
},
|
},
|
||||||
async databasePrintBaseline(databasePath: string): Promise<string> {
|
async databasePrintBaseline(databasePath: string): Promise<string> {
|
||||||
const codeqlArgs = [
|
const codeqlArgs = [
|
||||||
|
|
@ -1151,6 +1163,22 @@ async function getCodeQLForCmd(
|
||||||
];
|
];
|
||||||
await new toolrunner.ToolRunner(cmd, args).exec();
|
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||||
},
|
},
|
||||||
|
async diagnosticsExport(
|
||||||
|
sarifFile: string,
|
||||||
|
automationDetailsId: string | undefined
|
||||||
|
): Promise<void> {
|
||||||
|
const args = [
|
||||||
|
"diagnostics",
|
||||||
|
"export",
|
||||||
|
"--format=sarif-latest",
|
||||||
|
`--output=${sarifFile}`,
|
||||||
|
...getExtraOptionsFromEnv(["diagnostics", "export"]),
|
||||||
|
];
|
||||||
|
if (automationDetailsId !== undefined) {
|
||||||
|
args.push("--sarif-category", automationDetailsId);
|
||||||
|
}
|
||||||
|
await new toolrunner.ToolRunner(cmd, args).exec();
|
||||||
|
},
|
||||||
};
|
};
|
||||||
// To ensure that status reports include the CodeQL CLI version wherever
|
// To ensure that status reports include the CodeQL CLI version wherever
|
||||||
// possible, we want to call getVersion(), which populates the version value
|
// possible, we want to call getVersion(), which populates the version value
|
||||||
|
|
@ -1247,11 +1275,16 @@ async function runTool(cmd: string, args: string[] = []) {
|
||||||
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
const exitCode = await new toolrunner.ToolRunner(cmd, args, {
|
||||||
listeners: {
|
listeners: {
|
||||||
stdout: (data: Buffer) => {
|
stdout: (data: Buffer) => {
|
||||||
output += data.toString();
|
output += data.toString("utf8");
|
||||||
},
|
},
|
||||||
stderr: (data: Buffer) => {
|
stderr: (data: Buffer) => {
|
||||||
const toRead = Math.min(maxErrorSize - error.length, data.length);
|
let readStartIndex = 0;
|
||||||
error += data.toString("utf8", 0, toRead);
|
// If the error is too large, then we only take the last 20,000 characters
|
||||||
|
if (data.length - maxErrorSize > 0) {
|
||||||
|
// Eg: if we have 20,000 the start index should be 2.
|
||||||
|
readStartIndex = data.length - maxErrorSize + 1;
|
||||||
|
}
|
||||||
|
error += data.toString("utf8", readStartIndex);
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
ignoreReturnCode: true,
|
ignoreReturnCode: true,
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,3 @@
|
||||||
{
|
{
|
||||||
"bundleVersion": "codeql-bundle-20221123"
|
"bundleVersion": "codeql-bundle-20221202"
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -20,6 +20,15 @@ test("noSourceCodeFound matches against example javascript output", async (t) =>
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("fatalError matches against example log output", async (t) => {
|
||||||
|
t.assert(
|
||||||
|
testErrorMatcher(
|
||||||
|
"fatalError",
|
||||||
|
"A fatal error occurred: Could not process query metadata for test-query.ql"
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
function testErrorMatcher(matcherName: string, logSample: string): boolean {
|
function testErrorMatcher(matcherName: string, logSample: string): boolean {
|
||||||
if (!(matcherName in namedMatchersForTesting)) {
|
if (!(matcherName in namedMatchersForTesting)) {
|
||||||
throw new Error(`Unknown matcher ${matcherName}`);
|
throw new Error(`Unknown matcher ${matcherName}`);
|
||||||
|
|
|
||||||
|
|
@ -18,6 +18,10 @@ export const namedMatchersForTesting: { [key: string]: ErrorMatcher } = {
|
||||||
"No code found during the build. Please see:\n" +
|
"No code found during the build. Please see:\n" +
|
||||||
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
"https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/troubleshooting-code-scanning#no-code-found-during-the-build",
|
||||||
},
|
},
|
||||||
|
fatalError: {
|
||||||
|
outputRegex: new RegExp("A fatal error occurred"),
|
||||||
|
message: "A fatal error occurred.",
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
// we collapse the matches into an array for use in execErrorCatcher
|
// we collapse the matches into an array for use in execErrorCatcher
|
||||||
|
|
|
||||||
|
|
@ -16,9 +16,9 @@ export enum Feature {
|
||||||
BypassToolcacheKotlinSwiftEnabled = "bypass_toolcache_kotlin_swift_enabled",
|
BypassToolcacheKotlinSwiftEnabled = "bypass_toolcache_kotlin_swift_enabled",
|
||||||
CliConfigFileEnabled = "cli_config_file_enabled",
|
CliConfigFileEnabled = "cli_config_file_enabled",
|
||||||
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
|
DisableKotlinAnalysisEnabled = "disable_kotlin_analysis_enabled",
|
||||||
FileBaselineInformationEnabled = "file_baseline_information_enabled",
|
|
||||||
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
|
MlPoweredQueriesEnabled = "ml_powered_queries_enabled",
|
||||||
TrapCachingEnabled = "trap_caching_enabled",
|
TrapCachingEnabled = "trap_caching_enabled",
|
||||||
|
UploadFailedSarifEnabled = "upload_failed_sarif_enabled",
|
||||||
}
|
}
|
||||||
|
|
||||||
export const featureConfig: Record<
|
export const featureConfig: Record<
|
||||||
|
|
@ -45,10 +45,6 @@ export const featureConfig: Record<
|
||||||
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
envVar: "CODEQL_PASS_CONFIG_TO_CLI",
|
||||||
minimumVersion: "2.11.1",
|
minimumVersion: "2.11.1",
|
||||||
},
|
},
|
||||||
[Feature.FileBaselineInformationEnabled]: {
|
|
||||||
envVar: "CODEQL_FILE_BASELINE_INFORMATION",
|
|
||||||
minimumVersion: "2.11.3",
|
|
||||||
},
|
|
||||||
[Feature.MlPoweredQueriesEnabled]: {
|
[Feature.MlPoweredQueriesEnabled]: {
|
||||||
envVar: "CODEQL_ML_POWERED_QUERIES",
|
envVar: "CODEQL_ML_POWERED_QUERIES",
|
||||||
minimumVersion: "2.7.5",
|
minimumVersion: "2.7.5",
|
||||||
|
|
@ -57,6 +53,10 @@ export const featureConfig: Record<
|
||||||
envVar: "CODEQL_TRAP_CACHING",
|
envVar: "CODEQL_TRAP_CACHING",
|
||||||
minimumVersion: undefined,
|
minimumVersion: undefined,
|
||||||
},
|
},
|
||||||
|
[Feature.UploadFailedSarifEnabled]: {
|
||||||
|
envVar: "CODEQL_ACTION_UPLOAD_FAILED_SARIF",
|
||||||
|
minimumVersion: "2.11.3",
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
||||||
|
|
@ -1,15 +1,27 @@
|
||||||
import test from "ava";
|
import test, { ExecutionContext } from "ava";
|
||||||
import * as sinon from "sinon";
|
import * as sinon from "sinon";
|
||||||
|
|
||||||
|
import * as actionsUtil from "./actions-util";
|
||||||
|
import * as codeql from "./codeql";
|
||||||
import * as configUtils from "./config-utils";
|
import * as configUtils from "./config-utils";
|
||||||
|
import { Feature } from "./feature-flags";
|
||||||
import * as initActionPostHelper from "./init-action-post-helper";
|
import * as initActionPostHelper from "./init-action-post-helper";
|
||||||
import { setupTests } from "./testing-utils";
|
import { getRunnerLogger } from "./logging";
|
||||||
|
import { parseRepositoryNwo } from "./repository";
|
||||||
|
import {
|
||||||
|
createFeatures,
|
||||||
|
getRecordingLogger,
|
||||||
|
setupTests,
|
||||||
|
} from "./testing-utils";
|
||||||
|
import * as uploadLib from "./upload-lib";
|
||||||
import * as util from "./util";
|
import * as util from "./util";
|
||||||
|
import * as workflow from "./workflow";
|
||||||
|
|
||||||
setupTests(test);
|
setupTests(test);
|
||||||
|
|
||||||
test("post: init action with debug mode off", async (t) => {
|
test("post: init action with debug mode off", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
|
|
||||||
const gitHubVersion: util.GitHubVersion = {
|
const gitHubVersion: util.GitHubVersion = {
|
||||||
|
|
@ -29,7 +41,10 @@ test("post: init action with debug mode off", async (t) => {
|
||||||
await initActionPostHelper.run(
|
await initActionPostHelper.run(
|
||||||
uploadDatabaseBundleSpy,
|
uploadDatabaseBundleSpy,
|
||||||
uploadLogsSpy,
|
uploadLogsSpy,
|
||||||
printDebugLogsSpy
|
printDebugLogsSpy,
|
||||||
|
parseRepositoryNwo("github/codeql-action"),
|
||||||
|
createFeatures([]),
|
||||||
|
getRunnerLogger(true)
|
||||||
);
|
);
|
||||||
|
|
||||||
t.assert(uploadDatabaseBundleSpy.notCalled);
|
t.assert(uploadDatabaseBundleSpy.notCalled);
|
||||||
|
|
@ -40,6 +55,7 @@ test("post: init action with debug mode off", async (t) => {
|
||||||
|
|
||||||
test("post: init action with debug mode on", async (t) => {
|
test("post: init action with debug mode on", async (t) => {
|
||||||
return await util.withTmpDir(async (tmpDir) => {
|
return await util.withTmpDir(async (tmpDir) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
process.env["RUNNER_TEMP"] = tmpDir;
|
process.env["RUNNER_TEMP"] = tmpDir;
|
||||||
|
|
||||||
const gitHubVersion: util.GitHubVersion = {
|
const gitHubVersion: util.GitHubVersion = {
|
||||||
|
|
@ -59,7 +75,10 @@ test("post: init action with debug mode on", async (t) => {
|
||||||
await initActionPostHelper.run(
|
await initActionPostHelper.run(
|
||||||
uploadDatabaseBundleSpy,
|
uploadDatabaseBundleSpy,
|
||||||
uploadLogsSpy,
|
uploadLogsSpy,
|
||||||
printDebugLogsSpy
|
printDebugLogsSpy,
|
||||||
|
parseRepositoryNwo("github/codeql-action"),
|
||||||
|
createFeatures([]),
|
||||||
|
getRunnerLogger(true)
|
||||||
);
|
);
|
||||||
|
|
||||||
t.assert(uploadDatabaseBundleSpy.called);
|
t.assert(uploadDatabaseBundleSpy.called);
|
||||||
|
|
@ -67,3 +86,117 @@ test("post: init action with debug mode on", async (t) => {
|
||||||
t.assert(printDebugLogsSpy.called);
|
t.assert(printDebugLogsSpy.called);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("uploads failed SARIF run for typical workflow", async (t) => {
|
||||||
|
const actionsWorkflow = createTestWorkflow([
|
||||||
|
{
|
||||||
|
name: "Checkout repository",
|
||||||
|
uses: "actions/checkout@v3",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Initialize CodeQL",
|
||||||
|
uses: "github/codeql-action/init@v2",
|
||||||
|
with: {
|
||||||
|
languages: "javascript",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "Perform CodeQL Analysis",
|
||||||
|
uses: "github/codeql-action/analyze@v2",
|
||||||
|
with: {
|
||||||
|
category: "my-category",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
await testFailedSarifUpload(t, actionsWorkflow, { category: "my-category" });
|
||||||
|
});
|
||||||
|
|
||||||
|
test("uploading failed SARIF run fails when workflow does not reference github/codeql-action", async (t) => {
|
||||||
|
const actionsWorkflow = createTestWorkflow([
|
||||||
|
{
|
||||||
|
name: "Checkout repository",
|
||||||
|
uses: "actions/checkout@v3",
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
await t.throwsAsync(
|
||||||
|
async () => await testFailedSarifUpload(t, actionsWorkflow)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
function createTestWorkflow(
|
||||||
|
steps: workflow.WorkflowJobStep[]
|
||||||
|
): workflow.Workflow {
|
||||||
|
return {
|
||||||
|
name: "CodeQL",
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: ["main"],
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: ["main"],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
jobs: {
|
||||||
|
analyze: {
|
||||||
|
name: "CodeQL Analysis",
|
||||||
|
"runs-on": "ubuntu-latest",
|
||||||
|
steps,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function testFailedSarifUpload(
|
||||||
|
t: ExecutionContext<unknown>,
|
||||||
|
actionsWorkflow: workflow.Workflow,
|
||||||
|
{ category }: { category?: string } = {}
|
||||||
|
): Promise<void> {
|
||||||
|
const config = {
|
||||||
|
codeQLCmd: "codeql",
|
||||||
|
debugMode: true,
|
||||||
|
languages: [],
|
||||||
|
packs: [],
|
||||||
|
} as unknown as configUtils.Config;
|
||||||
|
const messages = [];
|
||||||
|
process.env["GITHUB_JOB"] = "analyze";
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
process.env["GITHUB_WORKSPACE"] =
|
||||||
|
"/home/runner/work/codeql-action/codeql-action";
|
||||||
|
sinon.stub(actionsUtil, "getRequiredInput").withArgs("matrix").returns("{}");
|
||||||
|
|
||||||
|
const codeqlObject = await codeql.getCodeQLForTesting();
|
||||||
|
sinon.stub(codeql, "getCodeQL").resolves(codeqlObject);
|
||||||
|
const diagnosticsExportStub = sinon.stub(codeqlObject, "diagnosticsExport");
|
||||||
|
|
||||||
|
sinon.stub(workflow, "getWorkflow").resolves(actionsWorkflow);
|
||||||
|
|
||||||
|
const uploadFromActions = sinon.stub(uploadLib, "uploadFromActions");
|
||||||
|
uploadFromActions.resolves({ sarifID: "42" } as uploadLib.UploadResult);
|
||||||
|
const waitForProcessing = sinon.stub(uploadLib, "waitForProcessing");
|
||||||
|
|
||||||
|
await initActionPostHelper.uploadFailedSarif(
|
||||||
|
config,
|
||||||
|
parseRepositoryNwo("github/codeql-action"),
|
||||||
|
createFeatures([Feature.UploadFailedSarifEnabled]),
|
||||||
|
getRecordingLogger(messages)
|
||||||
|
);
|
||||||
|
t.deepEqual(messages, []);
|
||||||
|
t.true(
|
||||||
|
diagnosticsExportStub.calledOnceWith(sinon.match.string, category),
|
||||||
|
`Actual args were: ${diagnosticsExportStub.args}`
|
||||||
|
);
|
||||||
|
t.true(
|
||||||
|
uploadFromActions.calledOnceWith(
|
||||||
|
sinon.match.string,
|
||||||
|
sinon.match.string,
|
||||||
|
category,
|
||||||
|
sinon.match.any
|
||||||
|
),
|
||||||
|
`Actual args were: ${uploadFromActions.args}`
|
||||||
|
);
|
||||||
|
t.true(
|
||||||
|
waitForProcessing.calledOnceWith(sinon.match.any, "42", sinon.match.any, {
|
||||||
|
isUnsuccessfulExecution: true,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,25 +1,118 @@
|
||||||
import * as core from "@actions/core";
|
import * as core from "@actions/core";
|
||||||
|
|
||||||
import * as actionsUtil from "./actions-util";
|
import * as actionsUtil from "./actions-util";
|
||||||
import { getConfig } from "./config-utils";
|
import { getCodeQL } from "./codeql";
|
||||||
import { getActionsLogger } from "./logging";
|
import { Config, getConfig } from "./config-utils";
|
||||||
|
import { Feature, FeatureEnablement } from "./feature-flags";
|
||||||
|
import { Logger } from "./logging";
|
||||||
|
import { RepositoryNwo } from "./repository";
|
||||||
|
import { CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF } from "./shared-environment";
|
||||||
|
import * as uploadLib from "./upload-lib";
|
||||||
|
import { getRequiredEnvParam, isInTestMode, parseMatrixInput } from "./util";
|
||||||
|
import {
|
||||||
|
getCategoryInputOrThrow,
|
||||||
|
getCheckoutPathInputOrThrow,
|
||||||
|
getUploadInputOrThrow,
|
||||||
|
getWorkflow,
|
||||||
|
} from "./workflow";
|
||||||
|
|
||||||
|
export async function uploadFailedSarif(
|
||||||
|
config: Config,
|
||||||
|
repositoryNwo: RepositoryNwo,
|
||||||
|
featureEnablement: FeatureEnablement,
|
||||||
|
logger: Logger
|
||||||
|
) {
|
||||||
|
if (!config.codeQLCmd) {
|
||||||
|
logger.warning(
|
||||||
|
"CodeQL command not found. Unable to upload failed SARIF file."
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const codeql = await getCodeQL(config.codeQLCmd);
|
||||||
|
if (
|
||||||
|
!(await featureEnablement.getValue(
|
||||||
|
Feature.UploadFailedSarifEnabled,
|
||||||
|
codeql
|
||||||
|
))
|
||||||
|
) {
|
||||||
|
logger.debug("Uploading failed SARIF is disabled.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const workflow = await getWorkflow();
|
||||||
|
const jobName = getRequiredEnvParam("GITHUB_JOB");
|
||||||
|
const matrix = parseMatrixInput(actionsUtil.getRequiredInput("matrix"));
|
||||||
|
if (
|
||||||
|
getUploadInputOrThrow(workflow, jobName, matrix) !== "true" ||
|
||||||
|
isInTestMode()
|
||||||
|
) {
|
||||||
|
logger.debug(
|
||||||
|
"Won't upload a failed SARIF file since SARIF upload is disabled."
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const category = getCategoryInputOrThrow(workflow, jobName, matrix);
|
||||||
|
const checkoutPath = getCheckoutPathInputOrThrow(workflow, jobName, matrix);
|
||||||
|
|
||||||
|
const sarifFile = "../codeql-failed-run.sarif";
|
||||||
|
await codeql.diagnosticsExport(sarifFile, category);
|
||||||
|
|
||||||
|
core.info(`Uploading failed SARIF file ${sarifFile}`);
|
||||||
|
const uploadResult = await uploadLib.uploadFromActions(
|
||||||
|
sarifFile,
|
||||||
|
checkoutPath,
|
||||||
|
category,
|
||||||
|
logger
|
||||||
|
);
|
||||||
|
await uploadLib.waitForProcessing(
|
||||||
|
repositoryNwo,
|
||||||
|
uploadResult.sarifID,
|
||||||
|
logger,
|
||||||
|
{ isUnsuccessfulExecution: true }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
export async function run(
|
export async function run(
|
||||||
uploadDatabaseBundleDebugArtifact: Function,
|
uploadDatabaseBundleDebugArtifact: Function,
|
||||||
uploadLogsDebugArtifact: Function,
|
uploadLogsDebugArtifact: Function,
|
||||||
printDebugLogs: Function
|
printDebugLogs: Function,
|
||||||
|
repositoryNwo: RepositoryNwo,
|
||||||
|
featureEnablement: FeatureEnablement,
|
||||||
|
logger: Logger
|
||||||
) {
|
) {
|
||||||
const logger = getActionsLogger();
|
|
||||||
|
|
||||||
const config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
const config = await getConfig(actionsUtil.getTemporaryDirectory(), logger);
|
||||||
if (config === undefined) {
|
if (config === undefined) {
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Debugging artifacts are unavailable since the 'init' Action failed before it could produce any."
|
"Debugging artifacts are unavailable since the 'init' Action failed before it could produce any."
|
||||||
);
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Environment variable used to integration test uploading a SARIF file for failed runs
|
||||||
|
const expectFailedSarifUpload =
|
||||||
|
process.env["CODEQL_ACTION_EXPECT_UPLOAD_FAILED_SARIF"] === "true";
|
||||||
|
|
||||||
|
if (process.env[CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF] !== "true") {
|
||||||
|
try {
|
||||||
|
await uploadFailedSarif(config, repositoryNwo, featureEnablement, logger);
|
||||||
|
} catch (e) {
|
||||||
|
if (expectFailedSarifUpload) {
|
||||||
|
throw new Error(
|
||||||
|
"Expected to upload a SARIF file for the failed run, but encountered " +
|
||||||
|
`the following error: ${e}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
logger.info(
|
||||||
|
`Failed to upload a SARIF file for the failed run. Error: ${e}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else if (expectFailedSarifUpload) {
|
||||||
|
throw new Error(
|
||||||
|
"Expected to upload a SARIF file for the failed run, but didn't."
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Upload appropriate Actions artifacts for debugging
|
// Upload appropriate Actions artifacts for debugging
|
||||||
if (config?.debugMode) {
|
if (config.debugMode) {
|
||||||
core.info(
|
core.info(
|
||||||
"Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts..."
|
"Debug mode is on. Uploading available database bundles and logs as Actions debugging artifacts..."
|
||||||
);
|
);
|
||||||
|
|
|
||||||
|
|
@ -7,15 +7,37 @@
|
||||||
import * as core from "@actions/core";
|
import * as core from "@actions/core";
|
||||||
|
|
||||||
import * as actionsUtil from "./actions-util";
|
import * as actionsUtil from "./actions-util";
|
||||||
|
import { getGitHubVersion } from "./api-client";
|
||||||
import * as debugArtifacts from "./debug-artifacts";
|
import * as debugArtifacts from "./debug-artifacts";
|
||||||
|
import { Features } from "./feature-flags";
|
||||||
import * as initActionPostHelper from "./init-action-post-helper";
|
import * as initActionPostHelper from "./init-action-post-helper";
|
||||||
|
import { getActionsLogger } from "./logging";
|
||||||
|
import { parseRepositoryNwo } from "./repository";
|
||||||
|
import { checkGitHubVersionInRange, getRequiredEnvParam } from "./util";
|
||||||
|
|
||||||
async function runWrapper() {
|
async function runWrapper() {
|
||||||
try {
|
try {
|
||||||
|
const logger = getActionsLogger();
|
||||||
|
const gitHubVersion = await getGitHubVersion();
|
||||||
|
checkGitHubVersionInRange(gitHubVersion, logger);
|
||||||
|
|
||||||
|
const repositoryNwo = parseRepositoryNwo(
|
||||||
|
getRequiredEnvParam("GITHUB_REPOSITORY")
|
||||||
|
);
|
||||||
|
const features = new Features(
|
||||||
|
gitHubVersion,
|
||||||
|
repositoryNwo,
|
||||||
|
actionsUtil.getTemporaryDirectory(),
|
||||||
|
logger
|
||||||
|
);
|
||||||
|
|
||||||
await initActionPostHelper.run(
|
await initActionPostHelper.run(
|
||||||
debugArtifacts.uploadDatabaseBundleDebugArtifact,
|
debugArtifacts.uploadDatabaseBundleDebugArtifact,
|
||||||
debugArtifacts.uploadLogsDebugArtifact,
|
debugArtifacts.uploadLogsDebugArtifact,
|
||||||
actionsUtil.printDebugLogs
|
actionsUtil.printDebugLogs,
|
||||||
|
repositoryNwo,
|
||||||
|
features,
|
||||||
|
logger
|
||||||
);
|
);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
core.setFailed(`init post-action step failed: ${error}`);
|
core.setFailed(`init post-action step failed: ${error}`);
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ import {
|
||||||
getTemporaryDirectory,
|
getTemporaryDirectory,
|
||||||
sendStatusReport,
|
sendStatusReport,
|
||||||
StatusReportBase,
|
StatusReportBase,
|
||||||
validateWorkflow,
|
|
||||||
} from "./actions-util";
|
} from "./actions-util";
|
||||||
import { getGitHubVersion } from "./api-client";
|
import { getGitHubVersion } from "./api-client";
|
||||||
import { CodeQL, CODEQL_VERSION_NEW_TRACING } from "./codeql";
|
import { CodeQL, CODEQL_VERSION_NEW_TRACING } from "./codeql";
|
||||||
|
|
@ -43,6 +42,7 @@ import {
|
||||||
isHostedRunner,
|
isHostedRunner,
|
||||||
shouldBypassToolcache,
|
shouldBypassToolcache,
|
||||||
} from "./util";
|
} from "./util";
|
||||||
|
import { validateWorkflow } from "./workflow";
|
||||||
|
|
||||||
// eslint-disable-next-line import/no-commonjs
|
// eslint-disable-next-line import/no-commonjs
|
||||||
const pkg = require("../package.json");
|
const pkg = require("../package.json");
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,24 @@
|
||||||
export const ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
|
/**
|
||||||
// The time at which the first action (normally init) started executing.
|
* This environment variable is set to true when the `analyze` Action
|
||||||
// If a workflow invokes a different action without first invoking the init
|
* successfully uploads a SARIF file. It does NOT indicate whether the
|
||||||
// action (i.e. the upload action is being used by a third-party integrator)
|
* SARIF file was processed successfully.
|
||||||
// then this variable will be assigned the start time of the action invoked
|
*/
|
||||||
// rather that the init action.
|
export const CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF =
|
||||||
export const CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
"CODEQL_ACTION_ANALYZE_DID_UPLOAD_SARIF";
|
||||||
|
|
||||||
export const CODEQL_ACTION_TESTING_ENVIRONMENT =
|
export const CODEQL_ACTION_TESTING_ENVIRONMENT =
|
||||||
"CODEQL_ACTION_TESTING_ENVIRONMENT";
|
"CODEQL_ACTION_TESTING_ENVIRONMENT";
|
||||||
|
|
||||||
/** Used to disable uploading SARIF results or status reports to the GitHub API */
|
/** Used to disable uploading SARIF results or status reports to the GitHub API */
|
||||||
export const CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
|
export const CODEQL_ACTION_TEST_MODE = "CODEQL_ACTION_TEST_MODE";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The time at which the first action (normally init) started executing.
|
||||||
|
* If a workflow invokes a different action without first invoking the init
|
||||||
|
* action (i.e. the upload action is being used by a third-party integrator)
|
||||||
|
* then this variable will be assigned the start time of the action invoked
|
||||||
|
* rather that the init action.
|
||||||
|
*/
|
||||||
|
export const CODEQL_WORKFLOW_STARTED_AT = "CODEQL_WORKFLOW_STARTED_AT";
|
||||||
|
|
||||||
|
export const ODASA_TRACER_CONFIGURATION = "ODASA_TRACER_CONFIGURATION";
|
||||||
|
|
|
||||||
|
|
@ -21,7 +21,8 @@ test("matchers are never applied if non-error exit", async (t) => {
|
||||||
|
|
||||||
t.deepEqual(await exec.exec("node", testArgs), 0);
|
t.deepEqual(await exec.exec("node", testArgs), 0);
|
||||||
|
|
||||||
t.deepEqual(await toolrunnerErrorCatcher("node", testArgs, matchers), 0);
|
const returnState = await toolrunnerErrorCatcher("node", testArgs, matchers);
|
||||||
|
t.deepEqual(returnState.exitCode, 0);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("regex matchers are applied to stdout for non-zero exit code", async (t) => {
|
test("regex matchers are applied to stdout for non-zero exit code", async (t) => {
|
||||||
|
|
@ -150,12 +151,11 @@ test("execErrorCatcher respects the ignoreReturnValue option", async (t) => {
|
||||||
{ instanceOf: Error }
|
{ instanceOf: Error }
|
||||||
);
|
);
|
||||||
|
|
||||||
t.deepEqual(
|
const returnState = await toolrunnerErrorCatcher("node", testArgs, [], {
|
||||||
await toolrunnerErrorCatcher("node", testArgs, [], {
|
ignoreReturnCode: true,
|
||||||
ignoreReturnCode: true,
|
});
|
||||||
}),
|
|
||||||
199
|
t.deepEqual(returnState.exitCode, 199);
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
test("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
||||||
|
|
@ -176,12 +176,10 @@ test("execErrorCatcher preserves behavior of provided listeners", async (t) => {
|
||||||
|
|
||||||
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
|
const testArgs = buildDummyArgs(stdoutExpected, stderrExpected, "", 0);
|
||||||
|
|
||||||
t.deepEqual(
|
const returnState = await toolrunnerErrorCatcher("node", testArgs, [], {
|
||||||
await toolrunnerErrorCatcher("node", testArgs, [], {
|
listeners,
|
||||||
listeners,
|
});
|
||||||
}),
|
t.deepEqual(returnState.exitCode, 0);
|
||||||
0
|
|
||||||
);
|
|
||||||
|
|
||||||
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
|
t.deepEqual(stdoutActual, `${stdoutExpected}\n`);
|
||||||
t.deepEqual(stderrActual, `${stderrExpected}\n`);
|
t.deepEqual(stderrActual, `${stderrExpected}\n`);
|
||||||
|
|
|
||||||
|
|
@ -4,6 +4,11 @@ import * as safeWhich from "@chrisgavin/safe-which";
|
||||||
|
|
||||||
import { ErrorMatcher } from "./error-matcher";
|
import { ErrorMatcher } from "./error-matcher";
|
||||||
|
|
||||||
|
export interface ReturnState {
|
||||||
|
exitCode: number;
|
||||||
|
stdout: string;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
|
* Wrapper for toolrunner.Toolrunner which checks for specific return code and/or regex matches in console output.
|
||||||
* Output will be streamed to the live console as well as captured for subsequent processing.
|
* Output will be streamed to the live console as well as captured for subsequent processing.
|
||||||
|
|
@ -13,14 +18,14 @@ import { ErrorMatcher } from "./error-matcher";
|
||||||
* @param args optional arguments for tool. Escaping is handled by the lib.
|
* @param args optional arguments for tool. Escaping is handled by the lib.
|
||||||
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
|
* @param matchers defines specific codes and/or regexes that should lead to return of a custom error
|
||||||
* @param options optional exec options. See ExecOptions
|
* @param options optional exec options. See ExecOptions
|
||||||
* @returns Promise<number> exit code
|
* @returns ReturnState exit code and stdout output, if applicable
|
||||||
*/
|
*/
|
||||||
export async function toolrunnerErrorCatcher(
|
export async function toolrunnerErrorCatcher(
|
||||||
commandLine: string,
|
commandLine: string,
|
||||||
args?: string[],
|
args?: string[],
|
||||||
matchers?: ErrorMatcher[],
|
matchers?: ErrorMatcher[],
|
||||||
options?: im.ExecOptions
|
options?: im.ExecOptions
|
||||||
): Promise<number> {
|
): Promise<ReturnState> {
|
||||||
let stdout = "";
|
let stdout = "";
|
||||||
let stderr = "";
|
let stderr = "";
|
||||||
|
|
||||||
|
|
@ -40,9 +45,9 @@ export async function toolrunnerErrorCatcher(
|
||||||
};
|
};
|
||||||
|
|
||||||
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
// we capture the original return code or error so that if no match is found we can duplicate the behavior
|
||||||
let returnState: Error | number;
|
let exitCode: number;
|
||||||
try {
|
try {
|
||||||
returnState = await new toolrunner.ToolRunner(
|
exitCode = await new toolrunner.ToolRunner(
|
||||||
await safeWhich.safeWhich(commandLine),
|
await safeWhich.safeWhich(commandLine),
|
||||||
args,
|
args,
|
||||||
{
|
{
|
||||||
|
|
@ -51,35 +56,32 @@ export async function toolrunnerErrorCatcher(
|
||||||
ignoreReturnCode: true, // so we can check for specific codes using the matchers
|
ignoreReturnCode: true, // so we can check for specific codes using the matchers
|
||||||
}
|
}
|
||||||
).exec();
|
).exec();
|
||||||
} catch (e) {
|
|
||||||
returnState = e instanceof Error ? e : new Error(String(e));
|
|
||||||
}
|
|
||||||
|
|
||||||
// if there is a zero return code then we do not apply the matchers
|
// if there is a zero return code then we do not apply the matchers
|
||||||
if (returnState === 0) return returnState;
|
if (exitCode === 0) return { exitCode, stdout };
|
||||||
|
|
||||||
if (matchers) {
|
if (matchers) {
|
||||||
for (const matcher of matchers) {
|
for (const matcher of matchers) {
|
||||||
if (
|
if (
|
||||||
matcher.exitCode === returnState ||
|
matcher.exitCode === exitCode ||
|
||||||
matcher.outputRegex?.test(stderr) ||
|
matcher.outputRegex?.test(stderr) ||
|
||||||
matcher.outputRegex?.test(stdout)
|
matcher.outputRegex?.test(stdout)
|
||||||
) {
|
) {
|
||||||
throw new Error(matcher.message);
|
throw new Error(matcher.message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if (typeof returnState === "number") {
|
|
||||||
// only if we were instructed to ignore the return code do we ever return it non-zero
|
// only if we were instructed to ignore the return code do we ever return it non-zero
|
||||||
if (options?.ignoreReturnCode) {
|
if (options?.ignoreReturnCode) {
|
||||||
return returnState;
|
return { exitCode, stdout };
|
||||||
} else {
|
} else {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`The process '${commandLine}' failed with exit code ${returnState}`
|
`The process '${commandLine}' failed with exit code ${exitCode}`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} catch (e) {
|
||||||
throw returnState;
|
const error = e instanceof Error ? e : new Error(String(e));
|
||||||
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,7 @@ import { parseRepositoryNwo, RepositoryNwo } from "./repository";
|
||||||
import * as sharedEnv from "./shared-environment";
|
import * as sharedEnv from "./shared-environment";
|
||||||
import * as util from "./util";
|
import * as util from "./util";
|
||||||
import { SarifFile, SarifResult, SarifRun } from "./util";
|
import { SarifFile, SarifResult, SarifRun } from "./util";
|
||||||
|
import * as workflow from "./workflow";
|
||||||
|
|
||||||
// Takes a list of paths to sarif files and combines them together,
|
// Takes a list of paths to sarif files and combines them together,
|
||||||
// returning the contents of the combined sarif file.
|
// returning the contents of the combined sarif file.
|
||||||
|
|
@ -157,23 +158,22 @@ export function findSarifFilesInDir(sarifPath: string): string[] {
|
||||||
|
|
||||||
// Uploads a single sarif file or a directory of sarif files
|
// Uploads a single sarif file or a directory of sarif files
|
||||||
// depending on what the path happens to refer to.
|
// depending on what the path happens to refer to.
|
||||||
// Returns true iff the upload occurred and succeeded
|
|
||||||
export async function uploadFromActions(
|
export async function uploadFromActions(
|
||||||
sarifPath: string,
|
sarifPath: string,
|
||||||
|
checkoutPath: string,
|
||||||
|
category: string | undefined,
|
||||||
logger: Logger
|
logger: Logger
|
||||||
): Promise<UploadResult> {
|
): Promise<UploadResult> {
|
||||||
return await uploadFiles(
|
return await uploadFiles(
|
||||||
getSarifFilePaths(sarifPath),
|
getSarifFilePaths(sarifPath),
|
||||||
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
|
parseRepositoryNwo(util.getRequiredEnvParam("GITHUB_REPOSITORY")),
|
||||||
await actionsUtil.getCommitOid(
|
await actionsUtil.getCommitOid(checkoutPath),
|
||||||
actionsUtil.getRequiredInput("checkout_path")
|
|
||||||
),
|
|
||||||
await actionsUtil.getRef(),
|
await actionsUtil.getRef(),
|
||||||
await actionsUtil.getAnalysisKey(),
|
await actionsUtil.getAnalysisKey(),
|
||||||
actionsUtil.getOptionalInput("category"),
|
category,
|
||||||
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
|
util.getRequiredEnvParam("GITHUB_WORKFLOW"),
|
||||||
actionsUtil.getWorkflowRunID(),
|
workflow.getWorkflowRunID(),
|
||||||
actionsUtil.getRequiredInput("checkout_path"),
|
checkoutPath,
|
||||||
actionsUtil.getRequiredInput("matrix"),
|
actionsUtil.getRequiredInput("matrix"),
|
||||||
logger
|
logger
|
||||||
);
|
);
|
||||||
|
|
@ -385,60 +385,119 @@ async function uploadFiles(
|
||||||
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
const STATUS_CHECK_FREQUENCY_MILLISECONDS = 5 * 1000;
|
||||||
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
const STATUS_CHECK_TIMEOUT_MILLISECONDS = 2 * 60 * 1000;
|
||||||
|
|
||||||
// Waits until either the analysis is successfully processed, a processing error is reported, or STATUS_CHECK_TIMEOUT_MILLISECONDS elapses.
|
type ProcessingStatus = "pending" | "complete" | "failed";
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Waits until either the analysis is successfully processed, a processing error
|
||||||
|
* is reported, or `STATUS_CHECK_TIMEOUT_MILLISECONDS` elapses.
|
||||||
|
*
|
||||||
|
* If `isUnsuccessfulExecution` is passed, will throw an error if the analysis
|
||||||
|
* processing does not produce a single error mentioning the unsuccessful
|
||||||
|
* execution.
|
||||||
|
*/
|
||||||
export async function waitForProcessing(
|
export async function waitForProcessing(
|
||||||
repositoryNwo: RepositoryNwo,
|
repositoryNwo: RepositoryNwo,
|
||||||
sarifID: string,
|
sarifID: string,
|
||||||
logger: Logger
|
logger: Logger,
|
||||||
|
options: { isUnsuccessfulExecution: boolean } = {
|
||||||
|
isUnsuccessfulExecution: false,
|
||||||
|
}
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
logger.startGroup("Waiting for processing to finish");
|
logger.startGroup("Waiting for processing to finish");
|
||||||
const client = api.getApiClient();
|
try {
|
||||||
|
const client = api.getApiClient();
|
||||||
|
|
||||||
const statusCheckingStarted = Date.now();
|
const statusCheckingStarted = Date.now();
|
||||||
// eslint-disable-next-line no-constant-condition
|
// eslint-disable-next-line no-constant-condition
|
||||||
while (true) {
|
while (true) {
|
||||||
if (
|
if (
|
||||||
Date.now() >
|
Date.now() >
|
||||||
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
|
statusCheckingStarted + STATUS_CHECK_TIMEOUT_MILLISECONDS
|
||||||
) {
|
) {
|
||||||
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
// If the analysis hasn't finished processing in the allotted time, we continue anyway rather than failing.
|
||||||
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
// It's possible the analysis will eventually finish processing, but it's not worth spending more Actions time waiting.
|
||||||
logger.warning(
|
logger.warning(
|
||||||
"Timed out waiting for analysis to finish processing. Continuing."
|
"Timed out waiting for analysis to finish processing. Continuing."
|
||||||
);
|
);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
let response: OctokitResponse<any> | undefined = undefined;
|
let response: OctokitResponse<any> | undefined = undefined;
|
||||||
try {
|
try {
|
||||||
response = await client.request(
|
response = await client.request(
|
||||||
"GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
|
"GET /repos/:owner/:repo/code-scanning/sarifs/:sarif_id",
|
||||||
{
|
{
|
||||||
owner: repositoryNwo.owner,
|
owner: repositoryNwo.owner,
|
||||||
repo: repositoryNwo.repo,
|
repo: repositoryNwo.repo,
|
||||||
sarif_id: sarifID,
|
sarif_id: sarifID,
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
logger.warning(
|
logger.warning(
|
||||||
`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`
|
`An error occurred checking the status of the delivery. ${e} It should still be processed in the background, but errors that occur during processing may not be reported.`
|
||||||
);
|
);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
const status = response.data.processing_status;
|
const status = response.data.processing_status as ProcessingStatus;
|
||||||
logger.info(`Analysis upload status is ${status}.`);
|
logger.info(`Analysis upload status is ${status}.`);
|
||||||
if (status === "complete") {
|
|
||||||
break;
|
|
||||||
} else if (status === "pending") {
|
|
||||||
logger.debug("Analysis processing is still pending...");
|
|
||||||
} else if (status === "failed") {
|
|
||||||
throw new Error(
|
|
||||||
`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
if (status === "pending") {
|
||||||
|
logger.debug("Analysis processing is still pending...");
|
||||||
|
} else if (options.isUnsuccessfulExecution) {
|
||||||
|
// We expect a specific processing error for unsuccessful executions, so
|
||||||
|
// handle these separately.
|
||||||
|
handleProcessingResultForUnsuccessfulExecution(
|
||||||
|
response,
|
||||||
|
status,
|
||||||
|
logger
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
} else if (status === "complete") {
|
||||||
|
break;
|
||||||
|
} else if (status === "failed") {
|
||||||
|
throw new Error(
|
||||||
|
`Code Scanning could not process the submitted SARIF file:\n${response.data.errors}`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
util.assertNever(status);
|
||||||
|
}
|
||||||
|
|
||||||
|
await util.delay(STATUS_CHECK_FREQUENCY_MILLISECONDS);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
logger.endGroup();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks the processing result for an unsuccessful execution. Throws if the
|
||||||
|
* result is not a failure with a single "unsuccessful execution" error.
|
||||||
|
*/
|
||||||
|
function handleProcessingResultForUnsuccessfulExecution(
|
||||||
|
response: OctokitResponse<any, number>,
|
||||||
|
status: Exclude<ProcessingStatus, "pending">,
|
||||||
|
logger: Logger
|
||||||
|
): void {
|
||||||
|
if (
|
||||||
|
status === "failed" &&
|
||||||
|
Array.isArray(response.data.errors) &&
|
||||||
|
response.data.errors.length === 1 &&
|
||||||
|
response.data.errors[0].toString().startsWith("unsuccessful execution")
|
||||||
|
) {
|
||||||
|
logger.debug(
|
||||||
|
"Successfully uploaded a SARIF file for the unsuccessful execution. Received expected " +
|
||||||
|
'"unsuccessful execution" error, and no other errors.'
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
const shortMessage =
|
||||||
|
"Failed to upload a SARIF file for the unsuccessful execution. Code scanning status " +
|
||||||
|
"information for the repository may be out of date as a result.";
|
||||||
|
const longMessage =
|
||||||
|
shortMessage + status === "failed"
|
||||||
|
? ` Processing errors: ${response.data.errors}`
|
||||||
|
: ' Encountered no processing errors, but expected to receive an "unsuccessful execution" error.';
|
||||||
|
logger.debug(longMessage);
|
||||||
|
throw new Error(shortMessage);
|
||||||
}
|
}
|
||||||
logger.endGroup();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function validateUniqueCategory(sarif: SarifFile): void {
|
export function validateUniqueCategory(sarif: SarifFile): void {
|
||||||
|
|
|
||||||
|
|
@ -53,6 +53,8 @@ async function run() {
|
||||||
try {
|
try {
|
||||||
const uploadResult = await upload_lib.uploadFromActions(
|
const uploadResult = await upload_lib.uploadFromActions(
|
||||||
actionsUtil.getRequiredInput("sarif_file"),
|
actionsUtil.getRequiredInput("sarif_file"),
|
||||||
|
actionsUtil.getRequiredInput("checkout_path"),
|
||||||
|
actionsUtil.getOptionalInput("category"),
|
||||||
getActionsLogger()
|
getActionsLogger()
|
||||||
);
|
);
|
||||||
core.setOutput("sarif-id", uploadResult.sarifID);
|
core.setOutput("sarif-id", uploadResult.sarifID);
|
||||||
|
|
|
||||||
|
|
@ -892,3 +892,12 @@ export async function shouldBypassToolcache(
|
||||||
}
|
}
|
||||||
return bypass;
|
return bypass;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function parseMatrixInput(
|
||||||
|
matrixInput: string | undefined
|
||||||
|
): { [key: string]: string } | undefined {
|
||||||
|
if (matrixInput === undefined || matrixInput === "null") {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return JSON.parse(matrixInput);
|
||||||
|
}
|
||||||
|
|
|
||||||
682
src/workflow.test.ts
Normal file
682
src/workflow.test.ts
Normal file
|
|
@ -0,0 +1,682 @@
|
||||||
|
import test from "ava";
|
||||||
|
import * as yaml from "js-yaml";
|
||||||
|
|
||||||
|
import { setupTests } from "./testing-utils";
|
||||||
|
import {
|
||||||
|
CodedError,
|
||||||
|
formatWorkflowCause,
|
||||||
|
formatWorkflowErrors,
|
||||||
|
getCategoryInputOrThrow,
|
||||||
|
getWorkflowErrors,
|
||||||
|
patternIsSuperset,
|
||||||
|
Workflow,
|
||||||
|
WorkflowErrors,
|
||||||
|
} from "./workflow";
|
||||||
|
|
||||||
|
function errorCodes(
|
||||||
|
actual: CodedError[],
|
||||||
|
expected: CodedError[]
|
||||||
|
): [string[], string[]] {
|
||||||
|
return [actual.map(({ code }) => code), expected.map(({ code }) => code)];
|
||||||
|
}
|
||||||
|
|
||||||
|
setupTests(test);
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on is empty", (t) => {
|
||||||
|
const errors = getWorkflowErrors({ on: {} });
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is an array missing pull_request", (t) => {
|
||||||
|
const errors = getWorkflowErrors({ on: ["push"] });
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is an array missing push", (t) => {
|
||||||
|
const errors = getWorkflowErrors({ on: ["pull_request"] });
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MissingPushHook]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is valid", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is a valid superset", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: ["push", "pull_request", "schedule"],
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push should not have a path", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"], paths: ["test/*"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.PathsSpecified]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is a correct object", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: ["main"] } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_requests is a string", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: { push: { branches: ["main"] }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_requests is a string and correct", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: { push: { branches: "*" }, pull_request: { branches: "*" } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is correct with empty objects", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is mismatched", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is not mismatched", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main", "feature"] },
|
||||||
|
pull_request: { branches: ["main"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push is mismatched for pull_request", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["main"] },
|
||||||
|
pull_request: { branches: ["main", "feature"] },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() for a range of malformed workflows", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: 1,
|
||||||
|
pull_request: 1,
|
||||||
|
},
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: 1,
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: [1],
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { 1: 1 },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: 1 },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [1] },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: 1 } },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: { steps: [{ notrun: "git checkout HEAD^2" }] } },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: 1,
|
||||||
|
jobs: { test: [undefined] },
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(getWorkflowErrors(1 as any), []));
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
pull_request: {
|
||||||
|
branches: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
} as any),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for every branch but push specifies branches", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: ["main"]
|
||||||
|
pull_request:
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for wildcard branches", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/*"] },
|
||||||
|
pull_request: { branches: "feature/moose" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.pull_request for mismatched wildcard branches", (t) => {
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: {
|
||||||
|
push: { branches: ["feature/moose"] },
|
||||||
|
pull_request: { branches: "feature/*" },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.MismatchedBranches]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when HEAD^2 is checked out", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
|
||||||
|
const errors = getWorkflowErrors({
|
||||||
|
on: ["push", "pull_request"],
|
||||||
|
jobs: { test: { steps: [{ run: "git checkout HEAD^2" }] } },
|
||||||
|
});
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowErrors() when there is one error", (t) => {
|
||||||
|
const message = formatWorkflowErrors([WorkflowErrors.CheckoutWrongHead]);
|
||||||
|
t.true(message.startsWith("1 issue was detected with this workflow:"));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowErrors() when there are multiple errors", (t) => {
|
||||||
|
const message = formatWorkflowErrors([
|
||||||
|
WorkflowErrors.CheckoutWrongHead,
|
||||||
|
WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
t.true(message.startsWith("2 issues were detected with this workflow:"));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowCause() with no errors", (t) => {
|
||||||
|
const message = formatWorkflowCause([]);
|
||||||
|
|
||||||
|
t.deepEqual(message, undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("formatWorkflowCause()", (t) => {
|
||||||
|
const message = formatWorkflowCause([
|
||||||
|
WorkflowErrors.CheckoutWrongHead,
|
||||||
|
WorkflowErrors.PathsSpecified,
|
||||||
|
]);
|
||||||
|
|
||||||
|
t.deepEqual(message, "CheckoutWrongHead,PathsSpecified");
|
||||||
|
t.deepEqual(formatWorkflowCause([]), undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("patternIsSuperset()", (t) => {
|
||||||
|
t.false(patternIsSuperset("main-*", "main"));
|
||||||
|
t.true(patternIsSuperset("*", "*"));
|
||||||
|
t.true(patternIsSuperset("*", "main-*"));
|
||||||
|
t.false(patternIsSuperset("main-*", "*"));
|
||||||
|
t.false(patternIsSuperset("main-*", "main"));
|
||||||
|
t.true(patternIsSuperset("main", "main"));
|
||||||
|
t.false(patternIsSuperset("*", "feature/*"));
|
||||||
|
t.true(patternIsSuperset("**", "feature/*"));
|
||||||
|
t.false(patternIsSuperset("feature-*", "**"));
|
||||||
|
t.false(patternIsSuperset("a/**/c", "a/**/d"));
|
||||||
|
t.false(patternIsSuperset("a/**/c", "a/**"));
|
||||||
|
t.true(patternIsSuperset("a/**", "a/**/c"));
|
||||||
|
t.true(patternIsSuperset("a/**/c", "a/main-**/c"));
|
||||||
|
t.false(patternIsSuperset("a/**/b/**/c", "a/**/d/**/c"));
|
||||||
|
t.true(patternIsSuperset("a/**/b/**/c", "a/**/b/c/**/c"));
|
||||||
|
t.true(patternIsSuperset("a/**/b/**/c", "a/**/b/d/**/c"));
|
||||||
|
t.false(patternIsSuperset("a/**/c/d/**/c", "a/**/b/**/c"));
|
||||||
|
t.false(patternIsSuperset("a/main-**/c", "a/**/c"));
|
||||||
|
t.true(patternIsSuperset("/robin/*/release/*", "/robin/moose/release/goose"));
|
||||||
|
t.false(
|
||||||
|
patternIsSuperset("/robin/moose/release/goose", "/robin/*/release/*")
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when branches contain dots", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [4.1, master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [4.1, master]
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on.push has a trailing comma", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master, ]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should only report the current job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test";
|
||||||
|
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, [WorkflowErrors.CheckoutWrongHead]));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should not report a different job's CheckoutWrongHead", (t) => {
|
||||||
|
process.env.GITHUB_JOB = "test3";
|
||||||
|
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
pull_request:
|
||||||
|
# The branches below must be a subset of the branches above
|
||||||
|
branches: [master]
|
||||||
|
jobs:
|
||||||
|
test:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test2:
|
||||||
|
steps:
|
||||||
|
- run: "git checkout HEAD^2"
|
||||||
|
|
||||||
|
test3:
|
||||||
|
steps: []
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() when on is missing", (t) => {
|
||||||
|
const errors = getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
`) as Workflow
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(...errorCodes(errors, []));
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() with a different on setup", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: "workflow_dispatch"
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: [workflow_dispatch]
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
workflow_dispatch: {}
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getWorkflowErrors() should not report an error if PRs are totally unconfigured", (t) => {
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [master]
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
t.deepEqual(
|
||||||
|
...errorCodes(
|
||||||
|
getWorkflowErrors(
|
||||||
|
yaml.load(`
|
||||||
|
name: "CodeQL"
|
||||||
|
on: ["push"]
|
||||||
|
`) as Workflow
|
||||||
|
),
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow returns category for simple workflow with category", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
"some-category"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow returns undefined for simple workflow without category", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
undefined
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow returns category for workflow with multiple jobs", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build foo
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: foo-category
|
||||||
|
bar:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- runs: ./build bar
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: bar-category
|
||||||
|
`) as Workflow,
|
||||||
|
"bar",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
"bar-category"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow finds category for workflow with language matrix", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.is(
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
language: [javascript, python]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
with:
|
||||||
|
language: \${{ matrix.language }}
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "/language:\${{ matrix.language }}"
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{ language: "javascript" }
|
||||||
|
),
|
||||||
|
"/language:javascript"
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow throws error for workflow with dynamic category", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.throws(
|
||||||
|
() =>
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: "\${{ github.workflow }}"
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
{
|
||||||
|
message:
|
||||||
|
"Could not get category input to github/codeql-action/analyze since it contained " +
|
||||||
|
"an unrecognized dynamic value.",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("getCategoryInputOrThrow throws error for workflow with multiple calls to analyze", (t) => {
|
||||||
|
process.env["GITHUB_REPOSITORY"] = "github/codeql-action-fake-repository";
|
||||||
|
t.throws(
|
||||||
|
() =>
|
||||||
|
getCategoryInputOrThrow(
|
||||||
|
yaml.load(`
|
||||||
|
jobs:
|
||||||
|
analysis:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
- uses: github/codeql-action/init@v2
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: some-category
|
||||||
|
- uses: github/codeql-action/analyze@v2
|
||||||
|
with:
|
||||||
|
category: another-category
|
||||||
|
`) as Workflow,
|
||||||
|
"analysis",
|
||||||
|
{}
|
||||||
|
),
|
||||||
|
{
|
||||||
|
message:
|
||||||
|
"Could not get category input to github/codeql-action/analyze since the analysis job " +
|
||||||
|
"calls github/codeql-action/analyze multiple times.",
|
||||||
|
}
|
||||||
|
);
|
||||||
|
});
|
||||||
455
src/workflow.ts
Normal file
455
src/workflow.ts
Normal file
|
|
@ -0,0 +1,455 @@
|
||||||
|
import * as fs from "fs";
|
||||||
|
import * as path from "path";
|
||||||
|
|
||||||
|
import * as core from "@actions/core";
|
||||||
|
import * as yaml from "js-yaml";
|
||||||
|
|
||||||
|
import * as api from "./api-client";
|
||||||
|
import { getRequiredEnvParam } from "./util";
|
||||||
|
|
||||||
|
export interface WorkflowJobStep {
|
||||||
|
name?: string;
|
||||||
|
run?: any;
|
||||||
|
uses?: string;
|
||||||
|
with?: { [key: string]: string };
|
||||||
|
}
|
||||||
|
|
||||||
|
interface WorkflowJob {
|
||||||
|
name?: string;
|
||||||
|
"runs-on"?: string;
|
||||||
|
steps?: WorkflowJobStep[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface WorkflowTrigger {
|
||||||
|
branches?: string[] | string;
|
||||||
|
paths?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// on: {} then push/pull_request are undefined
|
||||||
|
// on:
|
||||||
|
// push:
|
||||||
|
// pull_request:
|
||||||
|
// then push/pull_request are null
|
||||||
|
interface WorkflowTriggers {
|
||||||
|
push?: WorkflowTrigger | null;
|
||||||
|
pull_request?: WorkflowTrigger | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Workflow {
|
||||||
|
name?: string;
|
||||||
|
jobs?: { [key: string]: WorkflowJob };
|
||||||
|
on?: string | string[] | WorkflowTriggers;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isObject(o: unknown): o is object {
|
||||||
|
return o !== null && typeof o === "object";
|
||||||
|
}
|
||||||
|
|
||||||
|
const GLOB_PATTERN = new RegExp("(\\*\\*?)");
|
||||||
|
|
||||||
|
function escapeRegExp(string) {
|
||||||
|
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string
|
||||||
|
}
|
||||||
|
|
||||||
|
function patternToRegExp(value) {
|
||||||
|
return new RegExp(
|
||||||
|
`^${value
|
||||||
|
.toString()
|
||||||
|
.split(GLOB_PATTERN)
|
||||||
|
.reduce(function (arr, cur) {
|
||||||
|
if (cur === "**") {
|
||||||
|
arr.push(".*?");
|
||||||
|
} else if (cur === "*") {
|
||||||
|
arr.push("[^/]*?");
|
||||||
|
} else if (cur) {
|
||||||
|
arr.push(escapeRegExp(cur));
|
||||||
|
}
|
||||||
|
return arr;
|
||||||
|
}, [])
|
||||||
|
.join("")}$`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// this function should return true if patternA is a superset of patternB
|
||||||
|
// e.g: * is a superset of main-* but main-* is not a superset of *.
|
||||||
|
export function patternIsSuperset(patternA: string, patternB: string): boolean {
|
||||||
|
return patternToRegExp(patternA).test(patternB);
|
||||||
|
}
|
||||||
|
|
||||||
|
function branchesToArray(branches?: string | null | string[]): string[] | "**" {
|
||||||
|
if (typeof branches === "string") {
|
||||||
|
return [branches];
|
||||||
|
}
|
||||||
|
if (Array.isArray(branches)) {
|
||||||
|
if (branches.length === 0) {
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
return branches;
|
||||||
|
}
|
||||||
|
return "**";
|
||||||
|
}
|
||||||
|
export interface CodedError {
|
||||||
|
message: string;
|
||||||
|
code: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function toCodedErrors(errors: {
|
||||||
|
[code: string]: string;
|
||||||
|
}): Record<string, CodedError> {
|
||||||
|
return Object.entries(errors).reduce((acc, [code, message]) => {
|
||||||
|
acc[code] = { message, code };
|
||||||
|
return acc;
|
||||||
|
}, {} as Record<string, CodedError>);
|
||||||
|
}
|
||||||
|
|
||||||
|
// code to send back via status report
|
||||||
|
// message to add as a warning annotation to the run
|
||||||
|
export const WorkflowErrors = toCodedErrors({
|
||||||
|
MismatchedBranches: `Please make sure that every branch in on.pull_request is also in on.push so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
|
MissingPushHook: `Please specify an on.push hook so that Code Scanning can compare pull requests against the state of the base branch.`,
|
||||||
|
PathsSpecified: `Using on.push.paths can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
|
PathsIgnoreSpecified: `Using on.push.paths-ignore can prevent Code Scanning annotating new alerts in your pull requests.`,
|
||||||
|
CheckoutWrongHead: `git checkout HEAD^2 is no longer necessary. Please remove this step as Code Scanning recommends analyzing the merge commit for best results.`,
|
||||||
|
});
|
||||||
|
|
||||||
|
export function getWorkflowErrors(doc: Workflow): CodedError[] {
|
||||||
|
const errors: CodedError[] = [];
|
||||||
|
|
||||||
|
const jobName = process.env.GITHUB_JOB;
|
||||||
|
|
||||||
|
if (jobName) {
|
||||||
|
const job = doc?.jobs?.[jobName];
|
||||||
|
|
||||||
|
const steps = job?.steps;
|
||||||
|
|
||||||
|
if (Array.isArray(steps)) {
|
||||||
|
for (const step of steps) {
|
||||||
|
// this was advice that we used to give in the README
|
||||||
|
// we actually want to run the analysis on the merge commit
|
||||||
|
// to produce results that are more inline with expectations
|
||||||
|
// (i.e: this is what will happen if you merge this PR)
|
||||||
|
// and avoid some race conditions
|
||||||
|
if (step?.run === "git checkout HEAD^2") {
|
||||||
|
errors.push(WorkflowErrors.CheckoutWrongHead);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let missingPush = false;
|
||||||
|
|
||||||
|
if (doc.on === undefined) {
|
||||||
|
// this is not a valid config
|
||||||
|
} else if (typeof doc.on === "string") {
|
||||||
|
if (doc.on === "pull_request") {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
} else if (Array.isArray(doc.on)) {
|
||||||
|
const hasPush = doc.on.includes("push");
|
||||||
|
const hasPullRequest = doc.on.includes("pull_request");
|
||||||
|
if (hasPullRequest && !hasPush) {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
} else if (isObject(doc.on)) {
|
||||||
|
const hasPush = Object.prototype.hasOwnProperty.call(doc.on, "push");
|
||||||
|
const hasPullRequest = Object.prototype.hasOwnProperty.call(
|
||||||
|
doc.on,
|
||||||
|
"pull_request"
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!hasPush && hasPullRequest) {
|
||||||
|
missingPush = true;
|
||||||
|
}
|
||||||
|
if (hasPush && hasPullRequest) {
|
||||||
|
const paths = doc.on.push?.paths;
|
||||||
|
// if you specify paths or paths-ignore you can end up with commits that have no baseline
|
||||||
|
// if they didn't change any files
|
||||||
|
// currently we cannot go back through the history and find the most recent baseline
|
||||||
|
if (Array.isArray(paths) && paths.length > 0) {
|
||||||
|
errors.push(WorkflowErrors.PathsSpecified);
|
||||||
|
}
|
||||||
|
const pathsIgnore = doc.on.push?.["paths-ignore"];
|
||||||
|
if (Array.isArray(pathsIgnore) && pathsIgnore.length > 0) {
|
||||||
|
errors.push(WorkflowErrors.PathsIgnoreSpecified);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// if doc.on.pull_request is null that means 'all branches'
|
||||||
|
// if doc.on.pull_request is undefined that means 'off'
|
||||||
|
// we only want to check for mismatched branches if pull_request is on.
|
||||||
|
if (doc.on.pull_request !== undefined) {
|
||||||
|
const push = branchesToArray(doc.on.push?.branches);
|
||||||
|
|
||||||
|
if (push !== "**") {
|
||||||
|
const pull_request = branchesToArray(doc.on.pull_request?.branches);
|
||||||
|
|
||||||
|
if (pull_request !== "**") {
|
||||||
|
const difference = pull_request.filter(
|
||||||
|
(value) => !push.some((o) => patternIsSuperset(o, value))
|
||||||
|
);
|
||||||
|
if (difference.length > 0) {
|
||||||
|
// there are branches in pull_request that may not have a baseline
|
||||||
|
// because we are not building them on push
|
||||||
|
errors.push(WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
} else if (push.length > 0) {
|
||||||
|
// push is set up to run on a subset of branches
|
||||||
|
// and you could open a PR against a branch with no baseline
|
||||||
|
errors.push(WorkflowErrors.MismatchedBranches);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (missingPush) {
|
||||||
|
errors.push(WorkflowErrors.MissingPushHook);
|
||||||
|
}
|
||||||
|
|
||||||
|
return errors;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function validateWorkflow(): Promise<undefined | string> {
|
||||||
|
let workflow: Workflow;
|
||||||
|
try {
|
||||||
|
workflow = await getWorkflow();
|
||||||
|
} catch (e) {
|
||||||
|
return `error: getWorkflow() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
let workflowErrors: CodedError[];
|
||||||
|
try {
|
||||||
|
workflowErrors = getWorkflowErrors(workflow);
|
||||||
|
} catch (e) {
|
||||||
|
return `error: getWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (workflowErrors.length > 0) {
|
||||||
|
let message: string;
|
||||||
|
try {
|
||||||
|
message = formatWorkflowErrors(workflowErrors);
|
||||||
|
} catch (e) {
|
||||||
|
return `error: formatWorkflowErrors() failed: ${String(e)}`;
|
||||||
|
}
|
||||||
|
core.warning(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
return formatWorkflowCause(workflowErrors);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatWorkflowErrors(errors: CodedError[]): string {
|
||||||
|
const issuesWere = errors.length === 1 ? "issue was" : "issues were";
|
||||||
|
|
||||||
|
const errorsList = errors.map((e) => e.message).join(" ");
|
||||||
|
|
||||||
|
return `${errors.length} ${issuesWere} detected with this workflow: ${errorsList}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatWorkflowCause(errors: CodedError[]): undefined | string {
|
||||||
|
if (errors.length === 0) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return errors.map((e) => e.code).join(",");
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getWorkflow(): Promise<Workflow> {
|
||||||
|
const relativePath = await getWorkflowPath();
|
||||||
|
const absolutePath = path.join(
|
||||||
|
getRequiredEnvParam("GITHUB_WORKSPACE"),
|
||||||
|
relativePath
|
||||||
|
);
|
||||||
|
|
||||||
|
return yaml.load(fs.readFileSync(absolutePath, "utf-8")) as Workflow;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the path of the currently executing workflow.
|
||||||
|
*/
|
||||||
|
export async function getWorkflowPath(): Promise<string> {
|
||||||
|
const repo_nwo = getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||||
|
const owner = repo_nwo[0];
|
||||||
|
const repo = repo_nwo[1];
|
||||||
|
const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID"));
|
||||||
|
|
||||||
|
const apiClient = api.getApiClient();
|
||||||
|
const runsResponse = await apiClient.request(
|
||||||
|
"GET /repos/:owner/:repo/actions/runs/:run_id?exclude_pull_requests=true",
|
||||||
|
{
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
run_id,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
const workflowUrl = runsResponse.data.workflow_url;
|
||||||
|
|
||||||
|
const workflowResponse = await apiClient.request(`GET ${workflowUrl}`);
|
||||||
|
|
||||||
|
return workflowResponse.data.path;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the workflow run ID.
|
||||||
|
*/
|
||||||
|
export function getWorkflowRunID(): number {
|
||||||
|
const workflowRunID = parseInt(getRequiredEnvParam("GITHUB_RUN_ID"), 10);
|
||||||
|
if (Number.isNaN(workflowRunID)) {
|
||||||
|
throw new Error("GITHUB_RUN_ID must define a non NaN workflow run ID");
|
||||||
|
}
|
||||||
|
return workflowRunID;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getStepsCallingAction(
|
||||||
|
job: WorkflowJob,
|
||||||
|
actionName: string
|
||||||
|
): WorkflowJobStep[] {
|
||||||
|
const steps = job.steps;
|
||||||
|
if (!Array.isArray(steps)) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get steps calling ${actionName} since job.steps was not an array.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return steps.filter((step) => step.uses?.includes(actionName));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the value of a particular input with which
|
||||||
|
* an Action in the workflow would be invoked.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the value of the input, or undefined if no such input is passed to the Action
|
||||||
|
* @throws an error if the value of the input could not be determined, or we could not
|
||||||
|
* determine that no such input is passed to the Action.
|
||||||
|
*/
|
||||||
|
function getInputOrThrow(
|
||||||
|
workflow: Workflow,
|
||||||
|
jobName: string,
|
||||||
|
actionName: string,
|
||||||
|
inputName: string,
|
||||||
|
matrixVars: { [key: string]: string } | undefined
|
||||||
|
) {
|
||||||
|
const preamble = `Could not get ${inputName} input to ${actionName} since`;
|
||||||
|
if (!workflow.jobs) {
|
||||||
|
throw new Error(`${preamble} the workflow has no jobs.`);
|
||||||
|
}
|
||||||
|
if (!workflow.jobs[jobName]) {
|
||||||
|
throw new Error(`${preamble} the workflow has no job named ${jobName}.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const stepsCallingAction = getStepsCallingAction(
|
||||||
|
workflow.jobs[jobName],
|
||||||
|
actionName
|
||||||
|
);
|
||||||
|
|
||||||
|
if (stepsCallingAction.length === 0) {
|
||||||
|
throw new Error(
|
||||||
|
`${preamble} the ${jobName} job does not call ${actionName}.`
|
||||||
|
);
|
||||||
|
} else if (stepsCallingAction.length > 1) {
|
||||||
|
throw new Error(
|
||||||
|
`${preamble} the ${jobName} job calls ${actionName} multiple times.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let input = stepsCallingAction[0].with?.[inputName];
|
||||||
|
|
||||||
|
if (input !== undefined && matrixVars !== undefined) {
|
||||||
|
// Make a basic attempt to substitute matrix variables
|
||||||
|
// First normalize by removing whitespace
|
||||||
|
input = input.replace(/\${{\s+/, "${{").replace(/\s+}}/, "}}");
|
||||||
|
for (const [key, value] of Object.entries(matrixVars)) {
|
||||||
|
input = input.replace(`\${{matrix.${key}}}`, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (input !== undefined && input.includes("${{")) {
|
||||||
|
throw new Error(
|
||||||
|
`Could not get ${inputName} input to ${actionName} since it contained an unrecognized dynamic value.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the expected name of the analyze Action.
|
||||||
|
*
|
||||||
|
* This allows us to test workflow parsing functionality as a CodeQL Action PR check.
|
||||||
|
*/
|
||||||
|
function getAnalyzeActionName() {
|
||||||
|
if (getRequiredEnvParam("GITHUB_REPOSITORY") === "github/codeql-action") {
|
||||||
|
return "./analyze";
|
||||||
|
} else {
|
||||||
|
return "github/codeql-action/analyze";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the category input for the particular job,
|
||||||
|
* given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the category input, or undefined if the category input is not defined
|
||||||
|
* @throws an error if the category input could not be determined
|
||||||
|
*/
|
||||||
|
export function getCategoryInputOrThrow(
|
||||||
|
workflow: Workflow,
|
||||||
|
jobName: string,
|
||||||
|
matrixVars: { [key: string]: string } | undefined
|
||||||
|
): string | undefined {
|
||||||
|
return getInputOrThrow(
|
||||||
|
workflow,
|
||||||
|
jobName,
|
||||||
|
getAnalyzeActionName(),
|
||||||
|
"category",
|
||||||
|
matrixVars
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the upload input for the particular job,
|
||||||
|
* given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the upload input
|
||||||
|
* @throws an error if the upload input could not be determined
|
||||||
|
*/
|
||||||
|
export function getUploadInputOrThrow(
|
||||||
|
workflow: Workflow,
|
||||||
|
jobName: string,
|
||||||
|
matrixVars: { [key: string]: string } | undefined
|
||||||
|
): string {
|
||||||
|
return (
|
||||||
|
getInputOrThrow(
|
||||||
|
workflow,
|
||||||
|
jobName,
|
||||||
|
getAnalyzeActionName(),
|
||||||
|
"upload",
|
||||||
|
matrixVars
|
||||||
|
) || "true" // if unspecified, upload defaults to true
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Makes a best effort attempt to retrieve the checkout_path input for the
|
||||||
|
* particular job, given a set of matrix variables.
|
||||||
|
*
|
||||||
|
* Typically you'll want to wrap this function in a try/catch block and handle the error.
|
||||||
|
*
|
||||||
|
* @returns the checkout_path input
|
||||||
|
* @throws an error if the checkout_path input could not be determined
|
||||||
|
*/
|
||||||
|
export function getCheckoutPathInputOrThrow(
|
||||||
|
workflow: Workflow,
|
||||||
|
jobName: string,
|
||||||
|
matrixVars: { [key: string]: string } | undefined
|
||||||
|
): string {
|
||||||
|
return (
|
||||||
|
getInputOrThrow(
|
||||||
|
workflow,
|
||||||
|
jobName,
|
||||||
|
getAnalyzeActionName(),
|
||||||
|
"checkout_path",
|
||||||
|
matrixVars
|
||||||
|
) || getRequiredEnvParam("GITHUB_WORKSPACE") // if unspecified, checkout_path defaults to ${{ github.workspace }}
|
||||||
|
);
|
||||||
|
}
|
||||||
Loading…
Add table
Add a link
Reference in a new issue