build: refresh js files
parent f7935cc485
commit 94b5d396bc
14 changed files with 358 additions and 15 deletions

lib/actions-util.js (generated, 133 changed lines)

@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.getTemporaryDirectory = getTemporaryDirectory;
exports.getRef = getRef;
exports.getActionVersion = getActionVersion;

@@ -82,6 +82,7 @@ function getTemporaryDirectory() {
async function runGitCommand(checkoutPath, args, customErrorMessage) {
    let stdout = "";
    let stderr = "";
    core.debug(`Running git command: git ${args.join(" ")}`);
    try {
        await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), args, {
            silent: true,

@@ -170,6 +171,136 @@ const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
    }
};
exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
/**
 * Deepen the git history of the given ref by one level. Errors are logged.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
const deepenGitHistory = async function () {
    try {
        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", "--deepen=1"], "Cannot deepen the shallow repository.");
    }
    catch {
        // Errors are already logged by runGitCommand()
    }
};
exports.deepenGitHistory = deepenGitHistory;
/**
 * Fetch the given remote branch. Errors are logged.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
const gitFetch = async function (branch, extraFlags) {
    try {
        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", ...extraFlags, "origin", `${branch}:${branch}`], `Cannot fetch ${branch}.`);
    }
    catch {
        // Errors are already logged by runGitCommand()
    }
};
exports.gitFetch = gitFetch;
/**
 * Compute all the merge bases between the given refs. Returns an empty array
 * if no merge base is found, or if there is an error.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 */
const getAllGitMergeBases = async function (refs) {
    try {
        const stdout = await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["merge-base", "--all", ...refs], `Cannot get merge base of ${refs}.`);
        return stdout.trim().split("\n");
    }
    catch {
        return [];
    }
};
exports.getAllGitMergeBases = getAllGitMergeBases;
/**
 * Compute the diff hunk headers between the two given refs.
 *
 * This function uses the `checkout_path` to determine the repository path and
 * works only when called from `analyze` or `upload-sarif`.
 *
 * @returns an array of diff hunk headers (one element per line), or undefined
 * if the action was not triggered by a pull request, or if the diff could not
 * be determined.
 */
const getGitDiffHunkHeaders = async function (fromRef, toRef) {
    let stdout = "";
    try {
        stdout = await runGitCommand((0, exports.getOptionalInput)("checkout_path"), [
            "-c",
            "core.quotePath=false",
            "diff",
            "--no-renames",
            "--irreversible-delete",
            "-U0",
            fromRef,
            toRef,
        ], `Cannot get diff from ${fromRef} to ${toRef}.`);
    }
    catch {
        return undefined;
    }
    const headers = [];
    for (const line of stdout.split("\n")) {
        if (line.startsWith("--- ") ||
            line.startsWith("+++ ") ||
            line.startsWith("@@ ")) {
            headers.push(line);
        }
    }
    return headers;
};
exports.getGitDiffHunkHeaders = getGitDiffHunkHeaders;
/**
 * Decode, if necessary, a file path produced by Git. See
 * https://git-scm.com/docs/git-config#Documentation/git-config.txt-corequotePath
 * for details on how Git encodes file paths with special characters.
 *
 * This function works only for Git output with `core.quotePath=false`.
 */
const decodeGitFilePath = function (filePath) {
    if (filePath.startsWith('"') && filePath.endsWith('"')) {
        filePath = filePath.substring(1, filePath.length - 1);
        return filePath.replace(/\\([abfnrtv\\"]|[0-7]{1,3})/g, (_match, seq) => {
            switch (seq[0]) {
                case "a":
                    return "\x07";
                case "b":
                    return "\b";
                case "f":
                    return "\f";
                case "n":
                    return "\n";
                case "r":
                    return "\r";
                case "t":
                    return "\t";
                case "v":
                    return "\v";
                case "\\":
                    return "\\";
                case '"':
                    return '"';
                default:
                    // Both String.fromCharCode() and String.fromCodePoint() work only
                    // for constructing an entire character at once. If a Unicode
                    // character is encoded as a sequence of escaped bytes, calling these
                    // methods sequentially on the individual byte values would *not*
                    // produce the original multi-byte Unicode character. As a result,
                    // this implementation works only with the Git option core.quotePath
                    // set to false.
                    return String.fromCharCode(parseInt(seq, 8));
            }
        });
    }
    return filePath;
};
exports.decodeGitFilePath = decodeGitFilePath;
/**
 * Get the ref currently being analyzed.
 */
File diff suppressed because one or more lines are too long
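
Editorial note (not part of the diff): the new helpers above are designed to be composed. getGitDiffHunkHeaders shells out to `git diff -U0` with core.quotePath=false; -U0 drops context lines, and the helper keeps only the `---`, `+++` and `@@` lines from the output. A header such as `@@ -10,2 +12,3 @@` means lines 12 through 14 of the new file were added or modified. decodeGitFilePath then undoes Git's C-style quoting on the `+++` paths. A minimal usage sketch, assuming a script placed next to lib/actions-util.js inside a git checkout; the real callers read the checkout_path action input, whereas this sketch just runs git in the current working directory:

    const actionsUtil = require("./actions-util");

    async function printEditedFiles(fromRef, toRef) {
        // Returns only the "---", "+++" and "@@" lines of `git diff -U0 fromRef toRef`.
        const headers = await actionsUtil.getGitDiffHunkHeaders(fromRef, toRef);
        if (headers === undefined) {
            return; // the diff could not be computed
        }
        for (const line of headers) {
            if (line.startsWith("+++ ")) {
                // e.g. '+++ "b/some \"quoted\" path.ts"' becomes 'b/some "quoted" path.ts'
                console.log(actionsUtil.decodeGitFilePath(line.substring(4)));
            }
        }
    }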

lib/actions-util.test.js (generated, 30 changed lines)

@@ -246,4 +246,34 @@ const util_1 = require("./util");
    t.assert(!infoStub.firstCall.args[0].endsWith("The checkout path provided to the action does not appear to be a git repository."));
    infoStub.restore();
});
(0, ava_1.default)("decodeGitFilePath unquoted strings", async (t) => {
    t.deepEqual(actionsUtil.decodeGitFilePath("foo"), "foo");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo bar"), "foo bar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\\\bar"), "foo\\\\bar");
    t.deepEqual(actionsUtil.decodeGitFilePath('foo\\"bar'), 'foo\\"bar');
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\001bar"), "foo\\001bar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\abar"), "foo\\abar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\bbar"), "foo\\bbar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\fbar"), "foo\\fbar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\nbar"), "foo\\nbar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\rbar"), "foo\\rbar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\tbar"), "foo\\tbar");
    t.deepEqual(actionsUtil.decodeGitFilePath("foo\\vbar"), "foo\\vbar");
    t.deepEqual(actionsUtil.decodeGitFilePath("\\a\\b\\f\\n\\r\\t\\v"), "\\a\\b\\f\\n\\r\\t\\v");
});
(0, ava_1.default)("decodeGitFilePath quoted strings", async (t) => {
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo"'), "foo");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo bar"'), "foo bar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\\\bar"'), "foo\\bar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\"bar"'), 'foo"bar');
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\001bar"'), "foo\x01bar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\abar"'), "foo\x07bar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\bbar"'), "foo\bbar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\fbar"'), "foo\fbar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\nbar"'), "foo\nbar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\rbar"'), "foo\rbar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\tbar"'), "foo\tbar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"foo\\vbar"'), "foo\vbar");
    t.deepEqual(actionsUtil.decodeGitFilePath('"\\a\\b\\f\\n\\r\\t\\v"'), "\x07\b\f\n\r\t\v");
});
//# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
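
Editorial note (not part of the diff): the tests above only exercise single-byte escapes. The reason the helpers force core.quotePath=false is that with Git's default quoting a non-ASCII path is emitted as escaped UTF-8 bytes, and decoding those bytes one at a time with String.fromCharCode() cannot reassemble the original character. A self-contained sketch of the failure mode this avoids:

    // With core.quotePath=true (Git's default), a path such as "é.txt" is
    // printed as "\303\251.txt". Decoding each octal escape separately yields
    // the two code units 0xC3 and 0xA9 ("Ã©") instead of the original "é",
    // because the two bytes form a single UTF-8 sequence.
    const quoted = "\\303\\251.txt"; // what Git would print with quotePath enabled
    const decoded = quoted.replace(/\\([0-7]{1,3})/g, (_m, seq) =>
        String.fromCharCode(parseInt(seq, 8)));
    console.log(decoded); // "Ã©.txt" -- wrong, hence the core.quotePath=false requirement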

lib/analyze-action.js (generated, 6 changed lines)

@@ -31,6 +31,7 @@ const fs = __importStar(require("fs"));
const path_1 = __importDefault(require("path"));
const perf_hooks_1 = require("perf_hooks");
const core = __importStar(require("@actions/core"));
const github = __importStar(require("@actions/github"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze");
const api_client_1 = require("./api-client");

@@ -173,11 +174,14 @@ async function run() {
    util.checkActionVersion(actionsUtil.getActionVersion(), gitHubVersion);
    const features = new feature_flags_1.Features(gitHubVersion, repositoryNwo, actionsUtil.getTemporaryDirectory(), logger);
    const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger);
    const pull_request = github.context.payload.pull_request;
    const diffRangePackDir = pull_request &&
        (await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.ref, codeql, logger, features));
    await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger);
    await runAutobuildIfLegacyGoWorkflow(config, logger);
    dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger);
    if (actionsUtil.getRequiredInput("skip-queries") !== "true") {
        runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, actionsUtil.getOptionalInput("category"), config, logger, features);
        runStats = await (0, analyze_1.runQueries)(outputDir, memory, util.getAddSnippetsFlag(actionsUtil.getRequiredInput("add-snippets")), threads, diffRangePackDir, actionsUtil.getOptionalInput("category"), config, logger, features);
    }
    if (actionsUtil.getOptionalInput("cleanup-level") !== "none") {
        await (0, analyze_1.runCleanup)(config, actionsUtil.getOptionalInput("cleanup-level") || "brutal", logger);
File diff suppressed because one or more lines are too long

lib/analyze.js (generated, 181 changed lines)

@@ -29,6 +29,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.CodeQLAnalysisError = void 0;
exports.runExtraction = runExtraction;
exports.dbIsFinalized = dbIsFinalized;
exports.setupDiffInformedQueryRun = setupDiffInformedQueryRun;
exports.runQueries = runQueries;
exports.runFinalize = runFinalize;
exports.warnIfGoInstalledAfterInit = warnIfGoInstalledAfterInit;

@@ -39,12 +40,14 @@ const perf_hooks_1 = require("perf_hooks");
const safe_which_1 = require("@chrisgavin/safe-which");
const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml"));
const actionsUtil = __importStar(require("./actions-util"));
const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql");
const diagnostics_1 = require("./diagnostics");
const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags");
const languages_1 = require("./languages");
const logging_1 = require("./logging");
const tools_features_1 = require("./tools-features");
const tracer_config_1 = require("./tracer-config");
const upload_lib_1 = require("./upload-lib");

@@ -134,11 +137,183 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
        trap_import_duration_ms: Math.round(trapImportTime),
    };
}
/**
 * Set up the diff-informed analysis feature.
 *
 * @param baseRef The base branch name, used for calculating the diff range.
 * @param headRef The head branch name, used for calculating the diff range.
 * @param codeql
 * @param logger
 * @param features
 * @returns Absolute path to the directory containing the extension pack for
 * the diff range information, or `undefined` if the feature is disabled.
 */
async function setupDiffInformedQueryRun(baseRef, headRef, codeql, logger, features) {
    if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
        return undefined;
    }
    return await (0, logging_1.withGroup)("Generating diff range extension pack", async () => {
        const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headRef, logger);
        return writeDiffRangeDataExtensionPack(logger, diffRanges);
    });
}
/**
 * Return the file line ranges that were added or modified in the pull request.
 *
 * @param baseRef The base branch name, used for calculating the diff range.
 * @param headRef The head branch name, used for calculating the diff range.
 * @param logger
 * @returns An array of tuples, where each tuple contains the absolute path of a
 * file, the start line and the end line (both 1-based and inclusive) of an
 * added or modified range in that file. Returns `undefined` if the action was
 * not triggered by a pull request or if there was an error.
 */
async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
    const checkoutPath = actionsUtil.getOptionalInput("checkout_path");
    if (checkoutPath === undefined) {
        return undefined;
    }
    // To compute the merge bases between the base branch and the PR topic branch,
    // we need to fetch the commit graph from the branch heads to those merge
    // bases. The following 5-step procedure does so while limiting the amount of
    // history fetched.
    // Step 1: Deepen from the PR merge commit to the base branch head and the PR
    // topic branch head, so that the PR merge commit is no longer considered a
    // grafted commit.
    await actionsUtil.deepenGitHistory();
    // Step 2: Fetch the base branch shallow history. This step ensures that the
    // base branch name is present in the local repository. Normally the base
    // branch name would be added by Step 4. However, if the base branch head is
    // an ancestor of the PR topic branch head, Step 4 would fail without doing
    // anything, so we need to fetch the base branch explicitly.
    await actionsUtil.gitFetch(baseRef, ["--depth=1"]);
    // Step 3: Fetch the PR topic branch history, stopping when we reach commits
    // that are reachable from the base branch head.
    await actionsUtil.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
    // Step 4: Fetch the base branch history, stopping when we reach commits that
    // are reachable from the PR topic branch head.
    await actionsUtil.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
    // Step 5: Deepen the history so that we have the merge bases between the base
    // branch and the PR topic branch.
    await actionsUtil.deepenGitHistory();
    // To compute the exact same diff as GitHub would compute for the PR, we need
    // to use the same merge base as GitHub. That is easy to do if there is only
    // one merge base, which is by far the most common case. If there are multiple
    // merge bases, we stop without producing a diff range.
    const mergeBases = await actionsUtil.getAllGitMergeBases([baseRef, headRef]);
    logger.info(`Merge bases: ${mergeBases.join(", ")}`);
    if (mergeBases.length !== 1) {
        logger.info("Cannot compute diff range because baseRef and headRef " +
            `have ${mergeBases.length} merge bases (instead of exactly 1).`);
        return undefined;
    }
    const diffHunkHeaders = await actionsUtil.getGitDiffHunkHeaders(mergeBases[0], headRef);
    if (diffHunkHeaders === undefined) {
        return undefined;
    }
    const results = new Array();
    let changedFile = "";
    for (const line of diffHunkHeaders) {
        if (line.startsWith("+++ ")) {
            const filePath = actionsUtil.decodeGitFilePath(line.substring(4));
            if (filePath.startsWith("b/")) {
                // The file was edited: track all hunks in the file
                changedFile = filePath.substring(2);
            }
            else if (filePath === "/dev/null") {
                // The file was deleted: skip all hunks in the file
                changedFile = "";
            }
            else {
                logger.warning(`Failed to parse diff hunk header line: ${line}`);
                return undefined;
            }
            continue;
        }
        if (line.startsWith("@@ ")) {
            if (changedFile === "")
                continue;
            const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
            if (match === null) {
                logger.warning(`Failed to parse diff hunk header line: ${line}`);
                return undefined;
            }
            const startLine = parseInt(match[1], 10);
            const numLines = parseInt(match[2], 10);
            if (numLines === 0) {
                // The hunk was a deletion: skip it
                continue;
            }
            const endLine = startLine + (numLines || 1) - 1;
            results.push([
                path.join(checkoutPath, changedFile),
                startLine,
                endLine,
            ]);
        }
    }
    return results;
}
/**
 * Create an extension pack in the temporary directory that contains the file
 * line ranges that were added or modified in the pull request.
 *
 * @param logger
 * @param ranges The file line ranges, as returned by
 * `getPullRequestEditedDiffRanges`.
 * @returns The absolute path of the directory containing the extension pack, or
 * `undefined` if no extension pack was created.
 */
function writeDiffRangeDataExtensionPack(logger, ranges) {
    if (ranges === undefined) {
        return undefined;
    }
    const diffRangeDir = path.join(actionsUtil.getTemporaryDirectory(), "pr-diff-range");
    fs.mkdirSync(diffRangeDir);
    fs.writeFileSync(path.join(diffRangeDir, "qlpack.yml"), `
name: codeql-action/pr-diff-range
version: 0.0.0
library: true
extensionTargets:
  codeql/util: '*'
dataExtensions:
  - pr-diff-range.yml
`);
    const header = `
extensions:
  - addsTo:
      pack: codeql/util
      extensible: restrictAlertsTo
    data:
`;
    let data = ranges
        .map((range) => `      - ["${range[0]}", ${range[1]}, ${range[2]}]\n`)
        .join("");
    if (!data) {
        // Ensure that the data extension is not empty, so that a pull request with
        // no edited lines would exclude (instead of accepting) all alerts.
        data = '      - ["", 0, 0]\n';
    }
    const extensionContents = header + data;
    const extensionFilePath = path.join(diffRangeDir, "pr-diff-range.yml");
    fs.writeFileSync(extensionFilePath, extensionContents);
    logger.debug(`Wrote pr-diff-range extension pack to ${extensionFilePath}:\n${extensionContents}`);
    return diffRangeDir;
}
// Runs queries and creates sarif files in the given folder
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, automationDetailsId, config, logger, features) {
async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag, diffRangePackDir, automationDetailsId, config, logger, features) {
    const statusReport = {};
    const dataExtensionFlags = diffRangePackDir
        ? [
            `--additional-packs=${diffRangePackDir}`,
            "--extension-packs=codeql-action/pr-diff-range",
        ]
        : [];
    const sarifRunPropertyFlag = diffRangePackDir
        ? "--sarif-run-property=incrementalMode=diff-informed"
        : undefined;
    const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
    const queryFlags = [memoryFlag, threadsFlag];
    const queryFlags = [memoryFlag, threadsFlag, ...dataExtensionFlags];
    for (const language of config.languages) {
        try {
            const sarifFile = path.join(sarifFolder, `${language}.sarif`);
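
Editorial note (not part of the diff): for readers unfamiliar with shallow checkouts, the five steps in getPullRequestEditedDiffRanges above amount to roughly the following sequence of git invocations, after which the merge base(s) of the two branches can be computed locally. Step 2 matters when the base branch head is already an ancestor of the PR head, in which case Step 4 fetches nothing. A simplified sketch, with no error handling, running git in the current directory rather than in the checkout_path input used by the real helpers:

    const { execFileSync } = require("child_process");
    const git = (...args) => execFileSync("git", args, { encoding: "utf8" });

    function fetchMergeBaseHistory(baseRef, headRef) {
        git("fetch", "--no-tags", "--deepen=1");                                                      // Step 1
        git("fetch", "--no-tags", "--depth=1", "origin", `${baseRef}:${baseRef}`);                    // Step 2
        git("fetch", "--no-tags", `--shallow-exclude=${baseRef}`, "origin", `${headRef}:${headRef}`); // Step 3
        git("fetch", "--no-tags", `--shallow-exclude=${headRef}`, "origin", `${baseRef}:${baseRef}`); // Step 4
        git("fetch", "--no-tags", "--deepen=1");                                                      // Step 5
        // With the graph deepened, the merge base(s) are now available locally.
        return git("merge-base", "--all", baseRef, headRef).trim().split("\n");
    }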

@@ -192,7 +367,7 @@ async function runQueries(sarifFolder, memoryFlag, addSnippetsFlag, threadsFlag,
    return statusReport;
    async function runInterpretResults(language, queries, sarifFile, enableDebugLogging) {
        const databasePath = util.getCodeQLDatabasePath(config, language);
        return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", automationDetailsId, config, features);
        return await codeql.databaseInterpretResults(databasePath, queries, sarifFile, addSnippetsFlag, threadsFlag, enableDebugLogging ? "-vv" : "-v", sarifRunPropertyFlag, automationDetailsId, config, features);
    }
    /** Get an object with all queries and their counts parsed from a SARIF file path. */
    function getPerQueryAlertCounts(sarifPath, log) {
File diff suppressed because one or more lines are too long
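
Editorial note (not part of the diff): the extension pack written by writeDiffRangeDataExtensionPack feeds the `restrictAlertsTo` extensible predicate in the codeql/util pack. The sketch below uses one hypothetical edited range (lines 10 to 12 of /checkout/src/app.ts) and mirrors the header and row templates above to show the pr-diff-range.yml contents that would be generated:

    const ranges = [["/checkout/src/app.ts", 10, 12]];
    const header = `
    extensions:
      - addsTo:
          pack: codeql/util
          extensible: restrictAlertsTo
        data:
    `;
    const data = ranges
        .map((range) => `      - ["${range[0]}", ${range[1]}, ${range[2]}]\n`)
        .join("");
    console.log(header + data);
    // Prints (roughly):
    // extensions:
    //   - addsTo:
    //       pack: codeql/util
    //       extensible: restrictAlertsTo
    //     data:
    //       - ["/checkout/src/app.ts", 10, 12]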

lib/analyze.test.js (generated, 2 changed lines)

@@ -103,7 +103,7 @@ const util = __importStar(require("./util"));
    fs.mkdirSync(util.getCodeQLDatabasePath(config, language), {
        recursive: true,
    });
    const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
    const statusReport = await (0, analyze_1.runQueries)(tmpDir, memoryFlag, addSnippetsFlag, threadsFlag, undefined, undefined, config, (0, logging_1.getRunnerLogger)(true), (0, testing_utils_1.createFeatures)([feature_flags_1.Feature.QaTelemetryEnabled]));
    t.deepEqual(Object.keys(statusReport).sort(), [
        `analyze_builtin_queries_${language}_duration_ms`,
        "event_reports",

@@ -1 +1 @@
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,8CAAuB;AACvB,6CAA+B;AAE/B,uCAAuC;AACvC,qCAAqC;AACrC,mDAA0C;AAC1C,2CAAuC;AACvC,uCAA4C;AAC5C,mDAKyB;AACzB,wDAA0C;AAC1C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB;;;;;GAKG;AACH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QACvB,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,yBAAyB,CAAC,CAAC;QAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE,CAAC;YAC/C,IAAA,kBAAS,EAAC;gBACR,kBAAkB,EAAE,KAAK,IAAI,EAAE,GAAE,CAAC;gBAClC,YAAY,EAAE,KAAK,IAAI,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;gBACzC,wBAAwB,EAAE,KAAK,EAC7B,GAAW,EACX,WAAqB,EACrB,SAAiB,EACjB,EAAE;oBACF,EAAE,CAAC,aAAa,CACd,SAAS,EACT,IAAI,CAAC,SAAS,CAAC;wBACb,IAAI,EAAE;4BACJ,+EAA+E;4BAC/E;gCACE,IAAI,EAAE;oCACJ,UAAU,EAAE;wCACV;4CACE,KAAK,EAAE;gDACL;oDACE,UAAU,EAAE;wDACV,IAAI,EAAE,CAAC,eAAe,CAAC;qDACxB;iDACF;6CACF;yCACF;qCACF;iCACF;gCACD,UAAU,EAAE;oCACV,aAAa,EAAE;wCACb;4CACE,IAAI,EAAE;gDACJ,KAAK,EAAE,CAAC;gDACR,aAAa,EAAE;oDACb,KAAK,EAAE,CAAC;iDACT;6CACF;4CACD,KAAK,EAAE,GAAG;yCACX;qCACF;iCACF;6BACF;4BACD,EAAE;yBACH;qBACF,CAAC,CACH,CAAC;oBACF,OAAO,EAAE,CAAC;gBACZ,CAAC;gBACD,qBAAqB,EAAE,KAAK,IAAI,EAAE,CAAC,EAAE;aACtC,CAAC,CAAC;YAEH,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC;gBAC9B,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,MAAM;gBACf,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;aACrD,CAAC,CAAC;YACH,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;gBACzD,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,YAAY,GAAG,MAAM,IAAA,oBAAU,EACnC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,SAAS,EACT,MAAM,EACN,IAAA,yBAAe,EAAC,IAAI,CAAC,EACrB,IAAA,8BAAc,EAAC,CAAC,uBAAO,CAAC,kBAAkB,CAAC,CAAC,CAC7C,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC,IAAI,EAAE,EAAE;gBAC5C,2BAA2B,QAAQ,cAAc;gBACjD,eAAe;gBACf,qBAAqB,QAAQ,cAAc;aAC5C,CAAC,CAAC;YACH,KAAK,MAAM,WAAW,IAAI,YAAY,CAAC,aAAc,EAAE,CAAC;gBACtD,CAAC,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,mCAAmC,CAAC,CAAC;gBACpE,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,WAAW,CAAC,CAAC;gBACpC,CAAC,CAAC,IAAI,CAAC,aAAa,IAAI,WAAW,CAAC,UAAW,CAAC,CAAC;YACnD,CAAC;QACH,CAAC;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}
{"version":3,"file":"analyze.test.js","sourceRoot":"","sources":["../src/analyze.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAyB;AACzB,2CAA6B;AAE7B,8CAAuB;AACvB,6CAA+B;AAE/B,uCAAuC;AACvC,qCAAqC;AACrC,mDAA0C;AAC1C,2CAAuC;AACvC,uCAA4C;AAC5C,mDAKyB;AACzB,wDAA0C;AAC1C,6CAA+B;AAE/B,IAAA,0BAAU,EAAC,aAAI,CAAC,CAAC;AAEjB;;;;;GAKG;AACH,IAAA,aAAI,EAAC,sBAAsB,EAAE,KAAK,EAAE,CAAC,EAAE,EAAE;IACvC,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE;QAC5C,IAAA,gCAAgB,EAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAEjC,MAAM,UAAU,GAAG,EAAE,CAAC;QACtB,MAAM,eAAe,GAAG,EAAE,CAAC;QAC3B,MAAM,WAAW,GAAG,EAAE,CAAC;QACvB,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,yBAAyB,CAAC,CAAC;QAEjD,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAQ,CAAC,EAAE,CAAC;YAC/C,IAAA,kBAAS,EAAC;gBACR,kBAAkB,EAAE,KAAK,IAAI,EAAE,GAAE,CAAC;gBAClC,YAAY,EAAE,KAAK,IAAI,EAAE,CAAC,CAAC,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;gBACzC,wBAAwB,EAAE,KAAK,EAC7B,GAAW,EACX,WAAqB,EACrB,SAAiB,EACjB,EAAE;oBACF,EAAE,CAAC,aAAa,CACd,SAAS,EACT,IAAI,CAAC,SAAS,CAAC;wBACb,IAAI,EAAE;4BACJ,+EAA+E;4BAC/E;gCACE,IAAI,EAAE;oCACJ,UAAU,EAAE;wCACV;4CACE,KAAK,EAAE;gDACL;oDACE,UAAU,EAAE;wDACV,IAAI,EAAE,CAAC,eAAe,CAAC;qDACxB;iDACF;6CACF;yCACF;qCACF;iCACF;gCACD,UAAU,EAAE;oCACV,aAAa,EAAE;wCACb;4CACE,IAAI,EAAE;gDACJ,KAAK,EAAE,CAAC;gDACR,aAAa,EAAE;oDACb,KAAK,EAAE,CAAC;iDACT;6CACF;4CACD,KAAK,EAAE,GAAG;yCACX;qCACF;iCACF;6BACF;4BACD,EAAE;yBACH;qBACF,CAAC,CACH,CAAC;oBACF,OAAO,EAAE,CAAC;gBACZ,CAAC;gBACD,qBAAqB,EAAE,KAAK,IAAI,EAAE,CAAC,EAAE;aACtC,CAAC,CAAC;YAEH,MAAM,MAAM,GAAG,IAAA,gCAAgB,EAAC;gBAC9B,SAAS,EAAE,CAAC,QAAQ,CAAC;gBACrB,OAAO,EAAE,MAAM;gBACf,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,kBAAkB,CAAC;aACrD,CAAC,CAAC;YACH,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;gBACzD,SAAS,EAAE,IAAI;aAChB,CAAC,CAAC;YAEH,MAAM,YAAY,GAAG,MAAM,IAAA,oBAAU,EACnC,MAAM,EACN,UAAU,EACV,eAAe,EACf,WAAW,EACX,SAAS,EACT,SAAS,EACT,MAAM,EACN,IAAA,yBAAe,EAAC,IAAI,CAAC,EACrB,IAAA,8BAAc,EAAC,CAAC,uBAAO,CAAC,kBAAkB,CAAC,CAAC,CAC7C,CAAC;YACF,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC,IAAI,EAAE,EAAE;gBAC5C,2BAA2B,QAAQ,cAAc;gBACjD,eAAe;gBACf,qBAAqB,QAAQ,cAAc;aAC5C,CAAC,CAAC;YACH,KAAK,MAAM,WAAW,IAAI,YAAY,CAAC,aAAc,EAAE,CAAC;gBACtD,CAAC,CAAC,SAAS,CAAC,WAAW,CAAC,KAAK,EAAE,mCAAmC,CAAC,CAAC;gBACpE,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,WAAW,CAAC,CAAC;gBACpC,CAAC,CAAC,IAAI,CAAC,aAAa,IAAI,WAAW,CAAC,UAAW,CAAC,CAAC;YACnD,CAAC;QACH,CAAC;IACH,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC"}

lib/codeql.js (generated, 5 changed lines)

@@ -460,7 +460,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
        }
        await runCli(cmd, codeqlArgs);
    },
    async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, config, features) {
    async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, sarifRunPropertyFlag, automationDetailsId, config, features) {
        const shouldExportDiagnostics = await features.getValue(feature_flags_1.Feature.ExportDiagnosticsEnabled, this);
        const codeqlArgs = [
            "database",

@@ -479,6 +479,9 @@ async function getCodeQLForCmd(cmd, checkVersion) {
            ...(await getJobRunUuidSarifOptions(this)),
            ...getExtraOptionsFromEnv(["database", "interpret-results"]),
        ];
        if (sarifRunPropertyFlag !== undefined) {
            codeqlArgs.push(sarifRunPropertyFlag);
        }
        if (automationDetailsId !== undefined) {
            codeqlArgs.push("--sarif-category", automationDetailsId);
        }
File diff suppressed because one or more lines are too long
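
Editorial note (not part of the diff): putting the analyze.js and codeql.js changes together, a diff range pack contributes two flags to the query flags and one SARIF run property to the interpret-results arguments. A sketch of the extra argument strings, using a hypothetical pack directory name:

    const diffRangePackDir = "/tmp/codeql-action/pr-diff-range";
    const dataExtensionFlags = [
        `--additional-packs=${diffRangePackDir}`,
        "--extension-packs=codeql-action/pr-diff-range",
    ];
    const sarifRunPropertyFlag = "--sarif-run-property=incrementalMode=diff-informed";
    // In runQueries, dataExtensionFlags are appended to queryFlags, while
    // sarifRunPropertyFlag is pushed onto the "database interpret-results"
    // argument list only when it is defined (see the hunk at line 479 above).
    console.log([...dataExtensionFlags, sarifRunPropertyFlag].join("\n"));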

lib/codeql.test.js (generated, 2 changed lines)

@@ -533,7 +533,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
    sinon.stub(codeqlObject, "getVersion").resolves(codeqlVersion);
    // safeWhich throws because of the test CodeQL object.
    sinon.stub(safeWhich, "safeWhich").resolves("");
    await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", "", Object.assign({}, stubConfig, { gitHubVersion: githubVersion }), (0, testing_utils_1.createFeatures)([]));
    await codeqlObject.databaseInterpretResults("", [], "", "", "", "-v", undefined, "", Object.assign({}, stubConfig, { gitHubVersion: githubVersion }), (0, testing_utils_1.createFeatures)([]));
    const actualArgs = runnerConstructorStub.firstCall.args[1];
    t.is(actualArgs.includes("--new-analysis-summary"), flagPassed, `--new-analysis-summary should${flagPassed ? "" : "n't"} be passed`);
    t.is(actualArgs.includes("--no-new-analysis-summary"), negativeFlagPassed, `--no-new-analysis-summary should${negativeFlagPassed ? "" : "n't"} be passed`);
File diff suppressed because one or more lines are too long