build: refresh js files

parent 3548ff54b5
commit ae19466961

6 changed files with 322 additions and 95 deletions

lib/analyze-action.js (generated): 2 changes
@@ -188,7 +188,7 @@ async function run() {
         const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger);
         const pull_request = github.context.payload.pull_request;
         const diffRangePackDir = pull_request &&
-            (await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.ref, codeql, logger, features));
+            (await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.label, codeql, logger, features));
         await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger);
         await runAutobuildIfLegacyGoWorkflow(config, logger);
         dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger);
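Note on the change above: for a pull request opened from a fork, pull_request.head.ref is only the branch name, while pull_request.head.label carries the owner-qualified "owner:branch" form that the compare-commits API can resolve across repositories. The sketch below is illustrative only (the sample payload is invented, not taken from this commit); it shows how the basehead string used later in analyze.js is assembled from these values.

// Illustrative sketch, not part of this commit: how head.ref and head.label
// differ for a fork PR, and how the `${baseRef}...${headLabel}` basehead
// string used by getFileDiffsWithBasehead is built from them.
const examplePullRequest = {
    base: { ref: "main" },
    head: { ref: "my-feature", label: "fork-owner:my-feature" },
};
// head.ref would drop the fork owner; head.label keeps it.
const basehead = `${examplePullRequest.base.ref}...${examplePullRequest.head.label}`;
console.log(basehead); // "main...fork-owner:my-feature"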
File diff suppressed because one or more lines are too long
lib/analyze.js (generated): 234 changes
@@ -36,7 +36,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.CodeQLAnalysisError = void 0;
+exports.exportedForTesting = exports.CodeQLAnalysisError = void 0;
 exports.runExtraction = runExtraction;
 exports.dbIsFinalized = dbIsFinalized;
 exports.setupDiffInformedQueryRun = setupDiffInformedQueryRun;
@@ -51,12 +51,12 @@ const io = __importStar(require("@actions/io"));
 const del_1 = __importDefault(require("del"));
+const yaml = __importStar(require("js-yaml"));
 const actionsUtil = __importStar(require("./actions-util"));
 const api_client_1 = require("./api-client");
 const autobuild_1 = require("./autobuild");
 const codeql_1 = require("./codeql");
 const diagnostics_1 = require("./diagnostics");
 const environment_1 = require("./environment");
 const feature_flags_1 = require("./feature-flags");
-const gitUtils = __importStar(require("./git-utils"));
 const languages_1 = require("./languages");
 const logging_1 = require("./logging");
 const tools_features_1 = require("./tools-features");
@@ -152,124 +152,168 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
  * Set up the diff-informed analysis feature.
  *
  * @param baseRef The base branch name, used for calculating the diff range.
- * @param headRef The head branch name, used for calculating the diff range.
+ * @param headLabel The label that uniquely identifies the head branch across
+ * repositories, used for calculating the diff range.
  * @param codeql
  * @param logger
  * @param features
  * @returns Absolute path to the directory containing the extension pack for
  * the diff range information, or `undefined` if the feature is disabled.
  */
-async function setupDiffInformedQueryRun(baseRef, headRef, codeql, logger, features) {
+async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, features) {
     if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
         return undefined;
     }
     return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
-        const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headRef, logger);
-        return writeDiffRangeDataExtensionPack(logger, diffRanges);
+        const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headLabel, logger);
+        const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
+        if (packDir === undefined) {
+            logger.warning("Cannot create diff range extension pack for diff-informed queries; " +
+                "reverting to performing full analysis.");
+        }
+        else {
+            logger.info(`Successfully created diff range extension pack at ${packDir}.`);
+        }
+        return packDir;
     });
 }
 /**
  * Return the file line ranges that were added or modified in the pull request.
  *
  * @param baseRef The base branch name, used for calculating the diff range.
- * @param headRef The head branch name, used for calculating the diff range.
+ * @param headLabel The label that uniquely identifies the head branch across
+ * repositories, used for calculating the diff range.
  * @param logger
 * @returns An array of tuples, where each tuple contains the absolute path of a
  * file, the start line and the end line (both 1-based and inclusive) of an
  * added or modified range in that file. Returns `undefined` if the action was
  * not triggered by a pull request or if there was an error.
  */
-async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
-    const checkoutPath = actionsUtil.getOptionalInput("checkout_path");
-    if (checkoutPath === undefined) {
+async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
+    const fileDiffs = await getFileDiffsWithBasehead(baseRef, headLabel, logger);
+    if (fileDiffs === undefined) {
         return undefined;
     }
-    // To compute the merge bases between the base branch and the PR topic branch,
-    // we need to fetch the commit graph from the branch heads to those merge
-    // bases. The following 6-step procedure does so while limiting the amount of
-    // history fetched.
-    // Step 1: Deepen from the PR merge commit to the base branch head and the PR
-    // topic branch head, so that the PR merge commit is no longer considered a
-    // grafted commit.
-    await gitUtils.deepenGitHistory();
-    // Step 2: Fetch the base branch shallow history. This step ensures that the
-    // base branch name is present in the local repository. Normally the base
-    // branch name would be added by Step 4. However, if the base branch head is
-    // an ancestor of the PR topic branch head, Step 4 would fail without doing
-    // anything, so we need to fetch the base branch explicitly.
-    await gitUtils.gitFetch(baseRef, ["--depth=1"]);
-    // Step 3: Fetch the PR topic branch history, stopping when we reach commits
-    // that are reachable from the base branch head.
-    await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
-    // Step 4: Fetch the base branch history, stopping when we reach commits that
-    // are reachable from the PR topic branch head.
-    await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
-    // Step 5: Repack the history to remove the shallow grafts that were added by
-    // the previous fetches. This step works around a bug that causes subsequent
-    // deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
-    // See https://stackoverflow.com/q/63878612
-    await gitUtils.gitRepack(["-d"]);
-    // Step 6: Deepen the history so that we have the merge bases between the base
-    // branch and the PR topic branch.
-    await gitUtils.deepenGitHistory();
-    // To compute the exact same diff as GitHub would compute for the PR, we need
-    // to use the same merge base as GitHub. That is easy to do if there is only
-    // one merge base, which is by far the most common case. If there are multiple
-    // merge bases, we stop without producing a diff range.
-    const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
-    logger.info(`Merge bases: ${mergeBases.join(", ")}`);
-    if (mergeBases.length !== 1) {
-        logger.info("Cannot compute diff range because baseRef and headRef " +
-            `have ${mergeBases.length} merge bases (instead of exactly 1).`);
+    if (fileDiffs.length >= 300) {
+        // The "compare two commits" API returns a maximum of 300 changed files. If
+        // we see that many changed files, it is possible that there could be more,
+        // with the rest being truncated. In this case, we should not attempt to
+        // compute the diff ranges, as the result would be incomplete.
+        logger.warning(`Cannot retrieve the full diff because there are too many ` +
+            `(${fileDiffs.length}) changed files in the pull request.`);
         return undefined;
     }
-    const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders(mergeBases[0], headRef);
-    if (diffHunkHeaders === undefined) {
-        return undefined;
-    }
-    const results = new Array();
-    let changedFile = "";
-    for (const line of diffHunkHeaders) {
-        if (line.startsWith("+++ ")) {
-            const filePath = gitUtils.decodeGitFilePath(line.substring(4));
-            if (filePath.startsWith("b/")) {
-                // The file was edited: track all hunks in the file
-                changedFile = filePath.substring(2);
-            }
-            else if (filePath === "/dev/null") {
-                // The file was deleted: skip all hunks in the file
-                changedFile = "";
-            }
-            else {
-                logger.warning(`Failed to parse diff hunk header line: ${line}`);
-                return undefined;
-            }
-            continue;
-        }
-        if (line.startsWith("@@ ")) {
-            if (changedFile === "")
-                continue;
-            const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
-            if (match === null) {
-                logger.warning(`Failed to parse diff hunk header line: ${line}`);
-                return undefined;
-            }
-            const startLine = parseInt(match[1], 10);
-            const numLines = parseInt(match[2], 10);
-            if (numLines === 0) {
-                // The hunk was a deletion: skip it
-                continue;
-            }
-            const endLine = startLine + (numLines || 1) - 1;
-            results.push({
-                path: path.join(checkoutPath, changedFile),
-                startLine,
-                endLine,
-            });
-        }
+    const results = [];
+    for (const filediff of fileDiffs) {
+        const diffRanges = getDiffRanges(filediff, logger);
+        if (diffRanges === undefined) {
+            return undefined;
+        }
+        results.push(...diffRanges);
     }
     return results;
 }
+async function getFileDiffsWithBasehead(baseRef, headLabel, logger) {
+    const ownerRepo = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
+    const owner = ownerRepo[0];
+    const repo = ownerRepo[1];
+    const basehead = `${baseRef}...${headLabel}`;
+    try {
+        const response = await (0, api_client_1.getApiClient)().rest.repos.compareCommitsWithBasehead({
+            owner,
+            repo,
+            basehead,
+            per_page: 1,
+        });
+        logger.debug(`Response from compareCommitsWithBasehead(${basehead}):` +
+            `\n${JSON.stringify(response, null, 2)}`);
+        return response.data.files;
+    }
+    catch (error) {
+        if (error.status) {
+            logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
+            logger.debug(`Error running compareCommitsWithBasehead(${basehead}):` +
+                `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
+                `\nError Response: ${JSON.stringify(error.response, null, 2)}`);
+            return undefined;
+        }
+        else {
+            throw error;
+        }
+    }
+}
+function getDiffRanges(fileDiff, logger) {
+    if (fileDiff.patch === undefined) {
+        if (fileDiff.changes === 0) {
+            // There are situations where a changed file legitimately has no diff.
+            // For example, the file may be a binary file, or that the file may have
+            // been renamed with no changes to its contents. In these cases, the
+            // file would be reported as having 0 changes, and we can return an empty
+            // array to indicate no diff range in this file.
+            return [];
+        }
+        // If a file is reported to have nonzero changes but no patch, that may be
+        // due to the file diff being too large. In this case, we should return
+        // undefined to indicate that we cannot process the diff.
+        logger.warning(`No patch found for file ${fileDiff.filename} with ${fileDiff.changes} changes.`);
+        return undefined;
+    }
+    // Diff-informed queries expect the file path to be absolute. CodeQL always
+    // uses forward slashes as the path separator, so on Windows we need to
+    // replace any backslashes with forward slashes.
+    const filename = path
+        .join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
+        .replaceAll(path.sep, "/");
+    // The 1-based file line number of the current line
+    let currentLine = 0;
+    // The 1-based file line number that starts the current range of added lines
+    let additionRangeStartLine = undefined;
+    const diffRanges = [];
+    const diffLines = fileDiff.patch.split("\n");
+    // Adding a fake context line at the end ensures that the following loop will
+    // always terminate the last range of added lines.
+    diffLines.push(" ");
+    for (const diffLine of diffLines) {
+        if (diffLine.startsWith("-")) {
+            // Ignore deletions completely -- we do not even want to consider them when
+            // calculating consecutive ranges of added lines.
+            continue;
+        }
+        if (diffLine.startsWith("+")) {
+            if (additionRangeStartLine === undefined) {
+                additionRangeStartLine = currentLine;
+            }
+            currentLine++;
+            continue;
+        }
+        if (additionRangeStartLine !== undefined) {
+            // Any line that does not start with a "+" or "-" terminates the current
+            // range of added lines.
+            diffRanges.push({
+                path: filename,
+                startLine: additionRangeStartLine,
+                endLine: currentLine - 1,
+            });
+            additionRangeStartLine = undefined;
+        }
+        if (diffLine.startsWith("@@ ")) {
+            // A new hunk header line resets the current line number.
+            const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
+            if (match === null) {
+                logger.warning(`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`);
+                return undefined;
+            }
+            currentLine = parseInt(match[1], 10);
+            continue;
+        }
+        if (diffLine.startsWith(" ")) {
+            // An unchanged context line advances the current line number.
+            currentLine++;
+            continue;
+        }
+    }
+    return diffRanges;
+}
 /**
  * Create an extension pack in the temporary directory that contains the file
  * line ranges that were added or modified in the pull request.
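To make the new range computation above easier to follow, here is a condensed, standalone restatement of the line-counting idea in getDiffRanges. It is a sketch, not the shipped code: it omits the checkout-path handling and the error cases, and the helper name sketchAdditionRanges is invented for illustration.

// Standalone sketch (simplified from getDiffRanges above): walk a unified
// diff patch, track the 1-based line number in the new file, and emit one
// { startLine, endLine } range per consecutive run of added ("+") lines.
function sketchAdditionRanges(patch) {
    const ranges = [];
    let currentLine = 0;
    let rangeStart = undefined;
    const lines = patch.split("\n");
    lines.push(" "); // fake context line terminates a trailing run of "+" lines
    for (const line of lines) {
        if (line.startsWith("-")) {
            continue; // deletions do not advance the new-file line number
        }
        if (line.startsWith("+")) {
            if (rangeStart === undefined) {
                rangeStart = currentLine;
            }
            currentLine++;
            continue;
        }
        if (rangeStart !== undefined) {
            ranges.push({ startLine: rangeStart, endLine: currentLine - 1 });
            rangeStart = undefined;
        }
        const hunk = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
        if (hunk) {
            currentLine = parseInt(hunk[1], 10); // a hunk header resets the counter
        }
        else if (line.startsWith(" ")) {
            currentLine++; // a context line advances the counter
        }
    }
    return ranges;
}
// Example: the hunk starts at new-file line 50; after three context lines the
// two added lines land on 53 and 54.
// sketchAdditionRanges("@@ -30,6 +50,8 @@\n a\n b\n c\n+1\n+2\n d\n e\n f")
//   => [ { startLine: 53, endLine: 54 } ]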
@@ -303,7 +347,12 @@ extensions:
     data:
 `;
     let data = ranges
-        .map((range) => ` - ["${range.path}", ${range.startLine}, ${range.endLine}]\n`)
+        .map((range) =>
+        // Using yaml.dump() with `forceQuotes: true` ensures that all special
+        // characters are escaped, and that the path is always rendered as a
+        // quoted string on a single line.
+        ` - [${yaml.dump(range.path, { forceQuotes: true }).trim()}, ` +
+            `${range.startLine}, ${range.endLine}]\n`)
         .join("");
     if (!data) {
         // Ensure that the data extension is not empty, so that a pull request with
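A brief illustration of why the hunk above switches from manual string interpolation to yaml.dump with forceQuotes when emitting the data extension rows. The sample paths below are invented; the point is only the quoting behaviour of js-yaml.

// Illustrative only: yaml.dump with forceQuotes renders a string as a quoted,
// single-line YAML scalar, so paths containing quotes, "#", or other YAML
// metacharacters do not break the structure of the generated data extension.
const yaml = require("js-yaml");
const samplePaths = [
    "/checkout/path/plain.txt",
    '/checkout/path/tricky "name" #1.txt',
];
for (const p of samplePaths) {
    // .trim() drops the trailing newline that yaml.dump appends.
    console.log(` - [${yaml.dump(p, { forceQuotes: true }).trim()}, 1, 10]`);
}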
@@ -461,4 +510,7 @@ async function runCleanup(config, cleanupLevel, logger) {
     }
     logger.endGroup();
 }
+exports.exportedForTesting = {
+    getDiffRanges,
+};
 //# sourceMappingURL=analyze.js.map
File diff suppressed because one or more lines are too long
lib/analyze.test.js (generated): 175 changes
@@ -40,6 +40,7 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const ava_1 = __importDefault(require("ava"));
 const sinon = __importStar(require("sinon"));
+const actionsUtil = __importStar(require("./actions-util"));
 const analyze_1 = require("./analyze");
 const codeql_1 = require("./codeql");
 const feature_flags_1 = require("./feature-flags");
@@ -127,4 +128,178 @@ const util = __importStar(require("./util"));
         }
     });
 });
+function runGetDiffRanges(changes, patch) {
+    sinon
+        .stub(actionsUtil, "getRequiredInput")
+        .withArgs("checkout_path")
+        .returns("/checkout/path");
+    return analyze_1.exportedForTesting.getDiffRanges({
+        filename: "test.txt",
+        changes,
+        patch: patch?.join("\n"),
+    }, (0, logging_1.getRunnerLogger)(true));
+}
+(0, ava_1.default)("getDiffRanges: file unchanged", async (t) => {
+    const diffRanges = runGetDiffRanges(0, undefined);
+    t.deepEqual(diffRanges, []);
+});
+(0, ava_1.default)("getDiffRanges: file diff too large", async (t) => {
+    const diffRanges = runGetDiffRanges(1000000, undefined);
+    t.deepEqual(diffRanges, undefined);
+});
+(0, ava_1.default)("getDiffRanges: diff thunk with single addition range", async (t) => {
+    const diffRanges = runGetDiffRanges(2, [
+        "@@ -30,6 +50,8 @@",
+        " a",
+        " b",
+        " c",
+        "+1",
+        "+2",
+        " d",
+        " e",
+        " f",
+    ]);
+    t.deepEqual(diffRanges, [
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 53,
+            endLine: 54,
+        },
+    ]);
+});
+(0, ava_1.default)("getDiffRanges: diff thunk with single deletion range", async (t) => {
+    const diffRanges = runGetDiffRanges(2, [
+        "@@ -30,8 +50,6 @@",
+        " a",
+        " b",
+        " c",
+        "-1",
+        "-2",
+        " d",
+        " e",
+        " f",
+    ]);
+    t.deepEqual(diffRanges, []);
+});
+(0, ava_1.default)("getDiffRanges: diff thunk with single update range", async (t) => {
+    const diffRanges = runGetDiffRanges(2, [
+        "@@ -30,7 +50,7 @@",
+        " a",
+        " b",
+        " c",
+        "-1",
+        "+2",
+        " d",
+        " e",
+        " f",
+    ]);
+    t.deepEqual(diffRanges, [
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 53,
+            endLine: 53,
+        },
+    ]);
+});
+(0, ava_1.default)("getDiffRanges: diff thunk with addition ranges", async (t) => {
+    const diffRanges = runGetDiffRanges(2, [
+        "@@ -30,7 +50,9 @@",
+        " a",
+        " b",
+        " c",
+        "+1",
+        " c",
+        "+2",
+        " d",
+        " e",
+        " f",
+    ]);
+    t.deepEqual(diffRanges, [
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 53,
+            endLine: 53,
+        },
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 55,
+            endLine: 55,
+        },
+    ]);
+});
+(0, ava_1.default)("getDiffRanges: diff thunk with mixed ranges", async (t) => {
+    const diffRanges = runGetDiffRanges(2, [
+        "@@ -30,7 +50,7 @@",
+        " a",
+        " b",
+        " c",
+        "-1",
+        " d",
+        "-2",
+        "+3",
+        " e",
+        " f",
+        "+4",
+        "+5",
+        " g",
+        " h",
+        " i",
+    ]);
+    t.deepEqual(diffRanges, [
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 54,
+            endLine: 54,
+        },
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 57,
+            endLine: 58,
+        },
+    ]);
+});
+(0, ava_1.default)("getDiffRanges: multiple diff thunks", async (t) => {
+    const diffRanges = runGetDiffRanges(2, [
+        "@@ -30,6 +50,8 @@",
+        " a",
+        " b",
+        " c",
+        "+1",
+        "+2",
+        " d",
+        " e",
+        " f",
+        "@@ -130,6 +150,8 @@",
+        " a",
+        " b",
+        " c",
+        "+1",
+        "+2",
+        " d",
+        " e",
+        " f",
+    ]);
+    t.deepEqual(diffRanges, [
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 53,
+            endLine: 54,
+        },
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 153,
+            endLine: 154,
+        },
+    ]);
+});
+(0, ava_1.default)("getDiffRanges: no diff context lines", async (t) => {
+    const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
+    t.deepEqual(diffRanges, [
+        {
+            path: "/checkout/path/test.txt",
+            startLine: 50,
+            endLine: 51,
+        },
+    ]);
+});
 //# sourceMappingURL=analyze.test.js.map
File diff suppressed because one or more lines are too long
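For readers checking the new test expectations: the start and end lines follow directly from the hunk headers in the stub patches. A worked example for the first getDiffRanges test, using only values that appear in the test itself:

// Worked example for "getDiffRanges: diff thunk with single addition range":
// the hunk header "@@ -30,6 +50,8 @@" says the new-file content shown in the
// hunk starts at line 50. Three context lines (" a", " b", " c") advance the
// counter to 53, so the added lines "+1" and "+2" occupy lines 53 and 54,
// which is exactly the { startLine: 53, endLine: 54 } range the test expects.
const newFileStart = 50;      // from "+50,8" in the hunk header
const contextBeforeAdds = 3;  // " a", " b", " c"
const addedLines = 2;         // "+1", "+2"
const startLine = newFileStart + contextBeforeAdds; // 53
const endLine = startLine + addedLines - 1;         // 54
console.log({ startLine, endLine });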