Merge branch 'main' into dependabot/npm_and_yarn/npm-006da05bd8

This commit is contained in:
Henry Mercer 2025-01-15 17:30:02 +00:00 committed by GitHub
commit 4de061d85d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
18 changed files with 743 additions and 214 deletions

View file

@ -19,8 +19,6 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix:
node-types-version: [16.11, current] # run tests on 16.11 while CodeQL Action v2 is still supported
steps: steps:
- name: Checkout - name: Checkout
@ -32,34 +30,10 @@ jobs:
- name: Upload sarif - name: Upload sarif
uses: github/codeql-action/upload-sarif@v3 uses: github/codeql-action/upload-sarif@v3
# Only upload SARIF for the latest version of Node.js
if: "!cancelled() && matrix.node-types-version == 'current' && !startsWith(github.head_ref, 'dependabot/')"
with: with:
sarif_file: eslint.sarif sarif_file: eslint.sarif
category: eslint category: eslint
- name: Update version of @types/node
if: matrix.node-types-version != 'current'
env:
NODE_TYPES_VERSION: ${{ matrix.node-types-version }}
run: |
# Export `NODE_TYPES_VERSION` so it's available to jq
export NODE_TYPES_VERSION="${NODE_TYPES_VERSION}"
contents=$(jq '.devDependencies."@types/node" = env.NODE_TYPES_VERSION' package.json)
echo "${contents}" > package.json
# Usually we run `npm install` on macOS to ensure that we pick up macOS-only dependencies.
# However we're not checking in the updated lockfile here, so it's fine to run
# `npm install` on Linux.
npm install
if [ ! -z "$(git status --porcelain)" ]; then
git config --global user.email "github-actions@github.com"
git config --global user.name "github-actions[bot]"
# The period in `git add --all .` ensures that we stage deleted files too.
git add --all .
git commit -m "Use @types/node=${NODE_TYPES_VERSION}"
fi
- name: Check generated JS - name: Check generated JS
run: .github/workflows/script/check-js.sh run: .github/workflows/script/check-js.sh

View file

@ -63,7 +63,7 @@ Here are a few things you can do that will increase the likelihood of your pull
You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml). You can start a release by triggering this workflow via [workflow dispatch](https://github.com/github/codeql-action/actions/workflows/update-release-branch.yml).
1. The workflow run will open a pull request titled "Merge main into releases/v3". Follow the steps on the checklist in the pull request. Once you've checked off all but the last two of these, approve the PR and automerge it. 1. The workflow run will open a pull request titled "Merge main into releases/v3". Follow the steps on the checklist in the pull request. Once you've checked off all but the last two of these, approve the PR and automerge it.
1. When the "Merge main into releases/v3" pull request is merged into the `releases/v3` branch, a mergeback pull request to `main` will be automatically created. This mergeback pull request incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v3" pull request, and bumps the patch version of the CodeQL Action. 1. When the "Merge main into releases/v3" pull request is merged into the `releases/v3` branch, a mergeback pull request to `main` will be automatically created. This mergeback pull request incorporates the changelog updates into `main`, tags the release using the merge commit of the "Merge main into releases/v3" pull request, and bumps the patch version of the CodeQL Action.
1. If a backport to an older major version is required, a pull request targeting that version's branch will also be automatically created 1. If a backport to an older major version is required, a pull request targeting that version's branch will also be automatically created.
1. Approve the mergeback and backport pull request (if applicable) and automerge them. 1. Approve the mergeback and backport pull request (if applicable) and automerge them.
Once the mergeback and backport pull request have been merged, the release is complete. Once the mergeback and backport pull request have been merged, the release is complete.
@ -109,6 +109,7 @@ To add a new major version of the Action:
1. Change the `version` field of `package.json` by running `npm version x.y.z` where `x` is the new major version, and `y` and `z` match the latest minor and patch versions of the last release. 1. Change the `version` field of `package.json` by running `npm version x.y.z` where `x` is the new major version, and `y` and `z` match the latest minor and patch versions of the last release.
1. Update appropriate documentation to explain the reasoning behind the releases: see [the diff](https://github.com/github/codeql-action/pull/2677/commits/913d60579d4b560addf53ec3c493d491dd3c1378) in our last major version deprecation for examples on which parts of the documentation should be updated. 1. Update appropriate documentation to explain the reasoning behind the releases: see [the diff](https://github.com/github/codeql-action/pull/2677/commits/913d60579d4b560addf53ec3c493d491dd3c1378) in our last major version deprecation for examples on which parts of the documentation should be updated.
1. Consider the timeline behind deprecating the prior Action version: see [CodeQL Action deprecation documentation](#deprecating-a-codeql-action-major-version-write-access-required) 1. Consider the timeline behind deprecating the prior Action version: see [CodeQL Action deprecation documentation](#deprecating-a-codeql-action-major-version-write-access-required)
1. If the new major version runs on a new version of Node, add a PR check to ensure the codebase continues to compile against the previous version of Node. See [Remove Node 16 compilation PR check](https://github.com/github/codeql-action/pull/2695) for an example.
## Deprecating a CodeQL Action major version (write access required) ## Deprecating a CodeQL Action major version (write access required)

2
lib/analyze-action.js generated
View file

@ -188,7 +188,7 @@ async function run() {
const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger); const memory = util.getMemoryFlag(actionsUtil.getOptionalInput("ram") || process.env["CODEQL_RAM"], logger);
const pull_request = github.context.payload.pull_request; const pull_request = github.context.payload.pull_request;
const diffRangePackDir = pull_request && const diffRangePackDir = pull_request &&
(await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.ref, codeql, logger, features)); (await (0, analyze_1.setupDiffInformedQueryRun)(pull_request.base.ref, pull_request.head.label, codeql, logger, features));
await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger); await (0, analyze_1.warnIfGoInstalledAfterInit)(config, logger);
await runAutobuildIfLegacyGoWorkflow(config, logger); await runAutobuildIfLegacyGoWorkflow(config, logger);
dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger); dbCreationTimings = await (0, analyze_1.runFinalize)(outputDir, threads, memory, codeql, config, logger);

File diff suppressed because one or more lines are too long

234
lib/analyze.js generated
View file

@ -36,7 +36,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.CodeQLAnalysisError = void 0; exports.exportedForTesting = exports.CodeQLAnalysisError = void 0;
exports.runExtraction = runExtraction; exports.runExtraction = runExtraction;
exports.dbIsFinalized = dbIsFinalized; exports.dbIsFinalized = dbIsFinalized;
exports.setupDiffInformedQueryRun = setupDiffInformedQueryRun; exports.setupDiffInformedQueryRun = setupDiffInformedQueryRun;
@ -51,12 +51,12 @@ const io = __importStar(require("@actions/io"));
const del_1 = __importDefault(require("del")); const del_1 = __importDefault(require("del"));
const yaml = __importStar(require("js-yaml")); const yaml = __importStar(require("js-yaml"));
const actionsUtil = __importStar(require("./actions-util")); const actionsUtil = __importStar(require("./actions-util"));
const api_client_1 = require("./api-client");
const autobuild_1 = require("./autobuild"); const autobuild_1 = require("./autobuild");
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const diagnostics_1 = require("./diagnostics"); const diagnostics_1 = require("./diagnostics");
const environment_1 = require("./environment"); const environment_1 = require("./environment");
const feature_flags_1 = require("./feature-flags"); const feature_flags_1 = require("./feature-flags");
const gitUtils = __importStar(require("./git-utils"));
const languages_1 = require("./languages"); const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
const tools_features_1 = require("./tools-features"); const tools_features_1 = require("./tools-features");
@ -152,124 +152,168 @@ async function finalizeDatabaseCreation(codeql, config, threadsFlag, memoryFlag,
* Set up the diff-informed analysis feature. * Set up the diff-informed analysis feature.
* *
* @param baseRef The base branch name, used for calculating the diff range. * @param baseRef The base branch name, used for calculating the diff range.
* @param headRef The head branch name, used for calculating the diff range. * @param headLabel The label that uniquely identifies the head branch across
* repositories, used for calculating the diff range.
* @param codeql * @param codeql
* @param logger * @param logger
* @param features * @param features
* @returns Absolute path to the directory containing the extension pack for * @returns Absolute path to the directory containing the extension pack for
* the diff range information, or `undefined` if the feature is disabled. * the diff range information, or `undefined` if the feature is disabled.
*/ */
async function setupDiffInformedQueryRun(baseRef, headRef, codeql, logger, features) { async function setupDiffInformedQueryRun(baseRef, headLabel, codeql, logger, features) {
if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) { if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
return undefined; return undefined;
} }
return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => { return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headRef, logger); const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headLabel, logger);
return writeDiffRangeDataExtensionPack(logger, diffRanges); const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
if (packDir === undefined) {
logger.warning("Cannot create diff range extension pack for diff-informed queries; " +
"reverting to performing full analysis.");
}
else {
logger.info(`Successfully created diff range extension pack at ${packDir}.`);
}
return packDir;
}); });
} }
/** /**
* Return the file line ranges that were added or modified in the pull request. * Return the file line ranges that were added or modified in the pull request.
* *
* @param baseRef The base branch name, used for calculating the diff range. * @param baseRef The base branch name, used for calculating the diff range.
* @param headRef The head branch name, used for calculating the diff range. * @param headLabel The label that uniquely identifies the head branch across
* repositories, used for calculating the diff range.
* @param logger * @param logger
* @returns An array of tuples, where each tuple contains the absolute path of a * @returns An array of tuples, where each tuple contains the absolute path of a
* file, the start line and the end line (both 1-based and inclusive) of an * file, the start line and the end line (both 1-based and inclusive) of an
* added or modified range in that file. Returns `undefined` if the action was * added or modified range in that file. Returns `undefined` if the action was
* not triggered by a pull request or if there was an error. * not triggered by a pull request or if there was an error.
*/ */
async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) { async function getPullRequestEditedDiffRanges(baseRef, headLabel, logger) {
const checkoutPath = actionsUtil.getOptionalInput("checkout_path"); const fileDiffs = await getFileDiffsWithBasehead(baseRef, headLabel, logger);
if (checkoutPath === undefined) { if (fileDiffs === undefined) {
return undefined; return undefined;
} }
// To compute the merge bases between the base branch and the PR topic branch, if (fileDiffs.length >= 300) {
// we need to fetch the commit graph from the branch heads to those merge // The "compare two commits" API returns a maximum of 300 changed files. If
// bases. The following 6-step procedure does so while limiting the amount of // we see that many changed files, it is possible that there could be more,
// history fetched. // with the rest being truncated. In this case, we should not attempt to
// Step 1: Deepen from the PR merge commit to the base branch head and the PR // compute the diff ranges, as the result would be incomplete.
// topic branch head, so that the PR merge commit is no longer considered a logger.warning(`Cannot retrieve the full diff because there are too many ` +
// grafted commit. `(${fileDiffs.length}) changed files in the pull request.`);
await gitUtils.deepenGitHistory();
// Step 2: Fetch the base branch shallow history. This step ensures that the
// base branch name is present in the local repository. Normally the base
// branch name would be added by Step 4. However, if the base branch head is
// an ancestor of the PR topic branch head, Step 4 would fail without doing
// anything, so we need to fetch the base branch explicitly.
await gitUtils.gitFetch(baseRef, ["--depth=1"]);
// Step 3: Fetch the PR topic branch history, stopping when we reach commits
// that are reachable from the base branch head.
await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]);
// Step 4: Fetch the base branch history, stopping when we reach commits that
// are reachable from the PR topic branch head.
await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
// Step 5: Repack the history to remove the shallow grafts that were added by
// the previous fetches. This step works around a bug that causes subsequent
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
// See https://stackoverflow.com/q/63878612
await gitUtils.gitRepack(["-d"]);
// Step 6: Deepen the history so that we have the merge bases between the base
// branch and the PR topic branch.
await gitUtils.deepenGitHistory();
// To compute the exact same diff as GitHub would compute for the PR, we need
// to use the same merge base as GitHub. That is easy to do if there is only
// one merge base, which is by far the most common case. If there are multiple
// merge bases, we stop without producing a diff range.
const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]);
logger.info(`Merge bases: ${mergeBases.join(", ")}`);
if (mergeBases.length !== 1) {
logger.info("Cannot compute diff range because baseRef and headRef " +
`have ${mergeBases.length} merge bases (instead of exactly 1).`);
return undefined; return undefined;
} }
const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders(mergeBases[0], headRef); const results = [];
if (diffHunkHeaders === undefined) { for (const filediff of fileDiffs) {
return undefined; const diffRanges = getDiffRanges(filediff, logger);
} if (diffRanges === undefined) {
const results = new Array(); return undefined;
let changedFile = "";
for (const line of diffHunkHeaders) {
if (line.startsWith("+++ ")) {
const filePath = gitUtils.decodeGitFilePath(line.substring(4));
if (filePath.startsWith("b/")) {
// The file was edited: track all hunks in the file
changedFile = filePath.substring(2);
}
else if (filePath === "/dev/null") {
// The file was deleted: skip all hunks in the file
changedFile = "";
}
else {
logger.warning(`Failed to parse diff hunk header line: ${line}`);
return undefined;
}
continue;
}
if (line.startsWith("@@ ")) {
if (changedFile === "")
continue;
const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
if (match === null) {
logger.warning(`Failed to parse diff hunk header line: ${line}`);
return undefined;
}
const startLine = parseInt(match[1], 10);
const numLines = parseInt(match[2], 10);
if (numLines === 0) {
// The hunk was a deletion: skip it
continue;
}
const endLine = startLine + (numLines || 1) - 1;
results.push({
path: path.join(checkoutPath, changedFile),
startLine,
endLine,
});
} }
results.push(...diffRanges);
} }
return results; return results;
} }
/**
 * Fetch the list of changed files for the pull request from the GitHub
 * "compare two commits" API, using `<base>...<head>` basehead notation.
 *
 * @param baseRef The base branch name.
 * @param headLabel The label that uniquely identifies the head branch across
 *   repositories.
 * @param logger
 * @returns The file diffs reported by the API, or `undefined` if the API
 *   request failed with an HTTP error status.
 */
async function getFileDiffsWithBasehead(baseRef, headLabel, logger) {
    // GITHUB_REPOSITORY always has the form "<owner>/<repo>".
    const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
    const basehead = `${baseRef}...${headLabel}`;
    try {
        // NOTE(review): per_page appears to apply to the commit listing, with
        // the changed-files list returned in full (up to the API's 300-file
        // cap) regardless -- confirm against the compare-commits API docs.
        const response = await (0, api_client_1.getApiClient)().rest.repos.compareCommitsWithBasehead({
            owner,
            repo,
            basehead,
            per_page: 1,
        });
        logger.debug(`Response from compareCommitsWithBasehead(${basehead}):` +
            `\n${JSON.stringify(response, null, 2)}`);
        return response.data.files;
    }
    catch (error) {
        // A failure without an HTTP status is unexpected: rethrow it.
        if (!error.status) {
            throw error;
        }
        logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
        logger.debug(`Error running compareCommitsWithBasehead(${basehead}):` +
            `\nRequest: ${JSON.stringify(error.request, null, 2)}` +
            `\nError Response: ${JSON.stringify(error.response, null, 2)}`);
        return undefined;
    }
}
/**
 * Compute the added/modified line ranges for a single file diff returned by
 * the "compare two commits" API.
 *
 * @param fileDiff A file entry from the compare API response: `filename` is
 *   the repo-relative path, `changes` the reported number of changed lines,
 *   and `patch` the unified diff hunks for the file (may be absent).
 * @param logger
 * @returns An array of ranges -- absolute `path`, plus 1-based inclusive
 *   `startLine`/`endLine` in the new version of the file -- or `[]` if the
 *   file legitimately has no line diff, or `undefined` if the patch is
 *   missing (e.g. too large) or cannot be parsed.
 */
function getDiffRanges(fileDiff, logger) {
    if (fileDiff.patch === undefined) {
        if (fileDiff.changes === 0) {
            // There are situations where a changed file legitimately has no diff.
            // For example, the file may be a binary file, or that the file may have
            // been renamed with no changes to its contents. In these cases, the
            // file would be reported as having 0 changes, and we can return an empty
            // array to indicate no diff range in this file.
            return [];
        }
        // If a file is reported to have nonzero changes but no patch, that may be
        // due to the file diff being too large. In this case, we should return
        // undefined to indicate that we cannot process the diff.
        logger.warning(`No patch found for file ${fileDiff.filename} with ${fileDiff.changes} changes.`);
        return undefined;
    }
    // Diff-informed queries expect the file path to be absolute. CodeQL always
    // uses forward slashes as the path separator, so on Windows we need to
    // replace any backslashes with forward slashes.
    const filename = path
        .join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
        .replaceAll(path.sep, "/");
    // The 1-based file line number of the current line
    let currentLine = 0;
    // The 1-based file line number that starts the current range of added lines
    let additionRangeStartLine = undefined;
    const diffRanges = [];
    const diffLines = fileDiff.patch.split("\n");
    // Adding a fake context line at the end ensures that the following loop will
    // always terminate the last range of added lines.
    diffLines.push(" ");
    // Scan the unified diff line by line, tracking the new-file line number and
    // accumulating maximal runs of consecutive added lines into ranges.
    for (const diffLine of diffLines) {
        if (diffLine.startsWith("-")) {
            // Ignore deletions completely -- we do not even want to consider them when
            // calculating consecutive ranges of added lines. As a consequence,
            // additions separated only by deleted lines merge into one range.
            continue;
        }
        if (diffLine.startsWith("+")) {
            if (additionRangeStartLine === undefined) {
                additionRangeStartLine = currentLine;
            }
            currentLine++;
            continue;
        }
        if (additionRangeStartLine !== undefined) {
            // Any line that does not start with a "+" or "-" terminates the current
            // range of added lines.
            diffRanges.push({
                path: filename,
                startLine: additionRangeStartLine,
                endLine: currentLine - 1,
            });
            additionRangeStartLine = undefined;
        }
        if (diffLine.startsWith("@@ ")) {
            // A new hunk header line resets the current line number.
            const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
            if (match === null) {
                logger.warning(`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`);
                return undefined;
            }
            currentLine = parseInt(match[1], 10);
            continue;
        }
        if (diffLine.startsWith(" ")) {
            // An unchanged context line advances the current line number.
            currentLine++;
            continue;
        }
    }
    return diffRanges;
}
/** /**
* Create an extension pack in the temporary directory that contains the file * Create an extension pack in the temporary directory that contains the file
* line ranges that were added or modified in the pull request. * line ranges that were added or modified in the pull request.
@ -303,7 +347,12 @@ extensions:
data: data:
`; `;
let data = ranges let data = ranges
.map((range) => ` - ["${range.path}", ${range.startLine}, ${range.endLine}]\n`) .map((range) =>
// Using yaml.dump() with `forceQuotes: true` ensures that all special
// characters are escaped, and that the path is always rendered as a
// quoted string on a single line.
` - [${yaml.dump(range.path, { forceQuotes: true }).trim()}, ` +
`${range.startLine}, ${range.endLine}]\n`)
.join(""); .join("");
if (!data) { if (!data) {
// Ensure that the data extension is not empty, so that a pull request with // Ensure that the data extension is not empty, so that a pull request with
@ -461,4 +510,7 @@ async function runCleanup(config, cleanupLevel, logger) {
} }
logger.endGroup(); logger.endGroup();
} }
// Exposed for unit tests only -- not part of this module's public API.
exports.exportedForTesting = {
    getDiffRanges,
};
//# sourceMappingURL=analyze.js.map //# sourceMappingURL=analyze.js.map

File diff suppressed because one or more lines are too long

175
lib/analyze.test.js generated
View file

@ -40,6 +40,7 @@ const fs = __importStar(require("fs"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
const ava_1 = __importDefault(require("ava")); const ava_1 = __importDefault(require("ava"));
const sinon = __importStar(require("sinon")); const sinon = __importStar(require("sinon"));
const actionsUtil = __importStar(require("./actions-util"));
const analyze_1 = require("./analyze"); const analyze_1 = require("./analyze");
const codeql_1 = require("./codeql"); const codeql_1 = require("./codeql");
const feature_flags_1 = require("./feature-flags"); const feature_flags_1 = require("./feature-flags");
@ -127,4 +128,178 @@ const util = __importStar(require("./util"));
} }
}); });
}); });
// Test helper: stubs the `checkout_path` action input and invokes the
// internal getDiffRanges() with a synthetic file diff assembled from the
// given change count and patch lines.
function runGetDiffRanges(changes, patch) {
    const inputStub = sinon.stub(actionsUtil, "getRequiredInput");
    inputStub.withArgs("checkout_path").returns("/checkout/path");
    const syntheticFileDiff = {
        filename: "test.txt",
        changes,
        patch: patch?.join("\n"),
    };
    const logger = (0, logging_1.getRunnerLogger)(true);
    return analyze_1.exportedForTesting.getDiffRanges(syntheticFileDiff, logger);
}
(0, ava_1.default)("getDiffRanges: file unchanged", async (t) => {
const diffRanges = runGetDiffRanges(0, undefined);
t.deepEqual(diffRanges, []);
});
(0, ava_1.default)("getDiffRanges: file diff too large", async (t) => {
const diffRanges = runGetDiffRanges(1000000, undefined);
t.deepEqual(diffRanges, undefined);
});
(0, ava_1.default)("getDiffRanges: diff thunk with single addition range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
]);
});
(0, ava_1.default)("getDiffRanges: diff thunk with single deletion range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,8 +50,6 @@",
" a",
" b",
" c",
"-1",
"-2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, []);
});
(0, ava_1.default)("getDiffRanges: diff thunk with single update range", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
" b",
" c",
"-1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 53,
},
]);
});
(0, ava_1.default)("getDiffRanges: diff thunk with addition ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,9 @@",
" a",
" b",
" c",
"+1",
" c",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 53,
},
{
path: "/checkout/path/test.txt",
startLine: 55,
endLine: 55,
},
]);
});
(0, ava_1.default)("getDiffRanges: diff thunk with mixed ranges", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,7 +50,7 @@",
" a",
" b",
" c",
"-1",
" d",
"-2",
"+3",
" e",
" f",
"+4",
"+5",
" g",
" h",
" i",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 54,
endLine: 54,
},
{
path: "/checkout/path/test.txt",
startLine: 57,
endLine: 58,
},
]);
});
(0, ava_1.default)("getDiffRanges: multiple diff thunks", async (t) => {
const diffRanges = runGetDiffRanges(2, [
"@@ -30,6 +50,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
"@@ -130,6 +150,8 @@",
" a",
" b",
" c",
"+1",
"+2",
" d",
" e",
" f",
]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 53,
endLine: 54,
},
{
path: "/checkout/path/test.txt",
startLine: 153,
endLine: 154,
},
]);
});
(0, ava_1.default)("getDiffRanges: no diff context lines", async (t) => {
const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
t.deepEqual(diffRanges, [
{
path: "/checkout/path/test.txt",
startLine: 50,
endLine: 51,
},
]);
});
//# sourceMappingURL=analyze.test.js.map //# sourceMappingURL=analyze.test.js.map

File diff suppressed because one or more lines are too long

2
lib/languages.js generated
View file

@ -25,6 +25,8 @@ exports.LANGUAGE_ALIASES = {
"c#": Language.csharp, "c#": Language.csharp,
kotlin: Language.java, kotlin: Language.java,
typescript: Language.javascript, typescript: Language.javascript,
"javascript-typescript": Language.javascript,
"java-kotlin": Language.java,
}; };
/** /**
* Translate from user input or GitHub's API names for languages to CodeQL's * Translate from user input or GitHub's API names for languages to CodeQL's

View file

@ -1 +1 @@
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAgCA,sCAgBC;AAED,4CAQC;AAED,8CAEC;AA9DD,wCAAwC;AACxC,IAAY,QAWX;AAXD,WAAY,QAAQ;IAClB,+BAAmB,CAAA;IACnB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EAXW,QAAQ,wBAAR,QAAQ,QAWnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;CAChC,CAAC;AAEF;;;;;;;;GAQG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE,CAAC;QACzB,OAAO,QAAoB,CAAC;IAC9B,CAAC;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE,CAAC;QACjC,OAAO,wBAAgB,CAAC,QAAQ,CAAC,CAAC;IACpC,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC"} 
{"version":3,"file":"languages.js","sourceRoot":"","sources":["../src/languages.ts"],"names":[],"mappings":";;;AAkCA,sCAgBC;AAED,4CAQC;AAED,8CAEC;AAhED,wCAAwC;AACxC,IAAY,QAWX;AAXD,WAAY,QAAQ;IAClB,+BAAmB,CAAA;IACnB,6BAAiB,CAAA;IACjB,uBAAW,CAAA;IACX,qBAAS,CAAA;IACT,yBAAa,CAAA;IACb,qCAAyB,CAAA;IACzB,6BAAiB,CAAA;IACjB,yBAAa,CAAA;IACb,yBAAa,CAAA;IACb,2BAAe,CAAA;AACjB,CAAC,EAXW,QAAQ,wBAAR,QAAQ,QAWnB;AAED,iCAAiC;AACpB,QAAA,gBAAgB,GAAiC;IAC5D,CAAC,EAAE,QAAQ,CAAC,GAAG;IACf,KAAK,EAAE,QAAQ,CAAC,GAAG;IACnB,IAAI,EAAE,QAAQ,CAAC,MAAM;IACrB,MAAM,EAAE,QAAQ,CAAC,IAAI;IACrB,UAAU,EAAE,QAAQ,CAAC,UAAU;IAC/B,uBAAuB,EAAE,QAAQ,CAAC,UAAU;IAC5C,aAAa,EAAE,QAAQ,CAAC,IAAI;CAC7B,CAAC;AAEF;;;;;;;;GAQG;AACH,SAAgB,aAAa,CAAC,QAAgB;IAC5C,0BAA0B;IAC1B,QAAQ,GAAG,QAAQ,CAAC,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;IAEzC,6BAA6B;IAC7B,IAAI,QAAQ,IAAI,QAAQ,EAAE,CAAC;QACzB,OAAO,QAAoB,CAAC;IAC9B,CAAC;IAED,iEAAiE;IACjE,oCAAoC;IACpC,IAAI,QAAQ,IAAI,wBAAgB,EAAE,CAAC;QACjC,OAAO,wBAAgB,CAAC,QAAQ,CAAC,CAAC;IACpC,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAgB,gBAAgB,CAAC,QAAkB;IACjD,OAAO;QACL,QAAQ,CAAC,GAAG;QACZ,QAAQ,CAAC,MAAM;QACf,QAAQ,CAAC,EAAE;QACX,QAAQ,CAAC,IAAI;QACb,QAAQ,CAAC,KAAK;KACf,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACvB,CAAC;AAED,SAAgB,iBAAiB,CAAC,QAAkB;IAClD,OAAO,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;AACrC,CAAC"}

View file

@ -39,6 +39,7 @@ const core = __importStar(require("@actions/core"));
const toolcache = __importStar(require("@actions/tool-cache")); const toolcache = __importStar(require("@actions/tool-cache"));
const node_forge_1 = require("node-forge"); const node_forge_1 = require("node-forge");
const actionsUtil = __importStar(require("./actions-util")); const actionsUtil = __importStar(require("./actions-util"));
const languages_1 = require("./languages");
const logging_1 = require("./logging"); const logging_1 = require("./logging");
const util = __importStar(require("./util")); const util = __importStar(require("./util"));
const UPDATEJOB_PROXY = "update-job-proxy"; const UPDATEJOB_PROXY = "update-job-proxy";
@ -47,6 +48,19 @@ const UPDATEJOB_PROXY_URL_PREFIX = "https://github.com/github/codeql-action/rele
const PROXY_USER = "proxy_user"; const PROXY_USER = "proxy_user";
const KEY_SIZE = 2048; const KEY_SIZE = 2048;
const KEY_EXPIRY_YEARS = 2; const KEY_EXPIRY_YEARS = 2;
// Maps each CodeQL language to the package-registry type whose credentials
// are relevant when analyzing that language. Used below (getCredentials) to
// filter the configured registry credentials down to the matching type.
const LANGUAGE_TO_REGISTRY_TYPE = {
    java: "maven_repository",
    csharp: "nuget_feed",
    javascript: "npm_registry",
    python: "python_index",
    ruby: "rubygems_server",
    rust: "cargo_registry",
    // We do not have an established proxy type for these languages, thus leaving empty.
    actions: "",
    cpp: "",
    go: "",
    swift: "",
};
const CERT_SUBJECT = [ const CERT_SUBJECT = [
{ {
name: "commonName", name: "commonName",
@ -170,6 +184,11 @@ async function startProxy(binPath, config, logFilePath, logger) {
function getCredentials(logger) { function getCredentials(logger) {
const registriesCredentials = actionsUtil.getOptionalInput("registries_credentials"); const registriesCredentials = actionsUtil.getOptionalInput("registries_credentials");
const registrySecrets = actionsUtil.getOptionalInput("registry_secrets"); const registrySecrets = actionsUtil.getOptionalInput("registry_secrets");
const languageString = actionsUtil.getOptionalInput("language");
const language = languageString ? (0, languages_1.parseLanguage)(languageString) : undefined;
const registryTypeForLanguage = language
? LANGUAGE_TO_REGISTRY_TYPE[language]
: undefined;
let credentialsStr; let credentialsStr;
if (registriesCredentials !== undefined) { if (registriesCredentials !== undefined) {
logger.info(`Using registries_credentials input.`); logger.info(`Using registries_credentials input.`);
@ -190,6 +209,11 @@ function getCredentials(logger) {
if (e.url === undefined && e.host === undefined) { if (e.url === undefined && e.host === undefined) {
throw new Error("Invalid credentials - must specify host or url"); throw new Error("Invalid credentials - must specify host or url");
} }
// Filter credentials based on language if specified. `type` is the registry type.
// E.g., "maven_feed" for Java/Kotlin, "nuget_repository" for C#.
if (e.type !== registryTypeForLanguage) {
continue;
}
out.push({ out.push({
type: e.type, type: e.type,
host: e.host, host: e.host,

File diff suppressed because one or more lines are too long

View file

@ -268,7 +268,7 @@ async function run() {
pull_request && pull_request &&
(await setupDiffInformedQueryRun( (await setupDiffInformedQueryRun(
pull_request.base.ref as string, pull_request.base.ref as string,
pull_request.head.ref as string, pull_request.head.label as string,
codeql, codeql,
logger, logger,
features, features,

View file

@ -4,7 +4,8 @@ import * as path from "path";
import test from "ava"; import test from "ava";
import * as sinon from "sinon"; import * as sinon from "sinon";
import { runQueries } from "./analyze"; import * as actionsUtil from "./actions-util";
import { exportedForTesting, runQueries } from "./analyze";
import { setCodeQL } from "./codeql"; import { setCodeQL } from "./codeql";
import { Feature } from "./feature-flags"; import { Feature } from "./feature-flags";
import { Language } from "./languages"; import { Language } from "./languages";
@ -119,3 +120,190 @@ test("status report fields", async (t) => {
} }
}); });
}); });
// Test helper: invokes the (test-only exported) getDiffRanges with a synthetic
// file diff for "test.txt", after stubbing the `checkout_path` action input so
// that returned paths are resolved against a fixed "/checkout/path" root.
function runGetDiffRanges(changes: number, patch: string[] | undefined): any {
  const inputStub = sinon.stub(actionsUtil, "getRequiredInput");
  inputStub.withArgs("checkout_path").returns("/checkout/path");
  const fileDiff = {
    filename: "test.txt",
    changes,
    patch: patch === undefined ? undefined : patch.join("\n"),
  };
  return exportedForTesting.getDiffRanges(fileDiff, getRunnerLogger(true));
}
// A file reported with zero changes and no patch is legitimately unchanged
// (e.g. a pure rename): expect an empty range list, not a failure.
test("getDiffRanges: file unchanged", async (t) => {
  const diffRanges = runGetDiffRanges(0, undefined);
  t.deepEqual(diffRanges, []);
});

// Nonzero changes but no patch means the per-file diff was withheld (too
// large): `undefined` signals that diff ranges cannot be computed.
test("getDiffRanges: file diff too large", async (t) => {
  const diffRanges = runGetDiffRanges(1000000, undefined);
  t.deepEqual(diffRanges, undefined);
});

// Two consecutive added lines after three context lines of a hunk starting at
// new-file line 50 produce a single range covering lines 53-54.
test("getDiffRanges: diff thunk with single addition range", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,6 +50,8 @@",
    " a",
    " b",
    " c",
    "+1",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 54,
    },
  ]);
});

// Pure deletions contribute no ranges: only added/modified lines matter.
test("getDiffRanges: diff thunk with single deletion range", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,8 +50,6 @@",
    " a",
    " b",
    " c",
    "-1",
    "-2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, []);
});

// A deletion immediately followed by an addition (an updated line) yields a
// one-line range at the added line's position in the new file.
test("getDiffRanges: diff thunk with single update range", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,7 +50,7 @@",
    " a",
    " b",
    " c",
    "-1",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 53,
    },
  ]);
});

// Two additions separated by a context line are reported as two distinct
// single-line ranges rather than one merged range.
test("getDiffRanges: diff thunk with addition ranges", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,7 +50,9 @@",
    " a",
    " b",
    " c",
    "+1",
    " c",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 53,
    },
    {
      path: "/checkout/path/test.txt",
      startLine: 55,
      endLine: 55,
    },
  ]);
});

// Mixed deletions and additions: deletions are skipped entirely (they do not
// advance the new-file line counter), and adjacent added lines coalesce.
test("getDiffRanges: diff thunk with mixed ranges", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,7 +50,7 @@",
    " a",
    " b",
    " c",
    "-1",
    " d",
    "-2",
    "+3",
    " e",
    " f",
    "+4",
    "+5",
    " g",
    " h",
    " i",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 54,
      endLine: 54,
    },
    {
      path: "/checkout/path/test.txt",
      startLine: 57,
      endLine: 58,
    },
  ]);
});

// A second hunk header resets the line counter to that hunk's new-file start,
// so each hunk's additions are positioned independently.
test("getDiffRanges: multiple diff thunks", async (t) => {
  const diffRanges = runGetDiffRanges(2, [
    "@@ -30,6 +50,8 @@",
    " a",
    " b",
    " c",
    "+1",
    "+2",
    " d",
    " e",
    " f",
    "@@ -130,6 +150,8 @@",
    " a",
    " b",
    " c",
    "+1",
    "+2",
    " d",
    " e",
    " f",
  ]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 53,
      endLine: 54,
    },
    {
      path: "/checkout/path/test.txt",
      startLine: 153,
      endLine: 154,
    },
  ]);
});

// Hunk with no context lines and single-number old-side spec ("-30" rather
// than "-30,7"): the additions start exactly at the new-side start line.
test("getDiffRanges: no diff context lines", async (t) => {
  const diffRanges = runGetDiffRanges(2, ["@@ -30 +50,2 @@", "+1", "+2"]);
  t.deepEqual(diffRanges, [
    {
      path: "/checkout/path/test.txt",
      startLine: 50,
      endLine: 51,
    },
  ]);
});

View file

@ -7,6 +7,7 @@ import del from "del";
import * as yaml from "js-yaml"; import * as yaml from "js-yaml";
import * as actionsUtil from "./actions-util"; import * as actionsUtil from "./actions-util";
import { getApiClient } from "./api-client";
import { setupCppAutobuild } from "./autobuild"; import { setupCppAutobuild } from "./autobuild";
import { import {
CODEQL_VERSION_ANALYSIS_SUMMARY_V2, CODEQL_VERSION_ANALYSIS_SUMMARY_V2,
@ -17,7 +18,6 @@ import * as configUtils from "./config-utils";
import { addDiagnostic, makeDiagnostic } from "./diagnostics"; import { addDiagnostic, makeDiagnostic } from "./diagnostics";
import { EnvVar } from "./environment"; import { EnvVar } from "./environment";
import { FeatureEnablement, Feature } from "./feature-flags"; import { FeatureEnablement, Feature } from "./feature-flags";
import * as gitUtils from "./git-utils";
import { isScannedLanguage, Language } from "./languages"; import { isScannedLanguage, Language } from "./languages";
import { Logger, withGroupAsync } from "./logging"; import { Logger, withGroupAsync } from "./logging";
import { DatabaseCreationTimings, EventReport } from "./status-report"; import { DatabaseCreationTimings, EventReport } from "./status-report";
@ -240,7 +240,8 @@ async function finalizeDatabaseCreation(
* Set up the diff-informed analysis feature. * Set up the diff-informed analysis feature.
* *
* @param baseRef The base branch name, used for calculating the diff range. * @param baseRef The base branch name, used for calculating the diff range.
* @param headRef The head branch name, used for calculating the diff range. * @param headLabel The label that uniquely identifies the head branch across
* repositories, used for calculating the diff range.
* @param codeql * @param codeql
* @param logger * @param logger
* @param features * @param features
@ -249,7 +250,7 @@ async function finalizeDatabaseCreation(
*/ */
export async function setupDiffInformedQueryRun( export async function setupDiffInformedQueryRun(
baseRef: string, baseRef: string,
headRef: string, headLabel: string,
codeql: CodeQL, codeql: CodeQL,
logger: Logger, logger: Logger,
features: FeatureEnablement, features: FeatureEnablement,
@ -262,10 +263,21 @@ export async function setupDiffInformedQueryRun(
async () => { async () => {
const diffRanges = await getPullRequestEditedDiffRanges( const diffRanges = await getPullRequestEditedDiffRanges(
baseRef, baseRef,
headRef, headLabel,
logger, logger,
); );
return writeDiffRangeDataExtensionPack(logger, diffRanges); const packDir = writeDiffRangeDataExtensionPack(logger, diffRanges);
if (packDir === undefined) {
logger.warning(
"Cannot create diff range extension pack for diff-informed queries; " +
"reverting to performing full analysis.",
);
} else {
logger.info(
`Successfully created diff range extension pack at ${packDir}.`,
);
}
return packDir;
}, },
); );
} }
@ -280,7 +292,8 @@ interface DiffThunkRange {
* Return the file line ranges that were added or modified in the pull request. * Return the file line ranges that were added or modified in the pull request.
* *
* @param baseRef The base branch name, used for calculating the diff range. * @param baseRef The base branch name, used for calculating the diff range.
* @param headRef The head branch name, used for calculating the diff range. * @param headLabel The label that uniquely identifies the head branch across
* repositories, used for calculating the diff range.
* @param logger * @param logger
* @returns An array of tuples, where each tuple contains the absolute path of a * @returns An array of tuples, where each tuple contains the absolute path of a
* file, the start line and the end line (both 1-based and inclusive) of an * file, the start line and the end line (both 1-based and inclusive) of an
@ -289,107 +302,167 @@ interface DiffThunkRange {
*/ */
async function getPullRequestEditedDiffRanges( async function getPullRequestEditedDiffRanges(
baseRef: string, baseRef: string,
headRef: string, headLabel: string,
logger: Logger, logger: Logger,
): Promise<DiffThunkRange[] | undefined> { ): Promise<DiffThunkRange[] | undefined> {
const checkoutPath = actionsUtil.getOptionalInput("checkout_path"); const fileDiffs = await getFileDiffsWithBasehead(baseRef, headLabel, logger);
if (checkoutPath === undefined) { if (fileDiffs === undefined) {
return undefined; return undefined;
} }
if (fileDiffs.length >= 300) {
// The "compare two commits" API returns a maximum of 300 changed files. If
// we see that many changed files, it is possible that there could be more,
// with the rest being truncated. In this case, we should not attempt to
// compute the diff ranges, as the result would be incomplete.
logger.warning(
`Cannot retrieve the full diff because there are too many ` +
`(${fileDiffs.length}) changed files in the pull request.`,
);
return undefined;
}
const results: DiffThunkRange[] = [];
for (const filediff of fileDiffs) {
const diffRanges = getDiffRanges(filediff, logger);
if (diffRanges === undefined) {
return undefined;
}
results.push(...diffRanges);
}
return results;
}
// To compute the merge bases between the base branch and the PR topic branch, /**
// we need to fetch the commit graph from the branch heads to those merge * This interface is an abbreviated version of the file diff object returned by
// babes. The following 6-step procedure does so while limiting the amount of * the GitHub API.
// history fetched. */
interface FileDiff {
filename: string;
changes: number;
// A patch may be absent if the file is binary, if the file diff is too large,
// or if the file is unchanged.
patch?: string | undefined;
}
// Step 1: Deepen from the PR merge commit to the base branch head and the PR async function getFileDiffsWithBasehead(
// topic branch head, so that the PR merge commit is no longer considered a baseRef: string,
// grafted commit. headLabel: string,
await gitUtils.deepenGitHistory(); logger: Logger,
// Step 2: Fetch the base branch shallow history. This step ensures that the ): Promise<FileDiff[] | undefined> {
// base branch name is present in the local repository. Normally the base const ownerRepo = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
// branch name would be added by Step 4. However, if the base branch head is const owner = ownerRepo[0];
// an ancestor of the PR topic branch head, Step 4 would fail without doing const repo = ownerRepo[1];
// anything, so we need to fetch the base branch explicitly. const basehead = `${baseRef}...${headLabel}`;
await gitUtils.gitFetch(baseRef, ["--depth=1"]); try {
// Step 3: Fetch the PR topic branch history, stopping when we reach commits const response = await getApiClient().rest.repos.compareCommitsWithBasehead(
// that are reachable from the base branch head. {
await gitUtils.gitFetch(headRef, [`--shallow-exclude=${baseRef}`]); owner,
// Step 4: Fetch the base branch history, stopping when we reach commits that repo,
// are reachable from the PR topic branch head. basehead,
await gitUtils.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]); per_page: 1,
// Step 5: Repack the history to remove the shallow grafts that were added by },
// the previous fetches. This step works around a bug that causes subsequent );
// deepening fetches to fail with "fatal: error in object: unshallow <SHA>". logger.debug(
// See https://stackoverflow.com/q/63878612 `Response from compareCommitsWithBasehead(${basehead}):` +
await gitUtils.gitRepack(["-d"]); `\n${JSON.stringify(response, null, 2)}`,
// Step 6: Deepen the history so that we have the merge bases between the base );
// branch and the PR topic branch. return response.data.files;
await gitUtils.deepenGitHistory(); } catch (error: any) {
if (error.status) {
logger.warning(`Error retrieving diff ${basehead}: ${error.message}`);
logger.debug(
`Error running compareCommitsWithBasehead(${basehead}):` +
`\nRequest: ${JSON.stringify(error.request, null, 2)}` +
`\nError Response: ${JSON.stringify(error.response, null, 2)}`,
);
return undefined;
} else {
throw error;
}
}
}
// To compute the exact same diff as GitHub would compute for the PR, we need function getDiffRanges(
// to use the same merge base as GitHub. That is easy to do if there is only fileDiff: FileDiff,
// one merge base, which is by far the most common case. If there are multiple logger: Logger,
// merge bases, we stop without producing a diff range. ): DiffThunkRange[] | undefined {
const mergeBases = await gitUtils.getAllGitMergeBases([baseRef, headRef]); if (fileDiff.patch === undefined) {
logger.info(`Merge bases: ${mergeBases.join(", ")}`); if (fileDiff.changes === 0) {
if (mergeBases.length !== 1) { // There are situations where a changed file legitimately has no diff.
logger.info( // For example, the file may be a binary file, or that the file may have
"Cannot compute diff range because baseRef and headRef " + // been renamed with no changes to its contents. In these cases, the
`have ${mergeBases.length} merge bases (instead of exactly 1).`, // file would be reported as having 0 changes, and we can return an empty
// array to indicate no diff range in this file.
return [];
}
// If a file is reported to have nonzero changes but no patch, that may be
// due to the file diff being too large. In this case, we should return
// undefined to indicate that we cannot process the diff.
logger.warning(
`No patch found for file ${fileDiff.filename} with ${fileDiff.changes} changes.`,
); );
return undefined; return undefined;
} }
const diffHunkHeaders = await gitUtils.getGitDiffHunkHeaders( // Diff-informed queries expect the file path to be absolute. CodeQL always
mergeBases[0], // uses forward slashes as the path separator, so on Windows we need to
headRef, // replace any backslashes with forward slashes.
); const filename = path
if (diffHunkHeaders === undefined) { .join(actionsUtil.getRequiredInput("checkout_path"), fileDiff.filename)
return undefined; .replaceAll(path.sep, "/");
}
const results = new Array<DiffThunkRange>(); // The 1-based file line number of the current line
let currentLine = 0;
// The 1-based file line number that starts the current range of added lines
let additionRangeStartLine: number | undefined = undefined;
const diffRanges: DiffThunkRange[] = [];
let changedFile = ""; const diffLines = fileDiff.patch.split("\n");
for (const line of diffHunkHeaders) { // Adding a fake context line at the end ensures that the following loop will
if (line.startsWith("+++ ")) { // always terminate the last range of added lines.
const filePath = gitUtils.decodeGitFilePath(line.substring(4)); diffLines.push(" ");
if (filePath.startsWith("b/")) {
// The file was edited: track all hunks in the file for (const diffLine of diffLines) {
changedFile = filePath.substring(2); if (diffLine.startsWith("-")) {
} else if (filePath === "/dev/null") { // Ignore deletions completely -- we do not even want to consider them when
// The file was deleted: skip all hunks in the file // calculating consecutive ranges of added lines.
changedFile = "";
} else {
logger.warning(`Failed to parse diff hunk header line: ${line}`);
return undefined;
}
continue; continue;
} }
if (line.startsWith("@@ ")) { if (diffLine.startsWith("+")) {
if (changedFile === "") continue; if (additionRangeStartLine === undefined) {
additionRangeStartLine = currentLine;
const match = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/); }
currentLine++;
continue;
}
if (additionRangeStartLine !== undefined) {
// Any line that does not start with a "+" or "-" terminates the current
// range of added lines.
diffRanges.push({
path: filename,
startLine: additionRangeStartLine,
endLine: currentLine - 1,
});
additionRangeStartLine = undefined;
}
if (diffLine.startsWith("@@ ")) {
// A new hunk header line resets the current line number.
const match = diffLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,\d+)? @@/);
if (match === null) { if (match === null) {
logger.warning(`Failed to parse diff hunk header line: ${line}`); logger.warning(
`Cannot parse diff hunk header for ${fileDiff.filename}: ${diffLine}`,
);
return undefined; return undefined;
} }
const startLine = parseInt(match[1], 10); currentLine = parseInt(match[1], 10);
const numLines = parseInt(match[2], 10); continue;
if (numLines === 0) { }
// The hunk was a deletion: skip it if (diffLine.startsWith(" ")) {
continue; // An unchanged context line advances the current line number.
} currentLine++;
const endLine = startLine + (numLines || 1) - 1; continue;
results.push({
path: path.join(checkoutPath, changedFile),
startLine,
endLine,
});
} }
} }
return results; return diffRanges;
} }
/** /**
@ -439,7 +512,11 @@ extensions:
let data = ranges let data = ranges
.map( .map(
(range) => (range) =>
` - ["${range.path}", ${range.startLine}, ${range.endLine}]\n`, // Using yaml.dump() with `forceQuotes: true` ensures that all special
// characters are escaped, and that the path is always rendered as a
// quoted string on a single line.
` - [${yaml.dump(range.path, { forceQuotes: true }).trim()}, ` +
`${range.startLine}, ${range.endLine}]\n`,
) )
.join(""); .join("");
if (!data) { if (!data) {
@ -702,3 +779,7 @@ export async function runCleanup(
} }
logger.endGroup(); logger.endGroup();
} }
export const exportedForTesting = {
getDiffRanges,
};

View file

@ -19,6 +19,8 @@ export const LANGUAGE_ALIASES: { [lang: string]: Language } = {
"c#": Language.csharp, "c#": Language.csharp,
kotlin: Language.java, kotlin: Language.java,
typescript: Language.javascript, typescript: Language.javascript,
"javascript-typescript": Language.javascript,
"java-kotlin": Language.java,
}; };
/** /**

View file

@ -6,6 +6,7 @@ import * as toolcache from "@actions/tool-cache";
import { pki } from "node-forge"; import { pki } from "node-forge";
import * as actionsUtil from "./actions-util"; import * as actionsUtil from "./actions-util";
import { Language, parseLanguage } from "./languages";
import { getActionsLogger, Logger } from "./logging"; import { getActionsLogger, Logger } from "./logging";
import * as util from "./util"; import * as util from "./util";
@ -17,6 +18,20 @@ const PROXY_USER = "proxy_user";
const KEY_SIZE = 2048; const KEY_SIZE = 2048;
const KEY_EXPIRY_YEARS = 2; const KEY_EXPIRY_YEARS = 2;
// Maps each CodeQL language to the private-registry type that the dependency
// proxy understands for that language's package ecosystem. Used to filter the
// configured credentials down to the one registry type relevant to the
// language being analyzed. Languages with no established proxy registry type
// map to the empty string (and so match no credential `type`).
//
// `satisfies` (rather than an explicit `Record<Language, string>` annotation
// combined with `as const`) validates that every `Language` key is present
// while preserving the literal value types: the previous annotation widened
// the values to `string`, which made the trailing `as const` a no-op.
const LANGUAGE_TO_REGISTRY_TYPE = {
  java: "maven_repository",
  csharp: "nuget_feed",
  javascript: "npm_registry",
  python: "python_index",
  ruby: "rubygems_server",
  rust: "cargo_registry",
  // We do not have an established proxy type for these languages, thus leaving empty.
  actions: "",
  cpp: "",
  go: "",
  swift: "",
} satisfies Record<Language, string>;
type CertificateAuthority = { type CertificateAuthority = {
cert: string; cert: string;
key: string; key: string;
@ -192,6 +207,11 @@ function getCredentials(logger: Logger): Credential[] {
"registries_credentials", "registries_credentials",
); );
const registrySecrets = actionsUtil.getOptionalInput("registry_secrets"); const registrySecrets = actionsUtil.getOptionalInput("registry_secrets");
const languageString = actionsUtil.getOptionalInput("language");
const language = languageString ? parseLanguage(languageString) : undefined;
const registryTypeForLanguage = language
? LANGUAGE_TO_REGISTRY_TYPE[language]
: undefined;
let credentialsStr: string; let credentialsStr: string;
if (registriesCredentials !== undefined) { if (registriesCredentials !== undefined) {
@ -212,6 +232,13 @@ function getCredentials(logger: Logger): Credential[] {
if (e.url === undefined && e.host === undefined) { if (e.url === undefined && e.host === undefined) {
throw new Error("Invalid credentials - must specify host or url"); throw new Error("Invalid credentials - must specify host or url");
} }
// Filter credentials based on language if specified. `type` is the registry type.
// E.g., "maven_repository" for Java/Kotlin, "nuget_feed" for C#.
if (e.type !== registryTypeForLanguage) {
continue;
}
out.push({ out.push({
type: e.type, type: e.type,
host: e.host, host: e.host,

View file

@ -16,6 +16,9 @@ inputs:
description: GitHub token to use for authenticating with this instance of GitHub, used to upload debug artifacts. description: GitHub token to use for authenticating with this instance of GitHub, used to upload debug artifacts.
default: ${{ github.token }} default: ${{ github.token }}
required: false required: false
language:
description: The programming language to set up the proxy for, used to select the correct registry ecosystem
required: false
outputs: outputs:
proxy_host: proxy_host:
description: The IP address of the proxy description: The IP address of the proxy