Merge branch 'main' into henrymercer/report-ml-powered-query-enablement
commit ad40e4a8f8
9 changed files with 198 additions and 28 deletions

lib/actions-util.js (generated, 51 changes)

@@ -19,7 +19,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getToolCacheDirectory = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const core = __importStar(require("@actions/core"));

@@ -103,6 +103,55 @@ const getCommitOid = async function (ref = "HEAD") {
    }
};
exports.getCommitOid = getCommitOid;
/**
 * If the action was triggered by a pull request, determine the commit sha of the merge base.
 * Returns undefined if run by other triggers or the merge base cannot be determined.
 */
const determineMergeBaseCommitOid = async function () {
    if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
        return undefined;
    }
    const mergeSha = (0, util_1.getRequiredEnvParam)("GITHUB_SHA");
    try {
        let commitOid = "";
        let baseOid = "";
        let headOid = "";
        await new toolrunner.ToolRunner(await safeWhich.safeWhich("git"), ["show", "-s", "--format=raw", mergeSha], {
            silent: true,
            listeners: {
                stdline: (data) => {
                    if (data.startsWith("commit ") && commitOid === "") {
                        commitOid = data.substring(7);
                    }
                    else if (data.startsWith("parent ")) {
                        if (baseOid === "") {
                            baseOid = data.substring(7);
                        }
                        else if (headOid === "") {
                            headOid = data.substring(7);
                        }
                    }
                },
                stderr: (data) => {
                    process.stderr.write(data);
                },
            },
        }).exec();
        // Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
        if (commitOid === mergeSha &&
            headOid.length === 40 &&
            baseOid.length === 40) {
            return baseOid;
        }
        return undefined;
    }
    catch (e) {
        core.info(`Failed to call git to determine merge base. Continuing with data from environment: ${e}`);
        core.info(e.stack || "NO STACK");
        return undefined;
    }
};
exports.determineMergeBaseCommitOid = determineMergeBaseCommitOid;
function isObject(o) {
    return o !== null && typeof o === "object";
}
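
The new determineMergeBaseCommitOid above shells out to git and parses the raw commit header that "git show -s --format=raw <sha>" prints for the pull request's merge commit. As a rough illustration (every SHA, name and timestamp below is invented, and parseMergeCommitHeader is a hypothetical helper, not part of the action), a merge commit's header carries one "commit" line and two "parent" lines; the code takes the first parent, the tip of the base branch, as the merge base, and the second parent as the head of the pull request branch:

// Hypothetical sample of what "git show -s --format=raw <merge-sha>" prints
// for a pull request merge commit. All values here are placeholders.
const sampleRawHeader = [
  "commit 1111111111111111111111111111111111111111",
  "tree 2222222222222222222222222222222222222222",
  "parent 3333333333333333333333333333333333333333", // first parent: tip of the base branch, used as the merge base
  "parent 4444444444444444444444444444444444444444", // second parent: head of the pull request branch
  "author Example Author <author@example.com> 1640000000 +0000",
  "committer GitHub <noreply@github.com> 1640000000 +0000",
].join("\n");

// Minimal sketch of the same line-by-line parsing the stdline listener does:
// remember the commit SHA and the first two parent SHAs.
function parseMergeCommitHeader(raw: string) {
  let commitOid = "";
  let baseOid = "";
  let headOid = "";
  for (const line of raw.split("\n")) {
    if (line.startsWith("commit ") && commitOid === "") {
      commitOid = line.substring(7);
    } else if (line.startsWith("parent ")) {
      if (baseOid === "") {
        baseOid = line.substring(7);
      } else if (headOid === "") {
        headOid = line.substring(7);
      }
    }
  }
  return { commitOid, baseOid, headOid };
}

// Prints { commitOid: "1111...", baseOid: "3333...", headOid: "4444..." }
console.log(parseMergeCommitHeader(sampleRawHeader));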
File diff suppressed because one or more lines are too long

lib/upload-lib.js (generated, 26 changes)

@@ -204,7 +204,7 @@ function validateSarifFileSchema(sarifFilePath, logger) {
exports.validateSarifFileSchema = validateSarifFileSchema;
// buildPayload constructs a map ready to be uploaded to the API from the given
// parameters, respecting the current mode and target GitHub instance version.
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion) {
function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, mergeBaseCommitOid) {
    if (util.isActions()) {
        const payloadObj = {
            commit_oid: commitOid,

@@ -223,11 +223,23 @@ function buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, wo
        // This behaviour can be made the default when support for GHES 3.0 is discontinued.
        if (gitHubVersion.type !== util.GitHubVariant.GHES ||
            semver.satisfies(gitHubVersion.version, `>=3.1`)) {
            if (process.env.GITHUB_EVENT_NAME === "pull_request" &&
                process.env.GITHUB_EVENT_PATH) {
                const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
                payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
                payloadObj.base_sha = githubEvent.pull_request.base.sha;
            if (process.env.GITHUB_EVENT_NAME === "pull_request") {
                if (commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
                    mergeBaseCommitOid) {
                    // We're uploading results for the merge commit
                    // and were able to determine the merge base.
                    // So we use that as the most accurate base.
                    payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam("GITHUB_BASE_REF")}`;
                    payloadObj.base_sha = mergeBaseCommitOid;
                }
                else if (process.env.GITHUB_EVENT_PATH) {
                    // Either we're not uploading results for the merge commit
                    // or we could not determine the merge base.
                    // Using the PR base is the only option here
                    const githubEvent = JSON.parse(fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8"));
                    payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
                    payloadObj.base_sha = githubEvent.pull_request.base.sha;
                }
            }
        }
        return payloadObj;

@@ -260,7 +272,7 @@ async function uploadFiles(sarifFiles, repositoryNwo, commitOid, ref, analysisKe
    const sarifPayload = JSON.stringify(sarif);
    const zippedSarif = zlib_1.default.gzipSync(sarifPayload).toString("base64");
    const checkoutURI = (0, file_url_1.default)(sourceRoot);
    const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion);
    const payload = buildPayload(commitOid, ref, analysisKey, analysisName, zippedSarif, workflowRunID, checkoutURI, environment, toolNames, gitHubVersion, await actionsUtil.determineMergeBaseCommitOid());
    // Log some useful debug info about the info
    const rawUploadSizeBytes = sarifPayload.length;
    logger.debug(`Raw upload size: ${rawUploadSizeBytes} bytes`);
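
The branching added to buildPayload above picks the base differently depending on what was analyzed. As a hedged, self-contained sketch of that rule (chooseBase, PullRequestBase and all of its parameters are hypothetical names standing in for the environment variables and the webhook event file, not part of upload-lib): when the SARIF was produced for the pull request's merge commit and a merge base is known, report GITHUB_BASE_REF together with that merge base; otherwise fall back to the base ref and sha recorded in the pull_request event payload.

// Hedged sketch only: mirrors the decision buildPayload now makes, with
// hypothetical parameters standing in for environment variables and the
// pull_request event payload read from GITHUB_EVENT_PATH.
interface PullRequestBase {
  ref: string; // e.g. "main"
  sha: string; // tip of the base branch recorded in the webhook event
}

function chooseBase(
  commitOid: string, // commit the SARIF was produced for
  githubSha: string, // value of GITHUB_SHA (the merge commit on pull requests)
  githubBaseRef: string, // value of GITHUB_BASE_REF
  mergeBaseCommitOid: string | undefined,
  eventBase: PullRequestBase | undefined
): { base_ref: string; base_sha: string } | undefined {
  if (commitOid === githubSha && mergeBaseCommitOid) {
    // Results are for the merge commit and the merge base is known:
    // the most accurate base pair.
    return {
      base_ref: `refs/heads/${githubBaseRef}`,
      base_sha: mergeBaseCommitOid,
    };
  }
  if (eventBase) {
    // Results are for the PR head, or the merge base is unknown:
    // fall back to the base recorded in the event payload.
    return {
      base_ref: `refs/heads/${eventBase.ref}`,
      base_sha: eventBase.sha,
    };
  }
  return undefined;
}

// Example: analyzing the merge commit with a known merge base.
chooseBase("abc123", "abc123", "main", "def456", { ref: "main", sha: "0ff1ce" });
// => { base_ref: "refs/heads/main", base_sha: "def456" }

In the real code the fallback values come from the file pointed to by GITHUB_EVENT_PATH rather than from parameters.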
File diff suppressed because one or more lines are too long

lib/upload-lib.test.js (generated, 15 changes)

@@ -53,20 +53,29 @@ ava_1.default.beforeEach(() => {
    const allVersions = newVersions.concat(oldVersions);
    process.env["GITHUB_EVENT_NAME"] = "push";
    for (const version of allVersions) {
        const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
        const payload = uploadLib.buildPayload("commit", "refs/heads/master", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
        // Not triggered by a pull request
        t.falsy(payload.base_ref);
        t.falsy(payload.base_sha);
    }
    process.env["GITHUB_EVENT_NAME"] = "pull_request";
    process.env["GITHUB_SHA"] = "commit";
    process.env["GITHUB_BASE_REF"] = "master";
    process.env["GITHUB_EVENT_PATH"] = `${__dirname}/../src/testdata/pull_request.json`;
    for (const version of newVersions) {
        const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
        const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
        // Uploads for a merge commit use the merge base
        t.deepEqual(payload.base_ref, "refs/heads/master");
        t.deepEqual(payload.base_sha, "mergeBaseCommit");
    }
    for (const version of newVersions) {
        const payload = uploadLib.buildPayload("headCommit", "refs/pull/123/head", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
        // Uploads for the head use the PR base
        t.deepEqual(payload.base_ref, "refs/heads/master");
        t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
    }
    for (const version of oldVersions) {
        const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version);
        const payload = uploadLib.buildPayload("commit", "refs/pull/123/merge", "key", undefined, "", undefined, "/opt/src", undefined, ["CodeQL", "eslint"], version, "mergeBaseCommit");
        // These older versions won't expect these values
        t.falsy(payload.base_ref);
        t.falsy(payload.base_sha);
File diff suppressed because one or more lines are too long

@@ -90,6 +90,66 @@ export const getCommitOid = async function (ref = "HEAD"): Promise<string> {
  }
};

/**
 * If the action was triggered by a pull request, determine the commit sha of the merge base.
 * Returns undefined if run by other triggers or the merge base cannot be determined.
 */
export const determineMergeBaseCommitOid = async function (): Promise<
  string | undefined
> {
  if (process.env.GITHUB_EVENT_NAME !== "pull_request") {
    return undefined;
  }

  const mergeSha = getRequiredEnvParam("GITHUB_SHA");

  try {
    let commitOid = "";
    let baseOid = "";
    let headOid = "";

    await new toolrunner.ToolRunner(
      await safeWhich.safeWhich("git"),
      ["show", "-s", "--format=raw", mergeSha],
      {
        silent: true,
        listeners: {
          stdline: (data) => {
            if (data.startsWith("commit ") && commitOid === "") {
              commitOid = data.substring(7);
            } else if (data.startsWith("parent ")) {
              if (baseOid === "") {
                baseOid = data.substring(7);
              } else if (headOid === "") {
                headOid = data.substring(7);
              }
            }
          },
          stderr: (data) => {
            process.stderr.write(data);
          },
        },
      }
    ).exec();

    // Let's confirm our assumptions: We had a merge commit and the parsed parent data looks correct
    if (
      commitOid === mergeSha &&
      headOid.length === 40 &&
      baseOid.length === 40
    ) {
      return baseOid;
    }
    return undefined;
  } catch (e) {
    core.info(
      `Failed to call git to determine merge base. Continuing with data from environment: ${e}`
    );
    core.info((e as Error).stack || "NO STACK");
    return undefined;
  }
};

interface WorkflowJobStep {
  run: any;
}
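
For readers unfamiliar with the ToolRunner pattern used in the TypeScript version above, the sketch below isolates the same shape: resolve git with safeWhich, run it silently, and collect stdout line by line through the stdline listener. getHeadSha and the choice of rev-parse are illustrative only and do not appear in the action; the import paths are assumed to mirror the ones this codebase already uses.

import * as toolrunner from "@actions/exec/lib/toolrunner";
import * as safeWhich from "@chrisgavin/safe-which";

// Illustrative helper: resolve the current HEAD SHA by shelling out to git,
// reusing the same ToolRunner/listener shape as determineMergeBaseCommitOid.
async function getHeadSha(): Promise<string | undefined> {
  let output = "";
  try {
    await new toolrunner.ToolRunner(
      await safeWhich.safeWhich("git"),
      ["rev-parse", "HEAD"],
      {
        silent: true, // do not echo the command or its output to the log
        listeners: {
          stdline: (line: string) => {
            output += line; // rev-parse prints a single line
          },
        },
      }
    ).exec();
    // Sanity check: a full SHA-1 object name is 40 hex characters.
    return output.length === 40 ? output : undefined;
  } catch (e) {
    // The sketch swallows errors; the real code logs them via core.info.
    return undefined;
  }
}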

@@ -57,7 +57,8 @@ test("validate correct payload used per version", async (t) => {
      "/opt/src",
      undefined,
      ["CodeQL", "eslint"],
      version
      version,
      "mergeBaseCommit"
    );
    // Not triggered by a pull request
    t.falsy(payload.base_ref);

@@ -65,6 +66,8 @@ test("validate correct payload used per version", async (t) => {
  }

  process.env["GITHUB_EVENT_NAME"] = "pull_request";
  process.env["GITHUB_SHA"] = "commit";
  process.env["GITHUB_BASE_REF"] = "master";
  process.env[
    "GITHUB_EVENT_PATH"
  ] = `${__dirname}/../src/testdata/pull_request.json`;

@@ -79,8 +82,29 @@ test("validate correct payload used per version", async (t) => {
      "/opt/src",
      undefined,
      ["CodeQL", "eslint"],
      version
      version,
      "mergeBaseCommit"
    );
    // Uploads for a merge commit use the merge base
    t.deepEqual(payload.base_ref, "refs/heads/master");
    t.deepEqual(payload.base_sha, "mergeBaseCommit");
  }

  for (const version of newVersions) {
    const payload: any = uploadLib.buildPayload(
      "headCommit",
      "refs/pull/123/head",
      "key",
      undefined,
      "",
      undefined,
      "/opt/src",
      undefined,
      ["CodeQL", "eslint"],
      version,
      "mergeBaseCommit"
    );
    // Uploads for the head use the PR base
    t.deepEqual(payload.base_ref, "refs/heads/master");
    t.deepEqual(payload.base_sha, "f95f852bd8fca8fcc58a9a2d6c842781e32a215e");
  }

@@ -96,7 +120,8 @@ test("validate correct payload used per version", async (t) => {
      "/opt/src",
      undefined,
      ["CodeQL", "eslint"],
      version
      version,
      "mergeBaseCommit"
    );
    // These older versions won't expect these values
    t.falsy(payload.base_ref);

@@ -291,7 +291,8 @@ export function buildPayload(
  checkoutURI: string,
  environment: string | undefined,
  toolNames: string[],
  gitHubVersion: util.GitHubVersion
  gitHubVersion: util.GitHubVersion,
  mergeBaseCommitOid: string | undefined
) {
  if (util.isActions()) {
    const payloadObj = {

@@ -314,15 +315,28 @@ export function buildPayload(
      gitHubVersion.type !== util.GitHubVariant.GHES ||
      semver.satisfies(gitHubVersion.version, `>=3.1`)
    ) {
      if (
        process.env.GITHUB_EVENT_NAME === "pull_request" &&
        process.env.GITHUB_EVENT_PATH
      ) {
        const githubEvent = JSON.parse(
          fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
        );
        payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
        payloadObj.base_sha = githubEvent.pull_request.base.sha;
      if (process.env.GITHUB_EVENT_NAME === "pull_request") {
        if (
          commitOid === util.getRequiredEnvParam("GITHUB_SHA") &&
          mergeBaseCommitOid
        ) {
          // We're uploading results for the merge commit
          // and were able to determine the merge base.
          // So we use that as the most accurate base.
          payloadObj.base_ref = `refs/heads/${util.getRequiredEnvParam(
            "GITHUB_BASE_REF"
          )}`;
          payloadObj.base_sha = mergeBaseCommitOid;
        } else if (process.env.GITHUB_EVENT_PATH) {
          // Either we're not uploading results for the merge commit
          // or we could not determine the merge base.
          // Using the PR base is the only option here
          const githubEvent = JSON.parse(
            fs.readFileSync(process.env.GITHUB_EVENT_PATH, "utf8")
          );
          payloadObj.base_ref = `refs/heads/${githubEvent.pull_request.base.ref}`;
          payloadObj.base_sha = githubEvent.pull_request.base.sha;
        }
      }
    }
    return payloadObj;

@@ -389,7 +403,8 @@ async function uploadFiles(
    checkoutURI,
    environment,
    toolNames,
    gitHubVersion
    gitHubVersion,
    await actionsUtil.determineMergeBaseCommitOid()
  );

  // Log some useful debug info about the info