Merge branch 'main' into NlightNFotis/detect_use_proxy_when_streaming
commit c6454d58c8
13 changed files with 112 additions and 26 deletions
lib/actions-util.js (generated): 28 changes
@@ -33,7 +33,7 @@ var __importStar = (this && this.__importStar) || (function () {
 };
 })();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.restoreInputs = exports.persistInputs = exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.decodeGitFilePath = exports.getGitDiffHunkHeaders = exports.getAllGitMergeBases = exports.gitRepack = exports.gitFetch = exports.deepenGitHistory = exports.determineBaseBranchHeadCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
 exports.getTemporaryDirectory = getTemporaryDirectory;
 exports.getRef = getRef;
 exports.getActionVersion = getActionVersion;
@@ -183,14 +183,21 @@ const determineBaseBranchHeadCommitOid = async function (checkoutPathOverride) {
 };
 exports.determineBaseBranchHeadCommitOid = determineBaseBranchHeadCommitOid;
 /**
- * Deepen the git history of the given ref by one level. Errors are logged.
+ * Deepen the git history of HEAD by one level. Errors are logged.
  *
  * This function uses the `checkout_path` to determine the repository path and
  * works only when called from `analyze` or `upload-sarif`.
  */
 const deepenGitHistory = async function () {
     try {
-        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["fetch", "--no-tags", "--deepen=1"], "Cannot deepen the shallow repository.");
+        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), [
+            "fetch",
+            "origin",
+            "HEAD",
+            "--no-tags",
+            "--no-recurse-submodules",
+            "--deepen=1",
+        ], "Cannot deepen the shallow repository.");
     }
     catch {
         // Errors are already logged by runGitCommand()
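For context, a minimal, illustrative sketch of the git invocation the updated deepenGitHistory now issues, written against Node's built-in child_process rather than the action's runGitCommand helper (which is not part of this diff); checkoutPath stands in for the resolved checkout_path input:

// Illustrative sketch only: the raw fetch performed by the new deepenGitHistory.
// `checkoutPath` is a placeholder for the resolved `checkout_path` input.
const { execFileSync } = require("child_process");

function deepenHeadByOne(checkoutPath) {
    // Equivalent to: git fetch origin HEAD --no-tags --no-recurse-submodules --deepen=1
    execFileSync(
        "git",
        ["fetch", "origin", "HEAD", "--no-tags", "--no-recurse-submodules", "--deepen=1"],
        { cwd: checkoutPath, stdio: "inherit" },
    );
}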
@@ -212,6 +219,21 @@ const gitFetch = async function (branch, extraFlags) {
     }
 };
 exports.gitFetch = gitFetch;
+/**
+ * Repack the git repository, using the given flags. Errors are logged.
+ *
+ * This function uses the `checkout_path` to determine the repository path and
+ * works only when called from `analyze` or `upload-sarif`.
+ */
+const gitRepack = async function (flags) {
+    try {
+        await runGitCommand((0, exports.getOptionalInput)("checkout_path"), ["repack", ...flags], "Cannot repack the repository.");
+    }
+    catch {
+        // Errors are already logged by runGitCommand()
+    }
+};
+exports.gitRepack = gitRepack;
 /**
  * Compute all the merge bases between the given refs. Returns an empty array
  * if no merge base is found, or if there is an error.
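runGitCommand is referenced throughout this file but is not shown in the diff. As a rough mental model only, a helper of that shape might look like the following sketch; the real implementation, its logging, and its error types may differ:

// Hypothetical stand-in for the runGitCommand helper referenced above (not the
// action's actual code). It runs git inside the checkout, logs the supplied
// message on failure, and rethrows so callers such as gitRepack can swallow
// the error after it has been logged.
const { execFile } = require("child_process");
const { promisify } = require("util");
const execFileAsync = promisify(execFile);

async function runGitCommandSketch(checkoutPath, args, errorMessage) {
    try {
        const { stdout } = await execFileAsync("git", args, { cwd: checkoutPath });
        return stdout;
    } catch (error) {
        console.error(`${errorMessage} ${error.message}`);
        throw error;
    }
}

// Example: gitRepack(["-d"]) amounts to runGitCommandSketch(path, ["repack", "-d"], "Cannot repack the repository.").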
File diff suppressed because one or more lines are too long
lib/analyze.js (generated): 11 changes
@@ -162,7 +162,7 @@ async function setupDiffInformedQueryRun(baseRef, headRef, codeql, logger, featu
     if (!(await features.getValue(feature_flags_1.Feature.DiffInformedQueries, codeql))) {
         return undefined;
     }
-    return await (0, logging_1.withGroup)("Generating diff range extension pack", async () => {
+    return await (0, logging_1.withGroupAsync)("Generating diff range extension pack", async () => {
         const diffRanges = await getPullRequestEditedDiffRanges(baseRef, headRef, logger);
         return writeDiffRangeDataExtensionPack(logger, diffRanges);
     });
@@ -185,7 +185,7 @@ async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
     }
     // To compute the merge bases between the base branch and the PR topic branch,
     // we need to fetch the commit graph from the branch heads to those merge
-    // bases. The following 4-step procedure does so while limiting the amount of
+    // bases. The following 6-step procedure does so while limiting the amount of
     // history fetched.
     // Step 1: Deepen from the PR merge commit to the base branch head and the PR
     // topic branch head, so that the PR merge commit is no longer considered a
@@ -203,7 +203,12 @@ async function getPullRequestEditedDiffRanges(baseRef, headRef, logger) {
     // Step 4: Fetch the base branch history, stopping when we reach commits that
     // are reachable from the PR topic branch head.
     await actionsUtil.gitFetch(baseRef, [`--shallow-exclude=${headRef}`]);
-    // Step 5: Deepen the history so that we have the merge bases between the base
+    // Step 5: Repack the history to remove the shallow grafts that were added by
+    // the previous fetches. This step works around a bug that causes subsequent
+    // deepening fetches to fail with "fatal: error in object: unshallow <SHA>".
+    // See https://stackoverflow.com/q/63878612
+    await actionsUtil.gitRepack(["-d"]);
+    // Step 6: Deepen the history so that we have the merge bases between the base
     // branch and the PR topic branch.
     await actionsUtil.deepenGitHistory();
     // To compute the exact same diff as GitHub would compute for the PR, we need
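To make the workaround concrete, here is a hedged sketch of steps 4 through 6 as raw git commands, assuming baseRef and headRef name the base branch and the PR head; the real code routes these calls through the actionsUtil helpers shown earlier, whose exact fetch arguments may differ:

// Illustrative sketch of steps 4-6 as plain git invocations (not the action's code).
const { execFileSync } = require("child_process");
const git = (...args) => execFileSync("git", args, { stdio: "inherit" });

function completeMergeBaseHistory(baseRef, headRef) {
    // Step 4: fetch base-branch history, stopping at commits reachable from the PR head.
    git("fetch", "origin", baseRef, `--shallow-exclude=${headRef}`);
    // Step 5 (new in this commit): drop the shallow grafts left by the earlier fetches;
    // without this, the next deepening fetch can fail with "fatal: error in object: unshallow <SHA>".
    git("repack", "-d");
    // Step 6: deepen by one level so the merge bases themselves become available locally.
    git("fetch", "origin", "HEAD", "--no-tags", "--no-recurse-submodules", "--deepen=1");
}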
File diff suppressed because one or more lines are too long
lib/logging.js (generated): 10 changes
@@ -36,6 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.getActionsLogger = getActionsLogger;
 exports.getRunnerLogger = getRunnerLogger;
 exports.withGroup = withGroup;
+exports.withGroupAsync = withGroupAsync;
 exports.formatDuration = formatDuration;
 const core = __importStar(require("@actions/core"));
 function getActionsLogger() {
@@ -65,6 +66,15 @@ function withGroup(groupName, f) {
         core.endGroup();
     }
 }
+async function withGroupAsync(groupName, f) {
+    core.startGroup(groupName);
+    try {
+        return await f();
+    }
+    finally {
+        core.endGroup();
+    }
+}
 /** Format a duration for use in logs. */
 function formatDuration(durationMs) {
     if (durationMs < 1000) {
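The async variant matters because the synchronous withGroup would call core.endGroup() as soon as f() returns a still-pending promise, closing the log group before the work finishes. A hedged usage sketch, assuming it is required from a sibling module inside lib/ (the returned path below is a placeholder):

// Usage sketch for the new withGroupAsync export (return value is a placeholder).
const { withGroupAsync } = require("./logging");

async function generatePackInsideGroup() {
    // The log group stays open until the awaited callback has settled,
    // unlike withGroup, which would close it as soon as the promise is created.
    return await withGroupAsync("Generating diff range extension pack", async () => {
        // ...asynchronous work that should be attributed to the group...
        return "path/to/generated/extension-pack";
    });
}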
lib/logging.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,4CAEC;AAED,0CAcC;AAED,8BAOC;AAGD,wCAWC;AAvDD,oDAAsC;AActC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,sCAAsC;QACtC,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,sCAAsC;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,sCAAsC;QACtC,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,sCAAsC;QACtC,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,OAAO,EAAE,GAAG,EAAE,CAAC,SAAS;QACxB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAED,SAAgB,SAAS,CAAI,SAAiB,EAAE,CAAU;IACxD,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,CAAC,EAAE,CAAC;IACb,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC;AAED,yCAAyC;AACzC,SAAgB,cAAc,CAAC,UAAkB;IAC/C,IAAI,UAAU,GAAG,IAAI,EAAE,CAAC;QACtB,OAAO,GAAG,UAAU,IAAI,CAAC;IAC3B,CAAC;IAED,IAAI,UAAU,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;QAC3B,OAAO,GAAG,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;IAC9C,CAAC;IACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC;IACrD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC9D,OAAO,GAAG,OAAO,IAAI,OAAO,GAAG,CAAC;AAClC,CAAC"}
{"version":3,"file":"logging.js","sourceRoot":"","sources":["../src/logging.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAcA,4CAEC;AAED,0CAcC;AAED,8BAOC;AAED,wCAUC;AAGD,wCAWC;AAnED,oDAAsC;AActC,SAAgB,gBAAgB;IAC9B,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,eAAe,CAAC,SAAkB;IAChD,OAAO;QACL,sCAAsC;QACtC,KAAK,EAAE,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,SAAS;QAClD,sCAAsC;QACtC,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,sCAAsC;QACtC,OAAO,EAAE,OAAO,CAAC,IAAI;QACrB,sCAAsC;QACtC,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,OAAO,EAAE,GAAG,EAAE,CAAC,SAAS;QACxB,UAAU,EAAE,GAAG,EAAE,CAAC,SAAS;QAC3B,QAAQ,EAAE,GAAG,EAAE,CAAC,SAAS;KAC1B,CAAC;AACJ,CAAC;AAED,SAAgB,SAAS,CAAI,SAAiB,EAAE,CAAU;IACxD,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,CAAC,EAAE,CAAC;IACb,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,cAAc,CAClC,SAAiB,EACjB,CAAmB;IAEnB,IAAI,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAC3B,IAAI,CAAC;QACH,OAAO,MAAM,CAAC,EAAE,CAAC;IACnB,CAAC;YAAS,CAAC;QACT,IAAI,CAAC,QAAQ,EAAE,CAAC;IAClB,CAAC;AACH,CAAC;AAED,yCAAyC;AACzC,SAAgB,cAAc,CAAC,UAAkB;IAC/C,IAAI,UAAU,GAAG,IAAI,EAAE,CAAC;QACtB,OAAO,GAAG,UAAU,IAAI,CAAC;IAC3B,CAAC;IAED,IAAI,UAAU,GAAG,EAAE,GAAG,IAAI,EAAE,CAAC;QAC3B,OAAO,GAAG,CAAC,UAAU,GAAG,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,CAAC;IAC9C,CAAC;IACD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,CAAC;IACrD,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC9D,OAAO,GAAG,OAAO,IAAI,OAAO,GAAG,CAAC;AAClC,CAAC"}