Merge pull request #2514 from github/henrymercer/zstd-better-failure-logging
Capture stderr from extracting Zstandard bundles
commit e85017e674
24 changed files with 487 additions and 263 deletions
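At a high level, the change in the diffs below moves command execution into a shared runTool helper in lib/actions-util.js that captures stderr and attaches it to the thrown CommandInvocationError, so the Zstandard setup path in lib/setup-codeql.js can log what tar printed before falling back to the gzipped bundle. The following is only a minimal TypeScript sketch of that error flow with simplified signatures; setUpBundleWithZstd and the extract/warn parameters are illustrative names, not identifiers from this commit.

// Sketch only: simplified shapes, not the Action's real module layout.
class CommandInvocationError extends Error {
  constructor(
    public cmd: string,
    public args: string[],
    public exitCode: number,
    public stderr: string,
    public stdout: string,
  ) {
    const lastLine = stderr.trim().split("\n").pop()?.trim() || "n/a";
    super(
      `Failed to run "${[cmd, ...args].join(" ")}". ` +
        `Exit code was ${exitCode} and last log line was: ${lastLine}`,
    );
  }
}

// The zstd setup path can now surface tar's stderr when extraction fails,
// instead of only a generic message, before retrying with gzip.
async function setUpBundleWithZstd(
  extract: () => Promise<string>,
  warn: (msg: string) => void,
): Promise<string | undefined> {
  try {
    return await extract();
  } catch (e) {
    let reason = e instanceof Error ? e.message : String(e);
    if (e instanceof CommandInvocationError) {
      reason += ` Full error: ${e.stderr}`;
    }
    warn(`Failed to set up CodeQL tools with zstd. Falling back to gzip. ${reason}`);
    return undefined;
  }
}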
78 lib/actions-util.js (generated)
@@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getFileType = exports.FileCmdNotFoundError = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
+exports.CommandInvocationError = exports.getFileType = exports.FileCmdNotFoundError = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getOptionalInput = exports.getRequiredInput = void 0;
 exports.getTemporaryDirectory = getTemporaryDirectory;
 exports.getRef = getRef;
 exports.getActionVersion = getActionVersion;
@@ -37,6 +37,9 @@ exports.getUploadValue = getUploadValue;
 exports.getWorkflowRunID = getWorkflowRunID;
 exports.getWorkflowRunAttempt = getWorkflowRunAttempt;
 exports.isSelfHostedRunner = isSelfHostedRunner;
+exports.prettyPrintInvocation = prettyPrintInvocation;
+exports.ensureEndsInPeriod = ensureEndsInPeriod;
+exports.runTool = runTool;
 const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const core = __importStar(require("@actions/core"));
@@ -429,4 +432,77 @@ exports.getFileType = getFileType;
 function isSelfHostedRunner() {
     return process.env.RUNNER_ENVIRONMENT === "self-hosted";
 }
+function prettyPrintInvocation(cmd, args) {
+    return [cmd, ...args].map((x) => (x.includes(" ") ? `'${x}'` : x)).join(" ");
+}
+/**
+ * An error from a tool invocation, with associated exit code, stderr, etc.
+ */
+class CommandInvocationError extends Error {
+    constructor(cmd, args, exitCode, stderr, stdout) {
+        const prettyCommand = prettyPrintInvocation(cmd, args);
+        const lastLine = ensureEndsInPeriod(stderr.trim().split("\n").pop()?.trim() || "n/a");
+        super(`Failed to run "${prettyCommand}". ` +
+            `Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`);
+        this.cmd = cmd;
+        this.args = args;
+        this.exitCode = exitCode;
+        this.stderr = stderr;
+        this.stdout = stdout;
+    }
+}
+exports.CommandInvocationError = CommandInvocationError;
+function ensureEndsInPeriod(text) {
+    return text[text.length - 1] === "." ? text : `${text}.`;
+}
+/**
+ * A constant defining the maximum number of characters we will keep from
+ * the programs stderr for logging.
+ *
+ * This serves two purposes:
+ * 1. It avoids an OOM if a program fails in a way that results it
+ * printing many log lines.
+ * 2. It avoids us hitting the limit of how much data we can send in our
+ * status reports on GitHub.com.
+ */
+const MAX_STDERR_BUFFER_SIZE = 20000;
+/**
+ * Runs a CLI tool.
+ *
+ * @returns Standard output produced by the tool.
+ * @throws A `CommandInvocationError` if the tool exits with a non-zero status code.
+ */
+async function runTool(cmd, args = [], opts = {}) {
+    let stdout = "";
+    let stderr = "";
+    process.stdout.write(`[command]${cmd} ${args.join(" ")}\n`);
+    const exitCode = await new toolrunner.ToolRunner(cmd, args, {
+        ignoreReturnCode: true,
+        listeners: {
+            stdout: (data) => {
+                stdout += data.toString("utf8");
+                if (!opts.noStreamStdout) {
+                    process.stdout.write(data);
+                }
+            },
+            stderr: (data) => {
+                let readStartIndex = 0;
+                // If the error is too large, then we only take the last MAX_STDERR_BUFFER_SIZE characters
+                if (data.length - MAX_STDERR_BUFFER_SIZE > 0) {
+                    // Eg: if we have MAX_STDERR_BUFFER_SIZE the start index should be 2.
+                    readStartIndex = data.length - MAX_STDERR_BUFFER_SIZE + 1;
+                }
+                stderr += data.toString("utf8", readStartIndex);
+                // Mimic the standard behavior of the toolrunner by writing stderr to stdout
+                process.stdout.write(data);
+            },
+        },
+        silent: true,
+        ...(opts.stdin ? { input: Buffer.from(opts.stdin || "") } : {}),
+    }).exec();
+    if (exitCode !== 0) {
+        throw new CommandInvocationError(cmd, args, exitCode, stderr, stdout);
+    }
+    return stdout;
+}
 //# sourceMappingURL=actions-util.js.map
File diff suppressed because one or more lines are too long
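The runTool stderr listener above keeps only the tail of each chunk, bounded by MAX_STDERR_BUFFER_SIZE, to avoid out-of-memory failures and oversized status reports. Below is a standalone TypeScript sketch of that per-chunk truncation rule; appendStderrChunk is an illustrative helper name, not something from the diff.

// Per-chunk truncation as in the listener above: keep at most (roughly) the
// last MAX_STDERR_BUFFER_SIZE characters of each stderr chunk.
const MAX_STDERR_BUFFER_SIZE = 20000;

function appendStderrChunk(buffer: string, chunk: Buffer): string {
  let readStartIndex = 0;
  if (chunk.length - MAX_STDERR_BUFFER_SIZE > 0) {
    // Skip everything before the tail of the chunk.
    readStartIndex = chunk.length - MAX_STDERR_BUFFER_SIZE + 1;
  }
  return buffer + chunk.toString("utf8", readStartIndex);
}

// Example: a 25,000-byte chunk contributes only its last 19,999 bytes.
let stderr = "";
stderr = appendStderrChunk(stderr, Buffer.alloc(25000, "x"));
console.log(stderr.length); // 19999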
31 lib/cli-errors.js (generated)
@@ -1,26 +1,24 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.cliErrorsConfig = exports.CliConfigErrorCategory = exports.CommandInvocationError = void 0;
+exports.cliErrorsConfig = exports.CliConfigErrorCategory = exports.CliError = void 0;
 exports.getCliConfigCategoryIfExists = getCliConfigCategoryIfExists;
 exports.wrapCliConfigurationError = wrapCliConfigurationError;
+const actions_util_1 = require("./actions-util");
 const doc_url_1 = require("./doc-url");
 const util_1 = require("./util");
 /**
- * A class of Error that we can classify as an error stemming from a CLI
- * invocation, with associated exit code, stderr,etc.
+ * An error from a CodeQL CLI invocation, with associated exit code, stderr, etc.
  */
-class CommandInvocationError extends Error {
-    constructor(cmd, args, exitCode, stderr, stdout) {
-        const prettyCommand = [cmd, ...args]
-            .map((x) => (x.includes(" ") ? `'${x}'` : x))
-            .join(" ");
+class CliError extends Error {
+    constructor({ cmd, args, exitCode, stderr }) {
+        const prettyCommand = (0, actions_util_1.prettyPrintInvocation)(cmd, args);
         const fatalErrors = extractFatalErrors(stderr);
         const autobuildErrors = extractAutobuildErrors(stderr);
         let message;
         if (fatalErrors) {
             message =
                 `Encountered a fatal error while running "${prettyCommand}". ` +
-                    `Exit code was ${exitCode} and error was: ${ensureEndsInPeriod(fatalErrors.trim())} See the logs for more details.`;
+                    `Exit code was ${exitCode} and error was: ${(0, actions_util_1.ensureEndsInPeriod)(fatalErrors.trim())} See the logs for more details.`;
         }
         else if (autobuildErrors) {
             message =
@@ -29,7 +27,7 @@ class CommandInvocationError extends Error {
                 `Encountered the following error: ${autobuildErrors}`;
         }
         else {
-            const lastLine = ensureEndsInPeriod(stderr.trim().split("\n").pop()?.trim() || "n/a");
+            const lastLine = (0, actions_util_1.ensureEndsInPeriod)(stderr.trim().split("\n").pop()?.trim() || "n/a");
             message =
                 `Encountered a fatal error while running "${prettyCommand}". ` +
                     `Exit code was ${exitCode} and last log line was: ${lastLine} See the logs for more details.`;
@@ -37,10 +35,9 @@ class CommandInvocationError extends Error {
         super(message);
         this.exitCode = exitCode;
         this.stderr = stderr;
-        this.stdout = stdout;
     }
 }
-exports.CommandInvocationError = CommandInvocationError;
+exports.CliError = CliError;
 /**
  * Provide a better error message from the stderr of a CLI invocation that failed with a fatal
  * error.
@@ -89,10 +86,10 @@ function extractFatalErrors(error) {
     }
     const isOneLiner = !fatalErrors.some((e) => e.includes("\n"));
     if (isOneLiner) {
-        fatalErrors = fatalErrors.map(ensureEndsInPeriod);
+        fatalErrors = fatalErrors.map(actions_util_1.ensureEndsInPeriod);
     }
     return [
-        ensureEndsInPeriod(lastError),
+        (0, actions_util_1.ensureEndsInPeriod)(lastError),
         "Context:",
         ...fatalErrors.reverse(),
     ].join(isOneLiner ? " " : "\n");
@@ -109,9 +106,6 @@ function extractAutobuildErrors(error) {
     }
     return errorLines.join("\n") || undefined;
 }
-function ensureEndsInPeriod(text) {
-    return text[text.length - 1] === "." ? text : `${text}.`;
-}
 /** Error messages from the CLI that we consider configuration errors and handle specially. */
 var CliConfigErrorCategory;
 (function (CliConfigErrorCategory) {
@@ -267,9 +261,6 @@ function getCliConfigCategoryIfExists(cliError) {
  * simply returns the original error.
  */
 function wrapCliConfigurationError(cliError) {
-    if (!(cliError instanceof CommandInvocationError)) {
-        return cliError;
-    }
     const cliConfigErrorCategory = getCliConfigCategoryIfExists(cliError);
     if (cliConfigErrorCategory === undefined) {
         return cliError;
File diff suppressed because one or more lines are too long
84 lib/codeql.js (generated)
@@ -241,7 +241,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
         async getVersion() {
             let result = util.getCachedCodeQlVersion();
             if (result === undefined) {
-                const output = await runTool(cmd, ["version", "--format=json"]);
+                const output = await runCli(cmd, ["version", "--format=json"]);
                 try {
                     result = JSON.parse(output);
                 }
@@ -253,7 +253,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             return result;
         },
         async printVersion() {
-            await runTool(cmd, ["version", "--format=json"]);
+            await runCli(cmd, ["version", "--format=json"]);
         },
         async supportsFeature(feature) {
             return (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), feature);
@@ -290,7 +290,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             const overwriteFlag = (0, tools_features_1.isSupportedToolsFeature)(await this.getVersion(), tools_features_1.ToolsFeature.ForceOverwrite)
                 ? "--force-overwrite"
                 : "--overwrite";
-            await runTool(cmd, [
+            await runCli(cmd, [
                 "database",
                 "init",
                 overwriteFlag,
@@ -325,10 +325,10 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             // When `DYLD_INSERT_LIBRARIES` is set in the environment for a step,
             // the Actions runtime introduces its own workaround for SIP
             // (https://github.com/actions/runner/pull/416).
-            await runTool(autobuildCmd);
+            await runCli(autobuildCmd);
         },
         async extractScannedLanguage(config, language) {
-            await runTool(cmd, [
+            await runCli(cmd, [
                 "database",
                 "trace-command",
                 "--index-traceless-dbs",
@@ -343,7 +343,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 applyAutobuildAzurePipelinesTimeoutFix();
             }
             try {
-                await runTool(cmd, [
+                await runCli(cmd, [
                     "database",
                     "trace-command",
                     "--use-build-mode",
@@ -378,7 +378,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 ...getExtraOptionsFromEnv(["database", "finalize"]),
                 databasePath,
             ];
-            await runTool(cmd, args);
+            await runCli(cmd, args);
         },
         async resolveLanguages() {
             const codeqlArgs = [
@@ -387,7 +387,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 "--format=json",
                 ...getExtraOptionsFromEnv(["resolve", "languages"]),
             ];
-            const output = await runTool(cmd, codeqlArgs);
+            const output = await runCli(cmd, codeqlArgs);
             try {
                 return JSON.parse(output);
             }
@@ -404,7 +404,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 ...(await getLanguageAliasingArguments(this)),
                 ...getExtraOptionsFromEnv(["resolve", "languages"]),
             ];
-            const output = await runTool(cmd, codeqlArgs);
+            const output = await runCli(cmd, codeqlArgs);
             try {
                 return JSON.parse(output);
             }
@@ -423,7 +423,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             if (extraSearchPath !== undefined) {
                 codeqlArgs.push("--additional-packs", extraSearchPath);
             }
-            const output = await runTool(cmd, codeqlArgs);
+            const output = await runCli(cmd, codeqlArgs);
             try {
                 return JSON.parse(output);
             }
@@ -442,7 +442,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             if (workingDir !== undefined) {
                 codeqlArgs.push("--working-dir", workingDir);
             }
-            const output = await runTool(cmd, codeqlArgs);
+            const output = await runCli(cmd, codeqlArgs);
             try {
                 return JSON.parse(output);
             }
@@ -466,7 +466,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             if (await util.codeQlVersionAtLeast(this, feature_flags_1.CODEQL_VERSION_FINE_GRAINED_PARALLELISM)) {
                 codeqlArgs.push("--intra-layer-parallelism");
             }
-            await runTool(cmd, codeqlArgs);
+            await runCli(cmd, codeqlArgs);
         },
         async databaseInterpretResults(databasePath, querySuitePaths, sarifFile, addSnippetsFlag, threadsFlag, verbosityFlag, automationDetailsId, config, features) {
             const shouldExportDiagnostics = await features.getValue(feature_flags_1.Feature.ExportDiagnosticsEnabled, this);
@@ -512,7 +512,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             }
             // Capture the stdout, which contains the analysis summary. Don't stream it to the Actions
             // logs to avoid printing it twice.
-            return await runTool(cmd, codeqlArgs, {
+            return await runCli(cmd, codeqlArgs, {
                 noStreamStdout: true,
             });
         },
@@ -523,7 +523,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 ...getExtraOptionsFromEnv(["database", "print-baseline"]),
                 databasePath,
             ];
-            return await runTool(cmd, codeqlArgs);
+            return await runCli(cmd, codeqlArgs);
         },
         /**
          * Download specified packs into the package cache. If the specified
@@ -551,7 +551,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 ...getExtraOptionsFromEnv(["pack", "download"]),
                 ...packs,
             ];
-            const output = await runTool(cmd, codeqlArgs);
+            const output = await runCli(cmd, codeqlArgs);
             try {
                 const parsedOutput = JSON.parse(output);
                 if (Array.isArray(parsedOutput.packs) &&
@@ -580,7 +580,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
                 `${cacheCleanupFlag}=${cleanupLevel}`,
                 ...getExtraOptionsFromEnv(["database", "cleanup"]),
             ];
-            await runTool(cmd, codeqlArgs);
+            await runCli(cmd, codeqlArgs);
         },
         async databaseBundle(databasePath, outputFilePath, databaseName) {
             const args = [
@@ -662,7 +662,7 @@ async function getCodeQLForCmd(cmd, checkVersion) {
             if (mergeRunsFromEqualCategory) {
                 args.push("--sarif-merge-runs-from-equal-category");
             }
-            await runTool(cmd, args);
+            await runCli(cmd, args);
         },
     };
     // To ensure that status reports include the CodeQL CLI version wherever
@@ -742,48 +742,16 @@ function getExtraOptions(options, paths, pathInfo) {
         : getExtraOptions(options?.[paths[0]], paths?.slice(1), pathInfo.concat(paths[0]));
     return all.concat(specific);
 }
-/*
- * A constant defining the maximum number of characters we will keep from
- * the programs stderr for logging. This serves two purposes:
- * (1) It avoids an OOM if a program fails in a way that results it
- * printing many log lines.
- * (2) It avoids us hitting the limit of how much data we can send in our
- * status reports on GitHub.com.
- */
-const maxErrorSize = 20_000;
-async function runTool(cmd, args = [], opts = {}) {
-    let stdout = "";
-    let stderr = "";
-    process.stdout.write(`[command]${cmd} ${args.join(" ")}\n`);
-    const exitCode = await new toolrunner.ToolRunner(cmd, args, {
-        ignoreReturnCode: true,
-        listeners: {
-            stdout: (data) => {
-                stdout += data.toString("utf8");
-                if (!opts.noStreamStdout) {
-                    process.stdout.write(data);
-                }
-            },
-            stderr: (data) => {
-                let readStartIndex = 0;
-                // If the error is too large, then we only take the last 20,000 characters
-                if (data.length - maxErrorSize > 0) {
-                    // Eg: if we have 20,000 the start index should be 2.
-                    readStartIndex = data.length - maxErrorSize + 1;
-                }
-                stderr += data.toString("utf8", readStartIndex);
-                // Mimic the standard behavior of the toolrunner by writing stderr to stdout
-                process.stdout.write(data);
-            },
-        },
-        silent: true,
-        ...(opts.stdin ? { input: Buffer.from(opts.stdin || "") } : {}),
-    }).exec();
-    if (exitCode !== 0) {
-        const e = new cli_errors_1.CommandInvocationError(cmd, args, exitCode, stderr, stdout);
-        throw (0, cli_errors_1.wrapCliConfigurationError)(e);
+async function runCli(cmd, args = [], opts = {}) {
+    try {
+        return await (0, actions_util_1.runTool)(cmd, args, opts);
+    }
+    catch (e) {
+        if (e instanceof actions_util_1.CommandInvocationError) {
+            throw (0, cli_errors_1.wrapCliConfigurationError)(new cli_errors_1.CliError(e));
+        }
+        throw e;
     }
-    return stdout;
 }
 /**
  * Generates a code scanning configuration that is to be used for a scan.
File diff suppressed because one or more lines are too long
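The new runCli wrapper in lib/codeql.js delegates to the shared runTool and converts any CommandInvocationError into a CliError before it goes through configuration-error classification. Below is a reduced TypeScript sketch of that wrap-and-rethrow pattern using simplified stand-in types, not the Action's real modules.

// Simplified stand-ins for the real classes in actions-util.js / cli-errors.js.
class CommandInvocationError extends Error {
  constructor(public exitCode: number, public stderr: string) {
    super(`process exited with code ${exitCode}`);
  }
}

class CliError extends Error {
  constructor(cause: CommandInvocationError) {
    super(`CodeQL CLI failed (exit code ${cause.exitCode}): ${cause.stderr}`);
  }
}

// Placeholder for wrapCliConfigurationError: the real function maps known
// stderr patterns to friendlier configuration errors; here it passes through.
function wrapCliConfigurationError(e: CliError): Error {
  return e;
}

async function runCli(run: () => Promise<string>): Promise<string> {
  try {
    return await run();
  } catch (e) {
    if (e instanceof CommandInvocationError) {
      // Callers only ever see CliError (possibly re-wrapped as a
      // configuration error), never the raw process failure.
      throw wrapCliConfigurationError(new CliError(e));
    }
    throw e;
  }
}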
2 lib/codeql.test.js (generated)
@@ -591,7 +591,7 @@ for (const { codeqlVersion, flagPassed, githubVersion, negativeFlagPassed, } of
     // safeWhich throws because of the test CodeQL object.
     sinon.stub(safeWhich, "safeWhich").resolves("");
     await t.throwsAsync(async () => await codeqlObject.databaseRunQueries(stubConfig.dbLocation, []), {
-        instanceOf: cli_errors_1.CommandInvocationError,
+        instanceOf: cli_errors_1.CliError,
         message: `Encountered a fatal error while running "codeql-for-testing database run-queries --expect-discarded-cache --min-disk-free=1024 -v --intra-layer-parallelism". Exit code was 1 and error was: Oops! A fatal internal error occurred. Details:
 com.semmle.util.exception.CatastrophicError: An error occurred while evaluating ControlFlowGraph::ControlFlow::Root.isRootOf/1#dispred#f610e6ed/2@86282cc8
 Severe disk cache trouble (corruption or out of space) at /home/runner/work/_temp/codeql_databases/go/db-go/default/cache/pages/28/33.pack: Failed to write item to disk. See the logs for more details.`,
File diff suppressed because one or more lines are too long
2 lib/resolve-environment-action.js (generated)
@@ -55,7 +55,7 @@ async function run() {
     }
     catch (unwrappedError) {
         const error = (0, util_1.wrapError)(unwrappedError);
-        if (error instanceof cli_errors_1.CommandInvocationError) {
+        if (error instanceof cli_errors_1.CliError) {
             // If the CLI failed to run successfully for whatever reason,
             // we just return an empty JSON object and proceed with the workflow.
             core.setOutput(ENVIRONMENT_OUTPUT_NAME, {});
lib/resolve-environment-action.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,6CAAgD;AAChD,6CAAsD;AACtD,iDAAmD;AACnD,uCAA6C;AAC7C,+DAAmE;AACnE,mDAKyB;AACzB,iCAOgB;AAEhB,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,MAA0B,CAAC;IAE/B,IAAI,CAAC;QACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,UAAU,EACV,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;QAEtD,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;QACJ,CAAC;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAC7B,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;IAClD,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,mCAAsB,EAAE,CAAC;YAC5C,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;YACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;gBACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;YAC3C,CAAC;QACH,CAAC;QAED,OAAO;IACT,CAAC;IAED,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,GAAG,0BAAU,CAAC,kBAAkB,mBAAmB,IAAA,sBAAe,EAChE,KAAK,CACN,EAAE,CACJ,CAAC;IACJ,CAAC;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"resolve-environment-action.js","sourceRoot":"","sources":["../src/resolve-environment-action.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAsC;AAEtC,iDAKwB;AACxB,6CAAgD;AAChD,6CAAwC;AACxC,iDAAmD;AACnD,uCAA6C;AAC7C,+DAAmE;AACnE,mDAKyB;AACzB,iCAOgB;AAEhB,MAAM,uBAAuB,GAAG,aAAa,CAAC;AAE9C,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,IAAI,MAA0B,CAAC;IAE/B,IAAI,CAAC;QACH,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,UAAU,EACV,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;QACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;YACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;QAC3C,CAAC;QAED,MAAM,aAAa,GAAG,MAAM,IAAA,6BAAgB,GAAE,CAAC;QAC/C,IAAA,gCAAyB,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QACjD,IAAA,yBAAkB,EAAC,IAAA,+BAAgB,GAAE,EAAE,aAAa,CAAC,CAAC;QAEtD,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,IAAA,oCAAqB,GAAE,EAAE,MAAM,CAAC,CAAC;QAC1D,IAAI,MAAM,KAAK,SAAS,EAAE,CAAC;YACzB,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;QACJ,CAAC;QAED,MAAM,gBAAgB,GAAG,IAAA,+BAAgB,EAAC,mBAAmB,CAAC,CAAC;QAC/D,MAAM,MAAM,GAAG,MAAM,IAAA,gDAA0B,EAC7C,MAAM,CAAC,SAAS,EAChB,MAAM,EACN,gBAAgB,EAChB,IAAA,+BAAgB,EAAC,UAAU,CAAC,CAC7B,CAAC;QACF,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,MAAM,CAAC,CAAC;IAClD,CAAC;IAAC,OAAO,cAAc,EAAE,CAAC;QACxB,MAAM,KAAK,GAAG,IAAA,gBAAS,EAAC,cAAc,CAAC,CAAC;QAExC,IAAI,KAAK,YAAY,qBAAQ,EAAE,CAAC;YAC9B,6DAA6D;YAC7D,qEAAqE;YACrE,IAAI,CAAC,SAAS,CAAC,uBAAuB,EAAE,EAAE,CAAC,CAAC;YAC5C,MAAM,CAAC,OAAO,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;QACJ,CAAC;aAAM,CAAC;YACN,kFAAkF;YAClF,IAAI,CAAC,SAAS,CACZ,wFAAwF,KAAK,CAAC,OAAO,EAAE,CACxG,CAAC;YAEF,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,IAAA,gCAAgB,EAAC,KAAK,CAAC,EACvB,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,EACN,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CACZ,CAAC;YACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;gBACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;YAC3C,CAAC;QACH,CAAC;QAED,OAAO;IACT,CAAC;IAED,MAAM,gBAAgB,GAAG,MAAM,IAAA,sCAAsB,EACnD,0BAAU,CAAC,kBAAkB,EAC7B,SAAS,EACT,SAAS,EACT,MAAM,EACN,MAAM,IAAA,qBAAc,EAAC,MAAM,CAAC,EAC5B,MAAM,CACP,CAAC;IACF,IAAI,gBAAgB,KAAK,SAAS,EAAE,CAAC;QACnC,MAAM,IAAA,gCAAgB,EAAC,gBAAgB,CAAC,CAAC;IAC3C,CAAC;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI,CAAC;QACH,MAAM,GAAG,EAAE,CAAC;IACd,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,IAAI,CAAC,SAAS,CACZ,GAAG,0BAAU,CAAC,kBAAkB,mBAAmB,IAAA,sBAAe,EAChE,KAAK,CACN,EAAE,CACJ,CAAC;IACJ,CAAC;IACD,MAAM,IAAA,sBAAe,GAAE,CAAC;AAC1B,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
49 lib/setup-codeql.js (generated)
@@ -38,7 +38,6 @@ const fs = __importStar(require("fs"));
 const path = __importStar(require("path"));
 const perf_hooks_1 = require("perf_hooks");
 const toolcache = __importStar(require("@actions/tool-cache"));
-const del_1 = __importDefault(require("del"));
 const fast_deep_equal_1 = __importDefault(require("fast-deep-equal"));
 const semver = __importStar(require("semver"));
 const uuid_1 = require("uuid");
@@ -373,7 +372,7 @@ async function tryGetFallbackToolcacheVersion(cliVersion, tagName, logger) {
 }
 // Exported using `export const` for testing purposes. Specifically, we want to
 // be able to stub this function and have other functions in this file use that stub.
-const downloadCodeQL = async function (codeqlURL, maybeBundleVersion, maybeCliVersion, apiDetails, tempDir, logger) {
+const downloadCodeQL = async function (codeqlURL, maybeBundleVersion, maybeCliVersion, apiDetails, tarVersion, tempDir, logger) {
     const parsedCodeQLURL = new URL(codeqlURL);
     const searchParams = new URLSearchParams(parsedCodeQLURL.search);
     const headers = {
@@ -403,12 +402,18 @@ const downloadCodeQL = async function (codeqlURL, maybeBundleVersion, maybeCliVe
     const archivedBundlePath = await toolcache.downloadTool(codeqlURL, dest, authorization, finalHeaders);
     const downloadDurationMs = Math.round(perf_hooks_1.performance.now() - toolsDownloadStart);
     logger.debug(`Finished downloading CodeQL bundle to ${archivedBundlePath} (${downloadDurationMs} ms).`);
-    logger.debug("Extracting CodeQL bundle.");
-    const extractionStart = perf_hooks_1.performance.now();
-    const extractedBundlePath = await tar.extract(archivedBundlePath, compressionMethod);
-    const extractionDurationMs = Math.round(perf_hooks_1.performance.now() - extractionStart);
-    logger.debug(`Finished extracting CodeQL bundle to ${extractedBundlePath} (${extractionDurationMs} ms).`);
-    await cleanUpGlob(archivedBundlePath, "CodeQL bundle archive", logger);
+    let extractedBundlePath;
+    let extractionDurationMs;
+    try {
+        logger.debug("Extracting CodeQL bundle.");
+        const extractionStart = perf_hooks_1.performance.now();
+        extractedBundlePath = await tar.extract(archivedBundlePath, compressionMethod, tarVersion, logger);
+        extractionDurationMs = Math.round(perf_hooks_1.performance.now() - extractionStart);
+        logger.debug(`Finished extracting CodeQL bundle to ${extractedBundlePath} (${extractionDurationMs} ms).`);
+    }
+    finally {
+        await (0, util_1.cleanUpGlob)(archivedBundlePath, "CodeQL bundle archive", logger);
+    }
     const bundleVersion = maybeBundleVersion ?? tryGetBundleVersionFromUrl(codeqlURL, logger);
     if (bundleVersion === undefined) {
         logger.debug("Could not cache CodeQL tools because we could not determine the bundle version from the " +
@@ -429,7 +434,7 @@ const downloadCodeQL = async function (codeqlURL, maybeBundleVersion, maybeCliVe
     const toolcachedBundlePath = await toolcache.cacheDir(extractedBundlePath, "CodeQL", toolcacheVersion);
     // Defensive check: we expect `cacheDir` to copy the bundle to a new location.
     if (toolcachedBundlePath !== extractedBundlePath) {
-        await cleanUpGlob(extractedBundlePath, "CodeQL bundle from temporary directory", logger);
+        await (0, util_1.cleanUpGlob)(extractedBundlePath, "CodeQL bundle from temporary directory", logger);
     }
     return {
         codeqlFolder: toolcachedBundlePath,
@@ -492,6 +497,10 @@ async function setupCodeQLBundle(toolsInput, apiDetails, tempDir, variant, defau
         }
         catch (e) {
             zstdFailureReason = util.getErrorMessage(e) || "unknown error";
+            if (e instanceof actions_util_1.CommandInvocationError) {
+                zstdFailureReason += ` Full error: ${e.stderr}`;
+                logger.debug(`Invocation output the following to stderr: ${e.stderr}`);
+            }
             logger.warning(`Failed to set up CodeQL tools with zstd. Falling back to gzipped version. Error: ${util.getErrorMessage(e)}`);
         }
     }
@@ -510,7 +519,7 @@ async function setupCodeQLBundleWithCompressionMethod(toolsInput, apiDetails, te
     switch (source.sourceType) {
         case "local": {
             const compressionMethod = tar.inferCompressionMethod(source.codeqlTarPath);
-            codeqlFolder = await tar.extract(source.codeqlTarPath, compressionMethod);
+            codeqlFolder = await tar.extract(source.codeqlTarPath, compressionMethod, zstdAvailability.version, logger);
             toolsSource = ToolsSource.Local;
             break;
         }
@@ -520,7 +529,7 @@ async function setupCodeQLBundleWithCompressionMethod(toolsInput, apiDetails, te
             toolsSource = ToolsSource.Toolcache;
             break;
         case "download": {
-            const result = await (0, exports.downloadCodeQL)(source.codeqlURL, source.bundleVersion, source.cliVersion, apiDetails, tempDir, logger);
+            const result = await (0, exports.downloadCodeQL)(source.codeqlURL, source.bundleVersion, source.cliVersion, apiDetails, zstdAvailability.version, tempDir, logger);
             toolsVersion = result.toolsVersion;
             codeqlFolder = result.codeqlFolder;
             toolsDownloadStatusReport = result.statusReport;
@@ -538,24 +547,6 @@ async function setupCodeQLBundleWithCompressionMethod(toolsInput, apiDetails, te
         zstdAvailability,
     };
 }
-async function cleanUpGlob(glob, name, logger) {
-    logger.debug(`Cleaning up ${name}.`);
-    try {
-        const deletedPaths = await (0, del_1.default)(glob, { force: true });
-        if (deletedPaths.length === 0) {
-            logger.warning(`Failed to clean up ${name}: no files found matching ${glob}.`);
-        }
-        else if (deletedPaths.length === 1) {
-            logger.debug(`Cleaned up ${name}.`);
-        }
-        else {
-            logger.debug(`Cleaned up ${name} (${deletedPaths.length} files).`);
-        }
-    }
-    catch (e) {
-        logger.warning(`Failed to clean up ${name}: ${e}.`);
-    }
-}
 function sanitizeUrlForStatusReport(url) {
     return ["github/codeql-action", "dsp-testing/codeql-cli-nightlies"].some((repo) => url.startsWith(`https://github.com/${repo}/releases/download/`))
         ? url
File diff suppressed because one or more lines are too long
72 lib/tar.js (generated)
@@ -22,13 +22,21 @@ var __importStar = (this && this.__importStar) || function (mod) {
     __setModuleDefault(result, mod);
     return result;
 };
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.isZstdAvailable = isZstdAvailable;
 exports.extract = extract;
+exports.extractTarZst = extractTarZst;
 exports.inferCompressionMethod = inferCompressionMethod;
+const fs = __importStar(require("fs"));
+const path_1 = __importDefault(require("path"));
 const toolrunner_1 = require("@actions/exec/lib/toolrunner");
 const toolcache = __importStar(require("@actions/tool-cache"));
 const safe_which_1 = require("@chrisgavin/safe-which");
+const uuid_1 = require("uuid");
+const actions_util_1 = require("./actions-util");
 const util_1 = require("./util");
 const MIN_REQUIRED_BSD_TAR_VERSION = "3.4.3";
 const MIN_REQUIRED_GNU_TAR_VERSION = "1.31";
@@ -90,21 +98,65 @@ async function isZstdAvailable(logger) {
         return { available: false };
     }
 }
-async function extract(path, compressionMethod) {
+async function extract(tarPath, compressionMethod, tarVersion, logger) {
     switch (compressionMethod) {
         case "gzip":
-            // While we could also ask tar to autodetect the compression method,
-            // we defensively keep the gzip call identical as requesting a gzipped
-            // bundle will soon be a fallback option.
-            return await toolcache.extractTar(path);
+            // Defensively continue to call the toolcache API as requesting a gzipped
+            // bundle may be a fallback option.
+            return await toolcache.extractTar(tarPath);
         case "zstd":
-            // By specifying only the "x" flag, we ask tar to autodetect the
-            // compression method.
-            return await toolcache.extractTar(path, undefined, "x");
+            if (!tarVersion) {
+                throw new Error("Could not determine tar version, which is required to extract a Zstandard archive.");
+            }
+            return await extractTarZst(tarPath, tarVersion, logger);
     }
 }
-function inferCompressionMethod(path) {
-    if (path.endsWith(".tar.gz")) {
+/**
+ * Extract a compressed tar archive
+ *
+ * @param file path to the tar
+ * @param dest destination directory. Optional.
+ * @returns path to the destination directory
+ */
+async function extractTarZst(file, tarVersion, logger) {
+    if (!file) {
+        throw new Error("parameter 'file' is required");
+    }
+    // Create dest
+    const dest = await createExtractFolder();
+    try {
+        // Initialize args
+        const args = ["-x", "-v"];
+        let destArg = dest;
+        let fileArg = file;
+        if (process.platform === "win32" && tarVersion.type === "gnu") {
+            args.push("--force-local");
+            destArg = dest.replace(/\\/g, "/");
+            // Technically only the dest needs to have `/` but for aesthetic consistency
+            // convert slashes in the file arg too.
+            fileArg = file.replace(/\\/g, "/");
+        }
+        if (tarVersion.type === "gnu") {
+            // Suppress warnings when using GNU tar to extract archives created by BSD tar
+            args.push("--warning=no-unknown-keyword");
+            args.push("--overwrite");
+        }
+        args.push("-C", destArg, "-f", fileArg);
+        await (0, actions_util_1.runTool)(`tar`, args);
+    }
+    catch (e) {
+        await (0, util_1.cleanUpGlob)(dest, "extraction destination directory", logger);
+        throw e;
+    }
+    return dest;
+}
+async function createExtractFolder() {
+    const dest = path_1.default.join((0, actions_util_1.getTemporaryDirectory)(), (0, uuid_1.v4)());
+    fs.mkdirSync(dest, { recursive: true });
+    return dest;
+}
+function inferCompressionMethod(tarPath) {
+    if (tarPath.endsWith(".tar.gz")) {
         return "gzip";
     }
     return "zstd";
lib/tar.js.map (generated)
@@ -1 +1 @@
{"version":3,"file":"tar.js","sourceRoot":"","sources":["../src/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAqDA,0CA4BC;AAID,0BAeC;AAED,wDAKC;AA3GD,6DAA0D;AAC1D,+DAAiD;AACjD,uDAAmD;AAGnD,iCAAqC;AAErC,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAC7C,MAAM,4BAA4B,GAAG,MAAM,CAAC;AAO5C,KAAK,UAAU,aAAa;IAC1B,MAAM,GAAG,GAAG,MAAM,IAAA,sBAAS,EAAC,KAAK,CAAC,CAAC;IACnC,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,QAAQ,GAAG,MAAM,IAAI,uBAAU,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE;QACxD,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC,CAAC,IAAI,EAAE,CAAC;IACV,IAAI,QAAQ,KAAK,CAAC,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;IAClD,CAAC;IACD,oEAAoE;IACpE,IAAI,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QACxD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,IAAI,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;QACrC,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;QAC/C,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;IACzC,CAAC;AACH,CAAC;AAOM,KAAK,UAAU,eAAe,CACnC,MAAc;IAEd,IAAI,CAAC;QACH,MAAM,UAAU,GAAG,MAAM,aAAa,EAAE,CAAC;QACzC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,SAAS,IAAI,gBAAgB,OAAO,GAAG,CAAC,CAAC;QACrD,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ;gBACE,IAAA,kBAAW,EAAC,IAAI,CAAC,CAAC;QACtB,CAAC;IACH,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CACV,oFAAoF;YAClF,6BAA6B,CAAC,EAAE,CACnC,CAAC;QACF,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC;IAC9B,CAAC;AACH,CAAC;AAIM,KAAK,UAAU,OAAO,CAC3B,IAAY,EACZ,iBAAoC;IAEpC,QAAQ,iBAAiB,EAAE,CAAC;QAC1B,KAAK,MAAM;YACT,oEAAoE;YACpE,sEAAsE;YACtE,yCAAyC;YACzC,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAC1C,KAAK,MAAM;YACT,gEAAgE;YAChE,sBAAsB;YACtB,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,IAAI,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;IAC5D,CAAC;AACH,CAAC;AAED,SAAgB,sBAAsB,CAAC,IAAY;IACjD,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC7B,OAAO,MAAM,CAAC;IAChB,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"}
{"version":3,"file":"tar.js","sourceRoot":"","sources":["../src/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0DA,0CA4BC;AAID,0BAmBC;AASD,sCAyCC;AAQD,wDAKC;AA5KD,uCAAyB;AACzB,gDAAwB;AAExB,6DAA0D;AAC1D,+DAAiD;AACjD,uDAAmD;AACnD,+BAAoC;AAEpC,iDAAgE;AAEhE,iCAAkD;AAElD,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAC7C,MAAM,4BAA4B,GAAG,MAAM,CAAC;AAO5C,KAAK,UAAU,aAAa;IAC1B,MAAM,GAAG,GAAG,MAAM,IAAA,sBAAS,EAAC,KAAK,CAAC,CAAC;IACnC,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,MAAM,QAAQ,GAAG,MAAM,IAAI,uBAAU,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE;QACxD,SAAS,EAAE;YACT,MAAM,EAAE,CAAC,IAAY,EAAE,EAAE;gBACvB,MAAM,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAC5B,CAAC;SACF;KACF,CAAC,CAAC,IAAI,EAAE,CAAC;IACV,IAAI,QAAQ,KAAK,CAAC,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;IAClD,CAAC;IACD,oEAAoE;IACpE,IAAI,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAC/B,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC;QACxD,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,IAAI,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;QACrC,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;QAC/C,IAAI,CAAC,KAAK,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;YACxB,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC9D,CAAC;QAED,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC5C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC,CAAC;IACzC,CAAC;AACH,CAAC;AAOM,KAAK,UAAU,eAAe,CACnC,MAAc;IAEd,IAAI,CAAC;QACH,MAAM,UAAU,GAAG,MAAM,aAAa,EAAE,CAAC;QACzC,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,UAAU,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,SAAS,IAAI,gBAAgB,OAAO,GAAG,CAAC,CAAC;QACrD,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ,KAAK,KAAK;gBACR,OAAO;oBACL,SAAS,EAAE,OAAO,IAAI,4BAA4B;oBAClD,OAAO,EAAE,UAAU;iBACpB,CAAC;YACJ;gBACE,IAAA,kBAAW,EAAC,IAAI,CAAC,CAAC;QACtB,CAAC;IACH,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,CAAC,KAAK,CACV,oFAAoF;YAClF,6BAA6B,CAAC,EAAE,CACnC,CAAC;QACF,OAAO,EAAE,SAAS,EAAE,KAAK,EAAE,CAAC;IAC9B,CAAC;AACH,CAAC;AAIM,KAAK,UAAU,OAAO,CAC3B,OAAe,EACf,iBAAoC,EACpC,UAAkC,EAClC,MAAc;IAEd,QAAQ,iBAAiB,EAAE,CAAC;QAC1B,KAAK,MAAM;YACT,yEAAyE;YACzE,mCAAmC;YACnC,OAAO,MAAM,SAAS,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QAC7C,KAAK,MAAM;YACT,IAAI,CAAC,UAAU,EAAE,CAAC;gBAChB,MAAM,IAAI,KAAK,CACb,oFAAoF,CACrF,CAAC;YACJ,CAAC;YACD,OAAO,MAAM,aAAa,CAAC,OAAO,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5D,CAAC;AACH,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,aAAa,CACjC,IAAY,EACZ,UAAsB,EACtB,MAAc;IAEd,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;IAClD,CAAC;IAED,cAAc;IACd,MAAM,IAAI,GAAG,MAAM,mBAAmB,EAAE,CAAC;IAEzC,IAAI,CAAC;QACH,kBAAkB;QAClB,MAAM,IAAI,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QAE1B,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,IAAI,OAAO,GAAG,IAAI,CAAC;QACnB,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,UAAU,CAAC,IAAI,KAAK,KAAK,EAAE,CAAC;YAC9D,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YAC3B,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAEnC,4EAA4E;YAC5E,uCAAuC;YACvC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;QACrC,CAAC;QAED,IAAI,UAAU,CAAC,IAAI,KAAK,KAAK,EAAE,CAAC;YAC9B,8EAA8E;YAC9E,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;YAC1C,IAAI,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAC3B,CAAC;QAED,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;QACxC,MAAM,IAAA,sBAAO,EAAC,KAAK,EAAE,IAAI,CAAC,CAAC;IAC7B,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,MAAM,IAAA,kBAAW,EAAC,IAAI,EAAE,kCAAkC,EAAE,MAAM,CAAC,CAAC;QACpE,MAAM,CAAC,CAAC;IACV,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED,KAAK,UAAU,mBAAmB;IAChC,MAAM,IAAI,GAAG,cAAI,CAA
C,IAAI,CAAC,IAAA,oCAAqB,GAAE,EAAE,IAAA,SAAM,GAAE,CAAC,CAAC;IAC1D,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IACxC,OAAO,IAAI,CAAC;AACd,CAAC;AAED,SAAgB,sBAAsB,CAAC,OAAe;IACpD,IAAI,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;QAChC,OAAO,MAAM,CAAC;IAChB,CAAC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"}
19 lib/util.js (generated)
@@ -68,6 +68,7 @@ exports.checkDiskUsage = checkDiskUsage;
 exports.checkActionVersion = checkActionVersion;
 exports.cloneObject = cloneObject;
 exports.checkSipEnablement = checkSipEnablement;
+exports.cleanUpGlob = cleanUpGlob;
 const fs = __importStar(require("fs"));
 const os = __importStar(require("os"));
 const path = __importStar(require("path"));
@@ -902,4 +903,22 @@ async function checkSipEnablement(logger) {
         return undefined;
     }
 }
+async function cleanUpGlob(glob, name, logger) {
+    logger.debug(`Cleaning up ${name}.`);
+    try {
+        const deletedPaths = await (0, del_1.default)(glob, { force: true });
+        if (deletedPaths.length === 0) {
+            logger.warning(`Failed to clean up ${name}: no files found matching ${glob}.`);
+        }
+        else if (deletedPaths.length === 1) {
+            logger.debug(`Cleaned up ${name}.`);
+        }
+        else {
+            logger.debug(`Cleaned up ${name} (${deletedPaths.length} files).`);
+        }
+    }
+    catch (e) {
+        logger.warning(`Failed to clean up ${name}: ${e}.`);
+    }
+}
 //# sourceMappingURL=util.js.map
File diff suppressed because one or more lines are too long