Move debug artifact methods into separate file
parent 5895ab0c0b
commit eeee462f05
18 changed files with 434 additions and 359 deletions
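Overview of the refactor shown in the diffs below: the debug artifact helpers (sanitizeArifactName, uploadDebugArtifacts, uploadSarifDebugArtifact, uploadFinalLogsDebugArtifact, uploadLogsDebugArtifact, uploadDatabaseBundleDebugArtifact) move out of actions-util into a new debug-artifacts module, and the post steps switch their imports. A minimal TypeScript sketch (not part of the commit) of the caller-side change, where run() stands in for the existing post-step entry point:

    import * as debugArtifacts from "./debug-artifacts";

    // run() is a stand-in for the existing post-step logic shown in the diffs below.
    async function run(uploadSarifDebugArtifact: Function) {
      // ... existing cleanup logic ...
    }

    async function runWrapper() {
      // Previously: await run(actionsUtil.uploadSarifDebugArtifact);
      await run(debugArtifacts.uploadSarifDebugArtifact);
    }

    void runWrapper();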
137
lib/actions-util.js
generated
@@ -18,24 +18,16 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadDatabaseBundleDebugArtifact = exports.uploadLogsDebugArtifact = exports.uploadFinalLogsDebugArtifact = exports.uploadSarifDebugArtifact = exports.uploadDebugArtifacts = exports.sanitizeArifactName = exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
exports.isAnalyzingDefaultBranch = exports.getRelativeScriptPath = exports.isRunningLocalAction = exports.sendStatusReport = exports.createStatusReportBase = exports.getActionsStatus = exports.getRef = exports.computeAutomationID = exports.getAutomationID = exports.getAnalysisKey = exports.getWorkflowRunID = exports.getWorkflow = exports.formatWorkflowCause = exports.formatWorkflowErrors = exports.validateWorkflow = exports.getWorkflowErrors = exports.WorkflowErrors = exports.patternIsSuperset = exports.determineMergeBaseCommitOid = exports.getCommitOid = exports.getTemporaryDirectory = exports.getOptionalInput = exports.getRequiredInput = void 0;
const fs = __importStar(require("fs"));
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const artifact = __importStar(require("@actions/artifact"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const toolrunner = __importStar(require("@actions/exec/lib/toolrunner"));
|
||||
const safeWhich = __importStar(require("@chrisgavin/safe-which"));
|
||||
const adm_zip_1 = __importDefault(require("adm-zip"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
const analyze_1 = require("./analyze");
|
||||
const api = __importStar(require("./api-client"));
|
||||
const codeql_1 = require("./codeql");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util_1 = require("./util");
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
@@ -685,131 +677,4 @@ async function isAnalyzingDefaultBranch() {
return currentRef === defaultBranch;
}
exports.isAnalyzingDefaultBranch = isAnalyzingDefaultBranch;
function sanitizeArifactName(name) {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}
exports.sanitizeArifactName = sanitizeArifactName;
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
if (toUpload.length === 0) {
return;
}
let suffix = "";
const matrix = getRequiredInput("matrix");
if (matrix) {
try {
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${matrixVal}`;
}
catch (e) {
core.info("Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input.");
}
}
await artifact.create().uploadArtifact(sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
}
exports.uploadDebugArtifacts = uploadDebugArtifacts;
async function uploadSarifDebugArtifact(config, outputDir) {
if (!(0, util_1.doesDirectoryExist)(outputDir)) {
return;
}
let toUpload = [];
for (const lang of config.languages) {
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
if (fs.existsSync(sarifFile)) {
toUpload = toUpload.concat(sarifFile);
}
}
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
}
exports.uploadSarifDebugArtifact = uploadSarifDebugArtifact;
async function uploadFinalLogsDebugArtifact(config) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.join(databaseDirectory, "log");
if (!(0, util_1.doesDirectoryExist)(logsDirectory)) {
core.info(`Directory ${logsDirectory} does not exist.`);
continue; // Skip this language database.
}
const walkLogFiles = (dir) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
if (entries.length === 0) {
core.info(`No debug logs found at directory ${logsDirectory}.`);
}
for (const entry of entries) {
if (entry.isFile()) {
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
core.endGroup();
}
else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}
exports.uploadFinalLogsDebugArtifact = uploadFinalLogsDebugArtifact;
async function uploadLogsDebugArtifact(config) {
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
let toUpload = [];
for (const language of config.languages) {
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
const logsDirectory = path.resolve(databaseDirectory, "log");
if ((0, util_1.doesDirectoryExist)(logsDirectory)) {
toUpload = toUpload.concat((0, util_1.listFolder)(logsDirectory));
}
}
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
toUpload = toUpload.concat((0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
}
}
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
const compoundBuildTracerLogDirectory = path.resolve(config.tempDir, "compound-build-tracer.log");
if ((0, util_1.doesDirectoryExist)(compoundBuildTracerLogDirectory)) {
await uploadDebugArtifacts([compoundBuildTracerLogDirectory], config.tempDir, config.debugArtifactName);
}
}
}
exports.uploadLogsDebugArtifact = uploadLogsDebugArtifact;
/**
* If a database has not been finalized, we cannot run the `codeql database bundle`
* command in the CLI because it will return an error. Instead we directly zip
* all files in the database folder and upload it as an artifact.
*/
async function uploadPartialDatabaseBundle(config, language) {
const databasePath = (0, util_1.getCodeQLDatabasePath)(config, language);
const databaseBundlePath = path.resolve(config.dbLocation, `${config.debugDatabaseName}-${language}-partial.zip`);
core.info(`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`);
// See `bundleDb` for explanation behind deleting existing db bundle.
if (fs.existsSync(databaseBundlePath)) {
await (0, del_1.default)(databaseBundlePath, { force: true });
}
const zip = new adm_zip_1.default();
zip.addLocalFolder(databasePath);
zip.writeZip(databaseBundlePath);
await uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName);
}
async function uploadDatabaseBundleDebugArtifact(config, logger) {
for (const language of config.languages) {
if (!(0, analyze_1.dbIsFinalized)(config, language, logger)) {
await uploadPartialDatabaseBundle(config, language);
continue;
}
try {
// Otherwise run `codeql database bundle` command.
const bundlePath = await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`);
await uploadDebugArtifacts([bundlePath], config.dbLocation, config.debugArtifactName);
}
catch (error) {
core.info(`Failed to upload database debug bundles for ${config.debugDatabaseName}-${language}: ${error}`);
}
}
}
exports.uploadDatabaseBundleDebugArtifact = uploadDatabaseBundleDebugArtifact;
//# sourceMappingURL=actions-util.js.map
File diff suppressed because one or more lines are too long
7
lib/actions-util.test.js
generated
@@ -497,11 +497,4 @@ on: ["push"]
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
// TODO(angelapwen): Test uploadDebugArtifacts if toUpload is empty
//# sourceMappingURL=actions-util.test.js.map
File diff suppressed because one or more lines are too long
3
lib/analyze-action-post.js
generated
@@ -27,6 +27,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||
const logging_1 = require("./logging");
|
||||
async function run(uploadSarifDebugArtifact) {
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
|
|
@@ -42,7 +43,7 @@ async function run(uploadSarifDebugArtifact) {
}
async function runWrapper() {
try {
await run(actionsUtil.uploadSarifDebugArtifact);
await run(debugArtifacts.uploadSarifDebugArtifact);
}
catch (error) {
core.setFailed(`analyze action cleanup failed: ${error}`);
lib/analyze-action-post.js.map
@@ -1 +1 @@
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAE7C,KAAK,UAAU,GAAG,CAAC,wBAAkC;IACnD,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,+CAA+C;IAC/C,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,CAAC,WAAW,CAAC,wBAAwB,CAAC,CAAC;KACjD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kCAAkC,KAAK,EAAE,CAAC,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"analyze-action-post.js","sourceRoot":"","sources":["../src/analyze-action-post.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA;;;;GAIG;AACH,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,kEAAoD;AACpD,uCAA6C;AAE7C,KAAK,UAAU,GAAG,CAAC,wBAAkC;IACnD,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,+CAA+C;IAC/C,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,MAAM,SAAS,GAAG,WAAW,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;QACzD,MAAM,wBAAwB,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;KACnD;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,CAAC,cAAc,CAAC,wBAAwB,CAAC,CAAC;KACpD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kCAAkC,KAAK,EAAE,CAAC,CAAC;QAC1D,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
163
lib/debug-artifacts.js
generated
Normal file
@@ -0,0 +1,163 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadDatabaseBundleDebugArtifact = exports.uploadLogsDebugArtifact = exports.uploadFinalLogsDebugArtifact = exports.uploadSarifDebugArtifact = exports.uploadDebugArtifacts = exports.sanitizeArifactName = void 0;
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const artifact = __importStar(require("@actions/artifact"));
|
||||
const core = __importStar(require("@actions/core"));
|
||||
const adm_zip_1 = __importDefault(require("adm-zip"));
|
||||
const del_1 = __importDefault(require("del"));
|
||||
const actions_util_1 = require("./actions-util");
|
||||
const analyze_1 = require("./analyze");
|
||||
const codeql_1 = require("./codeql");
|
||||
const util_1 = require("./util");
|
||||
function sanitizeArifactName(name) {
|
||||
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
||||
}
|
||||
exports.sanitizeArifactName = sanitizeArifactName;
|
||||
async function uploadDebugArtifacts(toUpload, rootDir, artifactName) {
|
||||
if (toUpload.length === 0) {
|
||||
return;
|
||||
}
|
||||
let suffix = "";
|
||||
const matrix = (0, actions_util_1.getRequiredInput)("matrix");
|
||||
if (matrix) {
|
||||
try {
|
||||
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
|
||||
suffix += `-${matrixVal}`;
|
||||
}
|
||||
catch (e) {
|
||||
core.info("Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input.");
|
||||
}
|
||||
}
|
||||
await artifact.create().uploadArtifact(sanitizeArifactName(`${artifactName}${suffix}`), toUpload.map((file) => path.normalize(file)), path.normalize(rootDir));
|
||||
}
|
||||
exports.uploadDebugArtifacts = uploadDebugArtifacts;
|
||||
async function uploadSarifDebugArtifact(config, outputDir) {
|
||||
if (!(0, util_1.doesDirectoryExist)(outputDir)) {
|
||||
return;
|
||||
}
|
||||
let toUpload = [];
|
||||
for (const lang of config.languages) {
|
||||
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
|
||||
if (fs.existsSync(sarifFile)) {
|
||||
toUpload = toUpload.concat(sarifFile);
|
||||
}
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
|
||||
}
|
||||
exports.uploadSarifDebugArtifact = uploadSarifDebugArtifact;
|
||||
async function uploadFinalLogsDebugArtifact(config) {
|
||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||
const logsDirectory = path.join(databaseDirectory, "log");
|
||||
if (!(0, util_1.doesDirectoryExist)(logsDirectory)) {
|
||||
core.info(`Directory ${logsDirectory} does not exist.`);
|
||||
continue; // Skip this language database.
|
||||
}
|
||||
const walkLogFiles = (dir) => {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
if (entries.length === 0) {
|
||||
core.info(`No debug logs found at directory ${logsDirectory}.`);
|
||||
}
|
||||
for (const entry of entries) {
|
||||
if (entry.isFile()) {
|
||||
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
||||
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
||||
core.endGroup();
|
||||
}
|
||||
else if (entry.isDirectory()) {
|
||||
walkLogFiles(path.resolve(dir, entry.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
walkLogFiles(logsDirectory);
|
||||
}
|
||||
}
|
||||
exports.uploadFinalLogsDebugArtifact = uploadFinalLogsDebugArtifact;
|
||||
async function uploadLogsDebugArtifact(config) {
|
||||
const codeql = await (0, codeql_1.getCodeQL)(config.codeQLCmd);
|
||||
let toUpload = [];
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||
const logsDirectory = path.resolve(databaseDirectory, "log");
|
||||
if ((0, util_1.doesDirectoryExist)(logsDirectory)) {
|
||||
toUpload = toUpload.concat((0, util_1.listFolder)(logsDirectory));
|
||||
}
|
||||
}
|
||||
if (await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||
const multiLanguageTracingLogsDirectory = path.resolve(config.dbLocation, "log");
|
||||
if ((0, util_1.doesDirectoryExist)(multiLanguageTracingLogsDirectory)) {
|
||||
toUpload = toUpload.concat((0, util_1.listFolder)(multiLanguageTracingLogsDirectory));
|
||||
}
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, config.dbLocation, config.debugArtifactName);
|
||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||
if (!(await (0, util_1.codeQlVersionAbove)(codeql, codeql_1.CODEQL_VERSION_NEW_TRACING))) {
|
||||
const compoundBuildTracerLogDirectory = path.resolve(config.tempDir, "compound-build-tracer.log");
|
||||
if ((0, util_1.doesDirectoryExist)(compoundBuildTracerLogDirectory)) {
|
||||
await uploadDebugArtifacts([compoundBuildTracerLogDirectory], config.tempDir, config.debugArtifactName);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.uploadLogsDebugArtifact = uploadLogsDebugArtifact;
|
||||
/**
|
||||
* If a database has not been finalized, we cannot run the `codeql database bundle`
|
||||
* command in the CLI because it will return an error. Instead we directly zip
|
||||
* all files in the database folder and upload it as an artifact.
|
||||
*/
|
||||
async function uploadPartialDatabaseBundle(config, language) {
|
||||
const databasePath = (0, util_1.getCodeQLDatabasePath)(config, language);
|
||||
const databaseBundlePath = path.resolve(config.dbLocation, `${config.debugDatabaseName}-${language}-partial.zip`);
|
||||
core.info(`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`);
|
||||
// See `bundleDb` for explanation behind deleting existing db bundle.
|
||||
if (fs.existsSync(databaseBundlePath)) {
|
||||
await (0, del_1.default)(databaseBundlePath, { force: true });
|
||||
}
|
||||
const zip = new adm_zip_1.default();
|
||||
zip.addLocalFolder(databasePath);
|
||||
zip.writeZip(databaseBundlePath);
|
||||
await uploadDebugArtifacts([databaseBundlePath], config.dbLocation, config.debugArtifactName);
|
||||
}
|
||||
async function uploadDatabaseBundleDebugArtifact(config, logger) {
|
||||
for (const language of config.languages) {
|
||||
if (!(0, analyze_1.dbIsFinalized)(config, language, logger)) {
|
||||
await uploadPartialDatabaseBundle(config, language);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
// Otherwise run `codeql database bundle` command.
|
||||
const bundlePath = await (0, util_1.bundleDb)(config, language, await (0, codeql_1.getCodeQL)(config.codeQLCmd), `${config.debugDatabaseName}-${language}`);
|
||||
await uploadDebugArtifacts([bundlePath], config.dbLocation, config.debugArtifactName);
|
||||
}
|
||||
catch (error) {
|
||||
core.info(`Failed to upload database debug bundles for ${config.debugDatabaseName}-${language}: ${error}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.uploadDatabaseBundleDebugArtifact = uploadDatabaseBundleDebugArtifact;
|
||||
//# sourceMappingURL=debug-artifacts.js.map
|
||||
1
lib/debug-artifacts.js.map
Normal file
File diff suppressed because one or more lines are too long
34
lib/debug-artifacts.test.js
generated
Normal file
@@ -0,0 +1,34 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const ava_1 = __importDefault(require("ava"));
const debugArtifacts = __importStar(require("./debug-artifacts"));
(0, ava_1.default)("sanitizeArifactName", (t) => {
t.deepEqual(debugArtifacts.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(debugArtifacts.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(debugArtifacts.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(debugArtifacts.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"), "manyinvalid");
});
// TODO(angelapwen): Test uploadDebugArtifacts if toUpload is empty
//# sourceMappingURL=debug-artifacts.test.js.map
1
lib/debug-artifacts.test.js.map
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"debug-artifacts.test.js","sourceRoot":"","sources":["../src/debug-artifacts.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAAA,8CAAuB;AAEvB,kEAAoD;AAEpD,IAAA,aAAI,EAAC,qBAAqB,EAAE,CAAC,CAAC,EAAE,EAAE;IAChC,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,mBAAmB,CAAC,cAAc,CAAC,EAClD,cAAc,CACf,CAAC;IACF,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,mBAAmB,CAAC,cAAc,CAAC,EAAE,YAAY,CAAC,CAAC;IAC9E,CAAC,CAAC,SAAS,CAAC,cAAc,CAAC,mBAAmB,CAAC,aAAa,CAAC,EAAE,UAAU,CAAC,CAAC;IAC3E,CAAC,CAAC,SAAS,CACT,cAAc,CAAC,mBAAmB,CAAC,yBAAyB,CAAC,EAC7D,aAAa,CACd,CAAC;AACJ,CAAC,CAAC,CAAC;AAEH,mEAAmE"}
3
lib/init-action-post.js
generated
@@ -27,6 +27,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
|
||||
const actionsUtil = __importStar(require("./actions-util"));
|
||||
const config_utils_1 = require("./config-utils");
|
||||
const debugArtifacts = __importStar(require("./debug-artifacts"));
|
||||
const logging_1 = require("./logging");
|
||||
async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, uploadFinalLogsDebugArtifact) {
|
||||
const logger = (0, logging_1.getActionsLogger)();
|
||||
|
|
@@ -43,7 +44,7 @@ async function run(uploadDatabaseBundleDebugArtifact, uploadLogsDebugArtifact, u
}
async function runWrapper() {
try {
await run(actionsUtil.uploadDatabaseBundleDebugArtifact, actionsUtil.uploadLogsDebugArtifact, actionsUtil.uploadFinalLogsDebugArtifact);
await run(debugArtifacts.uploadDatabaseBundleDebugArtifact, debugArtifacts.uploadLogsDebugArtifact, debugArtifacts.uploadFinalLogsDebugArtifact);
}
catch (error) {
core.setFailed(`init action cleanup failed: ${error}`);
lib/init-action-post.js.map
@@ -1 +1 @@
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,uCAA6C;AAE7C,KAAK,UAAU,GAAG,CAChB,iCAA2C,EAC3C,uBAAiC,EACjC,4BAAsC;IAEtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QACtC,MAAM,4BAA4B,CAAC,MAAM,CAAC,CAAC;KAC5C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,CACP,WAAW,CAAC,iCAAiC,EAC7C,WAAW,CAAC,uBAAuB,EACnC,WAAW,CAAC,4BAA4B,CACzC,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,+BAA+B,KAAK,EAAE,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
{"version":3,"file":"init-action-post.js","sourceRoot":"","sources":["../src/init-action-post.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,oDAAsC;AAEtC,4DAA8C;AAC9C,iDAA2C;AAC3C,kEAAoD;AACpD,uCAA6C;AAE7C,KAAK,UAAU,GAAG,CAChB,iCAA2C,EAC3C,uBAAiC,EACjC,4BAAsC;IAEtC,MAAM,MAAM,GAAG,IAAA,0BAAgB,GAAE,CAAC;IAElC,MAAM,MAAM,GAAG,MAAM,IAAA,wBAAS,EAAC,WAAW,CAAC,qBAAqB,EAAE,EAAE,MAAM,CAAC,CAAC;IAC5E,IAAI,MAAM,KAAK,SAAS,EAAE;QACxB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;KACH;IAED,qDAAqD;IACrD,IAAI,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,SAAS,EAAE;QACrB,MAAM,iCAAiC,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACxD,MAAM,uBAAuB,CAAC,MAAM,CAAC,CAAC;QACtC,MAAM,4BAA4B,CAAC,MAAM,CAAC,CAAC;KAC5C;AACH,CAAC;AAED,KAAK,UAAU,UAAU;IACvB,IAAI;QACF,MAAM,GAAG,CACP,cAAc,CAAC,iCAAiC,EAChD,cAAc,CAAC,uBAAuB,EACtC,cAAc,CAAC,4BAA4B,CAC5C,CAAC;KACH;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,+BAA+B,KAAK,EAAE,CAAC,CAAC;QACvD,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;KACpB;AACH,CAAC;AAED,KAAK,UAAU,EAAE,CAAC"}
src/actions-util.test.ts
@@ -753,15 +753,3 @@ test("isAnalyzingDefaultBranch()", async (t) => {
t.deepEqual(await actionsutil.isAnalyzingDefaultBranch(), false);
});
});

test("sanitizeArifactName", (t) => {
t.deepEqual(actionsutil.sanitizeArifactName("hello-world_"), "hello-world_");
t.deepEqual(actionsutil.sanitizeArifactName("hello`world`"), "helloworld");
t.deepEqual(actionsutil.sanitizeArifactName("hello===123"), "hello123");
t.deepEqual(
actionsutil.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"),
"manyinvalid"
);
});

// TODO(angelapwen): Test uploadDebugArtifacts if toUpload is empty
src/actions-util.ts
@@ -2,35 +2,22 @@ import * as fs from "fs";
import * as os from "os";
|
||||
import * as path from "path";
|
||||
|
||||
import * as artifact from "@actions/artifact";
|
||||
import * as core from "@actions/core";
|
||||
import * as toolrunner from "@actions/exec/lib/toolrunner";
|
||||
import * as safeWhich from "@chrisgavin/safe-which";
|
||||
import AdmZip from "adm-zip";
|
||||
import del from "del";
|
||||
import * as yaml from "js-yaml";
|
||||
|
||||
import { dbIsFinalized } from "./analyze";
|
||||
import * as api from "./api-client";
|
||||
import { CODEQL_VERSION_NEW_TRACING, getCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import { Logger } from "./logging";
|
||||
import * as sharedEnv from "./shared-environment";
|
||||
import {
|
||||
bundleDb,
|
||||
codeQlVersionAbove,
|
||||
doesDirectoryExist,
|
||||
getCachedCodeQlVersion,
|
||||
getCodeQLDatabasePath,
|
||||
getRequiredEnvParam,
|
||||
GITHUB_DOTCOM_URL,
|
||||
isGitHubGhesVersionBelow,
|
||||
isHTTPError,
|
||||
isInTestMode,
|
||||
listFolder,
|
||||
UserError,
|
||||
} from "./util";
|
||||
import { Language } from "./languages";
|
||||
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
const pkg = require("../package.json");
|
||||
|
|
@@ -883,184 +870,4 @@ export async function isAnalyzingDefaultBranch(): Promise<boolean> {
const defaultBranch = event?.repository?.default_branch;

return currentRef === defaultBranch;
}

export function sanitizeArifactName(name: string): string {
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
}

export async function uploadDebugArtifacts(
toUpload: string[],
rootDir: string,
artifactName: string
) {
if (toUpload.length === 0) {
return;
}
let suffix = "";
const matrix = getRequiredInput("matrix");
if (matrix) {
try {
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
suffix += `-${matrixVal}`;
} catch (e) {
core.info(
"Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input."
);
}
}
await artifact.create().uploadArtifact(
sanitizeArifactName(`${artifactName}${suffix}`),
toUpload.map((file) => path.normalize(file)),
path.normalize(rootDir)
);
}

export async function uploadSarifDebugArtifact(
config: Config,
outputDir: string
) {
if (!doesDirectoryExist(outputDir)) {
return;
}

let toUpload: string[] = [];
for (const lang of config.languages) {
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
if (fs.existsSync(sarifFile)) {
toUpload = toUpload.concat(sarifFile);
}
}
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
}

export async function uploadFinalLogsDebugArtifact(config: Config) {
core.info("Debug mode is on. Printing CodeQL debug logs...");
for (const language of config.languages) {
const databaseDirectory = getCodeQLDatabasePath(config, language);
const logsDirectory = path.join(databaseDirectory, "log");
if (!doesDirectoryExist(logsDirectory)) {
core.info(`Directory ${logsDirectory} does not exist.`);
continue; // Skip this language database.
}

const walkLogFiles = (dir: string) => {
const entries = fs.readdirSync(dir, { withFileTypes: true });
if (entries.length === 0) {
core.info(`No debug logs found at directory ${logsDirectory}.`);
}
for (const entry of entries) {
if (entry.isFile()) {
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
core.endGroup();
} else if (entry.isDirectory()) {
walkLogFiles(path.resolve(dir, entry.name));
}
}
};
walkLogFiles(logsDirectory);
}
}

export async function uploadLogsDebugArtifact(config: Config) {
const codeql = await getCodeQL(config.codeQLCmd);

let toUpload: string[] = [];
for (const language of config.languages) {
const databaseDirectory = getCodeQLDatabasePath(config, language);
const logsDirectory = path.resolve(databaseDirectory, "log");
if (doesDirectoryExist(logsDirectory)) {
toUpload = toUpload.concat(listFolder(logsDirectory));
}
}

if (await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING)) {
// Multilanguage tracing: there are additional logs in the root of the cluster
const multiLanguageTracingLogsDirectory = path.resolve(
config.dbLocation,
"log"
);
if (doesDirectoryExist(multiLanguageTracingLogsDirectory)) {
toUpload = toUpload.concat(listFolder(multiLanguageTracingLogsDirectory));
}
}
await uploadDebugArtifacts(
toUpload,
config.dbLocation,
config.debugArtifactName
);

// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
if (!(await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING))) {
const compoundBuildTracerLogDirectory = path.resolve(
config.tempDir,
"compound-build-tracer.log"
);
if (doesDirectoryExist(compoundBuildTracerLogDirectory)) {
await uploadDebugArtifacts(
[compoundBuildTracerLogDirectory],
config.tempDir,
config.debugArtifactName
);
}
}
}

/**
* If a database has not been finalized, we cannot run the `codeql database bundle`
* command in the CLI because it will return an error. Instead we directly zip
* all files in the database folder and upload it as an artifact.
*/
async function uploadPartialDatabaseBundle(config: Config, language: Language) {
const databasePath = getCodeQLDatabasePath(config, language);
const databaseBundlePath = path.resolve(
config.dbLocation,
`${config.debugDatabaseName}-${language}-partial.zip`
);
core.info(
`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`
);
// See `bundleDb` for explanation behind deleting existing db bundle.
if (fs.existsSync(databaseBundlePath)) {
await del(databaseBundlePath, { force: true });
}
const zip = new AdmZip();
zip.addLocalFolder(databasePath);
zip.writeZip(databaseBundlePath);
await uploadDebugArtifacts(
[databaseBundlePath],
config.dbLocation,
config.debugArtifactName
);
}

export async function uploadDatabaseBundleDebugArtifact(
config: Config,
logger: Logger
) {
for (const language of config.languages) {
if (!dbIsFinalized(config, language, logger)) {
await uploadPartialDatabaseBundle(config, language);
continue;
}
try {
// Otherwise run `codeql database bundle` command.
const bundlePath = await bundleDb(
config,
language,
await getCodeQL(config.codeQLCmd),
`${config.debugDatabaseName}-${language}`
);
await uploadDebugArtifacts(
[bundlePath],
config.dbLocation,
config.debugArtifactName
);
} catch (error) {
core.info(
`Failed to upload database debug bundles for ${config.debugDatabaseName}-${language}: ${error}`
);
}
}
}
}
src/analyze-action-post.ts
@@ -7,6 +7,7 @@ import * as core from "@actions/core";
import * as actionsUtil from "./actions-util";
import { getConfig } from "./config-utils";
import * as debugArtifacts from "./debug-artifacts";
import { getActionsLogger } from "./logging";

async function run(uploadSarifDebugArtifact: Function) {
@@ -28,7 +29,7 @@ async function run(uploadSarifDebugArtifact: Function) {
async function runWrapper() {
try {
await run(actionsUtil.uploadSarifDebugArtifact);
await run(debugArtifacts.uploadSarifDebugArtifact);
} catch (error) {
core.setFailed(`analyze action cleanup failed: ${error}`);
console.log(error);
18
src/debug-artifacts.test.ts
Normal file
@@ -0,0 +1,18 @@
import test from "ava";
|
||||
|
||||
import * as debugArtifacts from "./debug-artifacts";
|
||||
|
||||
test("sanitizeArifactName", (t) => {
|
||||
t.deepEqual(
|
||||
debugArtifacts.sanitizeArifactName("hello-world_"),
|
||||
"hello-world_"
|
||||
);
|
||||
t.deepEqual(debugArtifacts.sanitizeArifactName("hello`world`"), "helloworld");
|
||||
t.deepEqual(debugArtifacts.sanitizeArifactName("hello===123"), "hello123");
|
||||
t.deepEqual(
|
||||
debugArtifacts.sanitizeArifactName("*m)a&n^y%i££n+v!a:l[i]d"),
|
||||
"manyinvalid"
|
||||
);
|
||||
});
|
||||
|
||||
// TODO(angelapwen): Test uploadDebugArtifacts if toUpload is empty
|
||||
201
src/debug-artifacts.ts
Normal file
@@ -0,0 +1,201 @@
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
|
||||
import * as artifact from "@actions/artifact";
|
||||
import * as core from "@actions/core";
|
||||
import AdmZip from "adm-zip";
|
||||
import del from "del";
|
||||
|
||||
import { getRequiredInput } from "./actions-util";
|
||||
import { dbIsFinalized } from "./analyze";
|
||||
import { CODEQL_VERSION_NEW_TRACING, getCodeQL } from "./codeql";
|
||||
import { Config } from "./config-utils";
|
||||
import { Language } from "./languages";
|
||||
import { Logger } from "./logging";
|
||||
import {
|
||||
bundleDb,
|
||||
codeQlVersionAbove,
|
||||
doesDirectoryExist,
|
||||
getCodeQLDatabasePath,
|
||||
listFolder,
|
||||
} from "./util";
|
||||
|
||||
export function sanitizeArifactName(name: string): string {
|
||||
return name.replace(/[^a-zA-Z0-9_\\-]+/g, "");
|
||||
}
|
||||
|
||||
export async function uploadDebugArtifacts(
|
||||
toUpload: string[],
|
||||
rootDir: string,
|
||||
artifactName: string
|
||||
) {
|
||||
if (toUpload.length === 0) {
|
||||
return;
|
||||
}
|
||||
let suffix = "";
|
||||
const matrix = getRequiredInput("matrix");
|
||||
if (matrix) {
|
||||
try {
|
||||
for (const [, matrixVal] of Object.entries(JSON.parse(matrix)).sort())
|
||||
suffix += `-${matrixVal}`;
|
||||
} catch (e) {
|
||||
core.info(
|
||||
"Could not parse user-specified `matrix` input into JSON. The debug artifact will not be named with the user's `matrix` input."
|
||||
);
|
||||
}
|
||||
}
|
||||
await artifact.create().uploadArtifact(
|
||||
sanitizeArifactName(`${artifactName}${suffix}`),
|
||||
toUpload.map((file) => path.normalize(file)),
|
||||
path.normalize(rootDir)
|
||||
);
|
||||
}
|
||||
|
||||
export async function uploadSarifDebugArtifact(
|
||||
config: Config,
|
||||
outputDir: string
|
||||
) {
|
||||
if (!doesDirectoryExist(outputDir)) {
|
||||
return;
|
||||
}
|
||||
|
||||
let toUpload: string[] = [];
|
||||
for (const lang of config.languages) {
|
||||
const sarifFile = path.resolve(outputDir, `${lang}.sarif`);
|
||||
if (fs.existsSync(sarifFile)) {
|
||||
toUpload = toUpload.concat(sarifFile);
|
||||
}
|
||||
}
|
||||
await uploadDebugArtifacts(toUpload, outputDir, config.debugArtifactName);
|
||||
}
|
||||
|
||||
export async function uploadFinalLogsDebugArtifact(config: Config) {
|
||||
core.info("Debug mode is on. Printing CodeQL debug logs...");
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = getCodeQLDatabasePath(config, language);
|
||||
const logsDirectory = path.join(databaseDirectory, "log");
|
||||
if (!doesDirectoryExist(logsDirectory)) {
|
||||
core.info(`Directory ${logsDirectory} does not exist.`);
|
||||
continue; // Skip this language database.
|
||||
}
|
||||
|
||||
const walkLogFiles = (dir: string) => {
|
||||
const entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
if (entries.length === 0) {
|
||||
core.info(`No debug logs found at directory ${logsDirectory}.`);
|
||||
}
|
||||
for (const entry of entries) {
|
||||
if (entry.isFile()) {
|
||||
core.startGroup(`CodeQL Debug Logs - ${language} - ${entry.name}`);
|
||||
process.stdout.write(fs.readFileSync(path.resolve(dir, entry.name)));
|
||||
core.endGroup();
|
||||
} else if (entry.isDirectory()) {
|
||||
walkLogFiles(path.resolve(dir, entry.name));
|
||||
}
|
||||
}
|
||||
};
|
||||
walkLogFiles(logsDirectory);
|
||||
}
|
||||
}
|
||||
|
||||
export async function uploadLogsDebugArtifact(config: Config) {
|
||||
const codeql = await getCodeQL(config.codeQLCmd);
|
||||
|
||||
let toUpload: string[] = [];
|
||||
for (const language of config.languages) {
|
||||
const databaseDirectory = getCodeQLDatabasePath(config, language);
|
||||
const logsDirectory = path.resolve(databaseDirectory, "log");
|
||||
if (doesDirectoryExist(logsDirectory)) {
|
||||
toUpload = toUpload.concat(listFolder(logsDirectory));
|
||||
}
|
||||
}
|
||||
|
||||
if (await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING)) {
|
||||
// Multilanguage tracing: there are additional logs in the root of the cluster
|
||||
const multiLanguageTracingLogsDirectory = path.resolve(
|
||||
config.dbLocation,
|
||||
"log"
|
||||
);
|
||||
if (doesDirectoryExist(multiLanguageTracingLogsDirectory)) {
|
||||
toUpload = toUpload.concat(listFolder(multiLanguageTracingLogsDirectory));
|
||||
}
|
||||
}
|
||||
await uploadDebugArtifacts(
|
||||
toUpload,
|
||||
config.dbLocation,
|
||||
config.debugArtifactName
|
||||
);
|
||||
|
||||
// Before multi-language tracing, we wrote a compound-build-tracer.log in the temp dir
|
||||
if (!(await codeQlVersionAbove(codeql, CODEQL_VERSION_NEW_TRACING))) {
|
||||
const compoundBuildTracerLogDirectory = path.resolve(
|
||||
config.tempDir,
|
||||
"compound-build-tracer.log"
|
||||
);
|
||||
if (doesDirectoryExist(compoundBuildTracerLogDirectory)) {
|
||||
await uploadDebugArtifacts(
|
||||
[compoundBuildTracerLogDirectory],
|
||||
config.tempDir,
|
||||
config.debugArtifactName
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If a database has not been finalized, we cannot run the `codeql database bundle`
|
||||
* command in the CLI because it will return an error. Instead we directly zip
|
||||
* all files in the database folder and upload it as an artifact.
|
||||
*/
|
||||
async function uploadPartialDatabaseBundle(config: Config, language: Language) {
|
||||
const databasePath = getCodeQLDatabasePath(config, language);
|
||||
const databaseBundlePath = path.resolve(
|
||||
config.dbLocation,
|
||||
`${config.debugDatabaseName}-${language}-partial.zip`
|
||||
);
|
||||
core.info(
|
||||
`${config.debugDatabaseName}-${language} is not finalized. Uploading partial database bundle at ${databaseBundlePath}...`
|
||||
);
|
||||
// See `bundleDb` for explanation behind deleting existing db bundle.
|
||||
if (fs.existsSync(databaseBundlePath)) {
|
||||
await del(databaseBundlePath, { force: true });
|
||||
}
|
||||
const zip = new AdmZip();
|
||||
zip.addLocalFolder(databasePath);
|
||||
zip.writeZip(databaseBundlePath);
|
||||
await uploadDebugArtifacts(
|
||||
[databaseBundlePath],
|
||||
config.dbLocation,
|
||||
config.debugArtifactName
|
||||
);
|
||||
}
|
||||
|
||||
export async function uploadDatabaseBundleDebugArtifact(
|
||||
config: Config,
|
||||
logger: Logger
|
||||
) {
|
||||
for (const language of config.languages) {
|
||||
if (!dbIsFinalized(config, language, logger)) {
|
||||
await uploadPartialDatabaseBundle(config, language);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
// Otherwise run `codeql database bundle` command.
|
||||
const bundlePath = await bundleDb(
|
||||
config,
|
||||
language,
|
||||
await getCodeQL(config.codeQLCmd),
|
||||
`${config.debugDatabaseName}-${language}`
|
||||
);
|
||||
await uploadDebugArtifacts(
|
||||
[bundlePath],
|
||||
config.dbLocation,
|
||||
config.debugArtifactName
|
||||
);
|
||||
} catch (error) {
|
||||
core.info(
|
||||
`Failed to upload database debug bundles for ${config.debugDatabaseName}-${language}: ${error}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
src/init-action-post.ts
@@ -8,6 +8,7 @@ import * as core from "@actions/core";
import * as actionsUtil from "./actions-util";
|
||||
import { getConfig } from "./config-utils";
|
||||
import * as debugArtifacts from "./debug-artifacts";
|
||||
import { getActionsLogger } from "./logging";
|
||||
|
||||
async function run(
|
||||
|
|
@@ -35,9 +36,9 @@ async function run(
async function runWrapper() {
try {
await run(
actionsUtil.uploadDatabaseBundleDebugArtifact,
actionsUtil.uploadLogsDebugArtifact,
actionsUtil.uploadFinalLogsDebugArtifact
debugArtifacts.uploadDatabaseBundleDebugArtifact,
debugArtifacts.uploadLogsDebugArtifact,
debugArtifacts.uploadFinalLogsDebugArtifact
);
} catch (error) {
core.setFailed(`init action cleanup failed: ${error}`);