Upload much more data in status reports
parent 9769e4a6df
commit 87758a1402
33 changed files with 537 additions and 339 deletions
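In short, each action now assembles a richer status report before sending it. A minimal sketch of the reporting pattern, using field names that appear in the generated code below (the interface names, the example function, and the exact shape of the './util' module are illustrative assumptions, not part of the commit):

```typescript
// Sketch only: field names are taken from the diff below; the interface names,
// the example function, and the './util' module shape are assumptions.
import * as util from './util';

interface UploadStats {
  raw_upload_size_bytes?: number;      // SARIF payload size before compression
  zipped_upload_size_bytes?: number;   // size after gzip + base64 encoding
  num_results_in_sarif?: number;       // number of results contained in the upload
}

interface FinishStatusReport extends util.StatusReportBase, UploadStats {
  analyze_failure_language?: string;   // language whose analysis failed, if any
}

// The pattern used throughout the commit: build a base report, spread the
// extra fields over it, and send the merged object.
async function reportFinishSuccess(startedAt: Date, stats: UploadStats): Promise<void> {
  const base = await util.createStatusReportBase('finish', 'success', startedAt);
  const report: FinishStatusReport = { ...base, ...stats };
  await util.sendStatusReport(report);
}
```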
2  lib/analysis-paths.test.js  generated
@@ -20,6 +20,7 @@ ava_1.default("emptyPaths", async (t) => {
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
@@ -32,6 +33,7 @@ ava_1.default("nonEmptyPaths", async (t) => {
queries: {},
|
||||
paths: ['path1', 'path2', '**/path3'],
|
||||
pathsIgnore: ['path4', 'path5', 'path6/**'],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
@@ -1 +1 @@
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;KACV,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;KAC5C,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"analysis-paths.test.js","sourceRoot":"","sources":["../src/analysis-paths.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,gEAAkD;AAClD,mDAA2C;AAE3C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,YAAY,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC3B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,WAAW,EAAE,EAAE;QACf,KAAK,EAAE,EAAE;QACT,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;IACnD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,SAAS,CAAC,CAAC;AACrD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,eAAe,EAAE,KAAK,EAAC,CAAC,EAAC,EAAE;IAC9B,MAAM,MAAM,GAAG;QACb,SAAS,EAAE,EAAE;QACb,OAAO,EAAE,EAAE;QACX,KAAK,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QACrC,WAAW,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,UAAU,CAAC;QAC3C,iBAAiB,EAAE,EAAE;KACtB,CAAC;IACF,aAAa,CAAC,8BAA8B,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,cAAc,CAAC,CAAC;IACxD,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,EAAE,gGAAgG,CAAC,CAAC;AAC5I,CAAC,CAAC,CAAC"}
22  lib/autobuild.js  generated
@@ -11,10 +11,24 @@ const core = __importStar(require("@actions/core"));
const codeql_1 = require("./codeql");
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function sendCompletedStatusReport(startedAt, allLanguages, failingLanguage, cause) {
|
||||
var _a, _b;
|
||||
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase('autobuild', status, startedAt, (_a = cause) === null || _a === void 0 ? void 0 : _a.message, (_b = cause) === null || _b === void 0 ? void 0 : _b.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
autobuild_languages: allLanguages.join(','),
|
||||
autobuild_failure: failingLanguage,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
var _a;
|
||||
const startedAt = new Date();
|
||||
let language;
|
||||
try {
|
||||
if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
|
||||
if (util.should_abort('autobuild', true) ||
|
||||
!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
// Attempt to find a language to autobuild
@@ -22,7 +36,7 @@ async function run() {
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = ((_a = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]) === null || _a === void 0 ? void 0 : _a.split(',')) || [];
|
||||
const language = autobuildLanguages[0];
|
||||
language = autobuildLanguages[0];
|
||||
if (!language) {
|
||||
core.info("None of the languages in this project require extra build steps");
|
||||
return;
@@ -38,10 +52,10 @@ async function run() {
}
|
||||
catch (error) {
|
||||
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
|
||||
await util.reportActionFailed('autobuild', error.message, error.stack);
|
||||
await sendCompletedStatusReport(startedAt, [language], language, error);
|
||||
return;
|
||||
}
|
||||
await util.reportActionSucceeded('autobuild');
|
||||
await sendCompletedStatusReport(startedAt, [language]);
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("autobuild action failed. " + e);
@@ -1 +1 @@
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,gEAAkD;AAClD,6CAA+B;AAE/B,KAAK,UAAU,GAAG;;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,EAAE;YACzF,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEvC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,IAAI,CAAC,kBAAkB,CAAC,WAAW,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACvE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;AAChD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"autobuild.js","sourceRoot":"","sources":["../src/autobuild.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,qCAAqC;AACrC,gEAAkD;AAClD,6CAA+B;AAS/B,KAAK,UAAU,yBAAyB,CACtC,SAAe,EACf,YAAsB,EACtB,eAAwB,EACxB,KAAa;;IAEb,MAAM,MAAM,GAAG,eAAe,KAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IAC5F,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CACxD,WAAW,EACX,MAAM,EACN,SAAS,QACT,KAAK,0CAAE,OAAO,QACd,KAAK,0CAAE,KAAK,CAAC,CAAC;IAChB,MAAM,YAAY,GAA0B;QAC1C,GAAG,gBAAgB;QACnB,mBAAmB,EAAE,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC;QAC3C,iBAAiB,EAAE,eAAe;KACnC,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,QAAQ,CAAC;IACb,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,CAAC;YACpC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,WAAW,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YAC7G,OAAO;SACR;QAED,0CAA0C;QAC1C,mFAAmF;QACnF,oFAAoF;QACpF,4EAA4E;QAC5E,MAAM,kBAAkB,GAAG,OAAA,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,8BAA8B,CAAC,0CAAE,KAAK,CAAC,GAAG,MAAK,EAAE,CAAC;QACnG,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,CAAC;QAEjC,IAAI,CAAC,QAAQ,EAAE;YACb,IAAI,CAAC,IAAI,CAAC,iEAAiE,CAAC,CAAC;YAC7E,OAAO;SACR;QAED,IAAI,CAAC,KAAK,CAAC,sCAAsC,QAAQ,EAAE,CAAC,CAAC;QAE7D,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YACjC,IAAI,CAAC,OAAO,CAAC,oCAAoC,QAAQ,8BAA8B,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,wDAAwD,CAAC,CAAC;SAC3L;QAED,IAAI,CAAC,UAAU,CAAC,qCAAqC,QAAQ,OAAO,CAAC,CAAC;QACtE,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;QAEpC,IAAI,CAAC,QAAQ,EAAE,CAAC;KAEjB;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,kIAAkI,GAAG,KAAK,CAAC,OAAO,CAAC,CAAC;QACnK,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,QAAQ,EAAE,KAAK,CAAC,CAAC;QACxE,OAAO;KACR;IAED,MAAM,yBAAyB,CAAC,SAAS,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC;AACzD,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,4BAA4B,GAAG,CAAC,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
11  lib/config-utils.js  generated
@@ -354,7 +354,8 @@ async function getDefaultConfig() {
languages: languages,
|
||||
queries: queries,
|
||||
pathsIgnore: [],
|
||||
paths: []
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
}
|
||||
exports.getDefaultConfig = getDefaultConfig;
@@ -434,7 +435,13 @@ async function loadConfig(configFile) {
paths.push(validateAndSanitisePath(path, PATHS_PROPERTY, configFile));
|
||||
});
|
||||
}
|
||||
return { languages, queries, pathsIgnore, paths };
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
pathsIgnore,
|
||||
paths,
|
||||
originalUserInput: parsedYAML
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Load and return the config.
File diff suppressed because one or more lines are too long
7  lib/config-utils.test.js  generated
@@ -169,6 +169,13 @@ ava_1.default("load non-empty input", async (t) => {
queries: { 'javascript': ['/foo/a.ql', '/bar/b.ql'] },
|
||||
pathsIgnore: ['a', 'b'],
|
||||
paths: ['c/d'],
|
||||
originalUserInput: {
|
||||
name: 'my config',
|
||||
'disable-default-queries': true,
|
||||
queries: [{ uses: './foo' }],
|
||||
'paths-ignore': ['a', 'b'],
|
||||
paths: ['c/d'],
|
||||
},
|
||||
};
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
|
||||
setInput('config-file', 'input');
File diff suppressed because one or more lines are too long
65  lib/finalize-db.js  generated
@@ -16,6 +16,17 @@ const configUtils = __importStar(require("./config-utils"));
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function sendStatusReport(startedAt, queriesStats, uploadStats, error) {
|
||||
var _a, _b, _c;
|
||||
const status = ((_a = queriesStats) === null || _a === void 0 ? void 0 : _a.analyze_failure_language) !== undefined || error !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, (_b = error) === null || _b === void 0 ? void 0 : _b.message, (_c = error) === null || _c === void 0 ? void 0 : _c.stack);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...(queriesStats || {}),
|
||||
...(uploadStats || {}),
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function createdDBForScannedLanguages(databaseFolder) {
|
||||
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
|
||||
if (scannedLanguages) {
@@ -39,27 +50,40 @@ async function finalizeDatabaseCreation(databaseFolder, config) {
// Runs queries and creates sarif files in the given folder
|
||||
async function runQueries(databaseFolder, sarifFolder, config) {
|
||||
const codeql = codeql_1.getCodeQL();
|
||||
for (let database of fs.readdirSync(databaseFolder)) {
|
||||
core.startGroup('Analyzing ' + database);
|
||||
const queries = config.queries[database] || [];
|
||||
for (let language of fs.readdirSync(databaseFolder)) {
|
||||
core.startGroup('Analyzing ' + language);
|
||||
const queries = config.queries[language] || [];
|
||||
if (queries.length === 0) {
|
||||
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
|
||||
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
|
||||
}
|
||||
try {
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuite = path.join(databaseFolder, language + '-queries.qls');
|
||||
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||
fs.writeFileSync(querySuite, querySuiteContents);
|
||||
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);
|
||||
const sarifFile = path.join(sarifFolder, language + '.sarif');
|
||||
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);
|
||||
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
|
||||
core.endGroup();
|
||||
}
|
||||
catch (e) {
|
||||
// For now the fields about query performance are not populated
|
||||
return {
|
||||
analyze_failure_language: language,
|
||||
};
|
||||
}
|
||||
// Pass the queries to codeql using a file instead of using the command
|
||||
// line to avoid command line length restrictions, particularly on windows.
|
||||
const querySuite = path.join(databaseFolder, database + '-queries.qls');
|
||||
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
|
||||
fs.writeFileSync(querySuite, querySuiteContents);
|
||||
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
|
||||
const sarifFile = path.join(sarifFolder, database + '.sarif');
|
||||
await codeql.databaseAnalyze(path.join(databaseFolder, database), sarifFile, querySuite);
|
||||
core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
|
||||
core.endGroup();
|
||||
}
|
||||
return {};
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let queriesStats = undefined;
|
||||
let uploadStats = undefined;
|
||||
try {
|
||||
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
|
||||
if (util.should_abort('finish', true) ||
|
||||
!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
const config = await configUtils.getConfig();
@@ -71,20 +95,17 @@ async function run() {
core.info('Finalizing database creation');
|
||||
await finalizeDatabaseCreation(databaseFolder, config);
|
||||
core.info('Analyzing database');
|
||||
await runQueries(databaseFolder, sarifFolder, config);
|
||||
queriesStats = await runQueries(databaseFolder, sarifFolder, config);
|
||||
if ('true' === core.getInput('upload')) {
|
||||
if (!await upload_lib.upload(sarifFolder)) {
|
||||
await util.reportActionFailed('finish', 'upload');
|
||||
return;
|
||||
}
|
||||
uploadStats = await upload_lib.upload(sarifFolder);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('finish', error.message, error.stack);
|
||||
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
|
||||
return;
|
||||
}
|
||||
await util.reportActionSucceeded('finish');
|
||||
await sendStatusReport(startedAt, queriesStats, uploadStats);
|
||||
}
|
||||
run().catch(e => {
|
||||
core.setFailed("analyze action failed: " + e);
@@ -1 +1 @@
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,gDAAkC;AAClC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAqC;AACrC,4DAA8C;AAC9C,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,4BAA4B,CAAC,cAAsB;IAChE,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,+BAA+B,CAAC,CAAC;IAChF,IAAI,gBAAgB,EAAE;QACpB,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,KAAK,MAAM,QAAQ,IAAI,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;YAClD,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC1C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CAAC,cAAsB,EAAE,WAAmB,EAAE,MAA0B;IAC/F,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,uEAAuE;QACvE,2EAA2E;QAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;QACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;QACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;QAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;QAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;QAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;QACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,EAAE;YACnF,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEtF,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;QAEtD,IAAI,MAAM,KAAK,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,IAAI,CAAC,MAAM,UAAU,CAAC,MAAM,CAAC,WAAW,CAAC,EAAE;gBACzC,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;gBAClD,OAAO;aACR;SACF;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,QAAQ,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,IAAI,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;AAC7C,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"finalize-db.js","sourceRoot":"","sources":["../src/finalize-db.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AACtC,gDAAkC;AAClC,uCAAyB;AACzB,2CAA6B;AAE7B,qCAAqC;AACrC,4DAA8C;AAC9C,gEAAkD;AAClD,yDAA2C;AAC3C,6CAA+B;AAiC/B,KAAK,UAAU,gBAAgB,CAC7B,SAAe,EACf,YAA6C,EAC7C,WAAsD,EACtD,KAAa;;IAEb,MAAM,MAAM,GAAG,OAAA,YAAY,0CAAE,wBAAwB,MAAK,SAAS,IAAI,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;IACnH,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,QAAE,KAAK,0CAAE,OAAO,QAAE,KAAK,0CAAE,KAAK,CAAC,CAAC;IACtH,MAAM,YAAY,GAAuB;QACvC,GAAG,gBAAgB;QACnB,GAAG,CAAC,YAAY,IAAI,EAAE,CAAC;QACvB,GAAG,CAAC,WAAW,IAAI,EAAE,CAAC;KACvB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,4BAA4B,CAAC,cAAsB;IAChE,MAAM,gBAAgB,GAAG,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,+BAA+B,CAAC,CAAC;IAChF,IAAI,gBAAgB,EAAE;QACpB,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;QAC3B,KAAK,MAAM,QAAQ,IAAI,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE;YAClD,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;YAC1C,MAAM,MAAM,CAAC,sBAAsB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,QAAQ,CAAC,CAAC;YACnF,IAAI,CAAC,QAAQ,EAAE,CAAC;SACjB;KACF;AACH,CAAC;AAED,KAAK,UAAU,wBAAwB,CAAC,cAAsB,EAAE,MAA0B;IACxF,MAAM,4BAA4B,CAAC,cAAc,CAAC,CAAC;IAEnD,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,MAAM,QAAQ,IAAI,MAAM,CAAC,SAAS,EAAE;QACvC,IAAI,CAAC,UAAU,CAAC,aAAa,GAAG,QAAQ,CAAC,CAAC;QAC1C,MAAM,MAAM,CAAC,gBAAgB,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC,CAAC;QACnE,IAAI,CAAC,QAAQ,EAAE,CAAC;KACjB;AACH,CAAC;AAED,2DAA2D;AAC3D,KAAK,UAAU,UAAU,CACvB,cAAsB,EACtB,WAAmB,EACnB,MAA0B;IAE1B,MAAM,MAAM,GAAG,kBAAS,EAAE,CAAC;IAC3B,KAAK,IAAI,QAAQ,IAAI,EAAE,CAAC,WAAW,CAAC,cAAc,CAAC,EAAE;QACnD,IAAI,CAAC,UAAU,CAAC,YAAY,GAAG,QAAQ,CAAC,CAAC;QAEzC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;QAC/C,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;YACxB,MAAM,IAAI,KAAK,CAAC,oBAAoB,GAAG,QAAQ,GAAG,gDAAgD,CAAC,CAAC;SACrG;QAED,IAAI;YACF,uEAAuE;YACvE,2EAA2E;YAC3E,MAAM,UAAU,GAAG,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,GAAG,cAAc,CAAC,CAAC;YACxE,MAAM,kBAAkB,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxE,EAAE,CAAC,aAAa,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;YACjD,IAAI,CAAC,KAAK,CAAC,uBAAuB,GAAG,QAAQ,GAAG,OAAO,GAAG,kBAAkB,CAAC,CAAC;YAE9E,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,QAAQ,GAAG,QAAQ,CAAC,CAAC;YAE9D,MAAM,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,QAAQ,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,CAAC;YAEzF,IAAI,CAAC,KAAK,CAAC,6BAA6B,GAAG,QAAQ,GAAG,eAAe,GAAG,SAAS,GAAG,GAAG,CAAC,CAAC;YACzF,IAAI,CAAC,QAAQ,EAAE,CAAC;SAEjB;QAAC,OAAO,CAAC,EAAE;YACV,+DAA+D;YAC/D,OAAO;gBACL,wBAAwB,EAAE,QAAQ;aACnC,CAAC;SACH;KACF;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAoC,SAAS,CAAC;IAC9D,IAAI,WAAW,GAA8C,SAAS,CAAC;IACvE,IAAI;QACF,IAAI,IAAI,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC;YACnC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,QAAQ,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;YACxG,OAAO;SACR;QACD,MAAM,MAAM,GAAG,MAAM,WAAW,CAAC,SAAS,EAAE,CAAC;QAE7C,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,0BAA0B,EAAE,EAAE,CAAC,CAAC;QAC9D,OAAO,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEzD,MAAM,cAAc,GAAG,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,0BAA0B,CAAC,CAAC;QAEtF,MAAM,WAAW,GAAG,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;QAC5C,MAAM,EAAE,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;QAE7B,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;QAC1C,MAAM,wBAAwB,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;QAEvD,IAAI,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAChC,YAAY,GAAG,MAAM,UAAU,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;QAErE,IAAI,MAAM,KAAK,IAAI,CAAC,Q
AAQ,CAAC,QAAQ,CAAC,EAAE;YACtC,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;SACpD;KAEF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,EAAE,KAAK,CAAC,CAAC;QACpE,OAAO;KACR;IAED,MAAM,gBAAgB,CAAC,SAAS,EAAE,YAAY,EAAE,WAAW,CAAC,CAAC;AAC/D,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,yBAAyB,GAAG,CAAC,CAAC,CAAC;IAC9C,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
29  lib/setup-tracer.js  generated
@@ -121,11 +121,32 @@ function concatTracerConfigs(configs) {
fs.writeFileSync(envPath, buffer);
|
||||
return { env, spec };
|
||||
}
|
||||
async function sendSuccessStatusReport(startedAt, config) {
|
||||
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);
|
||||
const languages = config.languages.join(',');
|
||||
const workflowLanguages = core.getInput('languages', { required: false });
|
||||
const paths = (config.originalUserInput.paths || []).join(',');
|
||||
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
|
||||
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
|
||||
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
languages: languages,
|
||||
workflow_languages: workflowLanguages,
|
||||
paths: paths,
|
||||
paths_ignore: pathsIgnore,
|
||||
disable_default_queries: disableDefaultQueries,
|
||||
queries: queries,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let config;
|
||||
let codeql;
|
||||
try {
|
||||
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
|
||||
if (util.should_abort('init', false) ||
|
||||
!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
core.startGroup('Setup CodeQL tools');
@@ -139,7 +160,7 @@ async function run() {
}
|
||||
catch (e) {
|
||||
core.setFailed(e.message);
|
||||
await util.reportActionAborted('init', e.message);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
|
||||
return;
|
||||
}
|
||||
try {
@@ -200,10 +221,10 @@ async function run() {
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('init', error.message, error.stack);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('init', 'failure', startedAt, error.message, error.stack));
|
||||
return;
|
||||
}
|
||||
await util.reportActionSucceeded('init');
|
||||
await sendSuccessStatusReport(startedAt, config);
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
|
||||
}
|
||||
run().catch(e => {
File diff suppressed because one or more lines are too long
2  lib/shared-environment.js  generated
@@ -10,7 +10,7 @@ exports.CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// action (i.e. the upload action is being used by a third-party integrator)
|
||||
// then this variable will be assigned the start time of the action invoked
|
||||
// rather that the init action.
|
||||
exports.CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
|
||||
exports.CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
|
||||
// Populated when the init action completes successfully
|
||||
exports.CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
|
||||
//# sourceMappingURL=shared-environment.js.map
@@ -1 +1 @@
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,wBAAwB,GAAG,0BAA0B,CAAC;AACnE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
|
||||
{"version":3,"file":"shared-environment.js","sourceRoot":"","sources":["../src/shared-environment.ts"],"names":[],"mappings":";;AAAa,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AAC1D,QAAA,+BAA+B,GAAG,iCAAiC,CAAC;AACpE,QAAA,8BAA8B,GAAG,gCAAgC,CAAC;AAC/E,wEAAwE;AACxE,2EAA2E;AAC3E,4EAA4E;AAC5E,2EAA2E;AAC3E,+BAA+B;AAClB,QAAA,0BAA0B,GAAG,4BAA4B,CAAC;AACvE,wDAAwD;AAC3C,QAAA,4BAA4B,GAAG,8BAA8B,CAAC"}
65  lib/upload-lib.js  generated
@@ -48,7 +48,7 @@ async function uploadPayload(payload) {
// If in test mode we don't want to upload the results
|
||||
const testMode = process.env['TEST_MODE'] === 'true' || false;
|
||||
if (testMode) {
|
||||
return true;
|
||||
return;
|
||||
}
|
||||
const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
|
||||
// Make up to 4 attempts to upload, and sleep for these
@@ -66,13 +66,12 @@ async function uploadPayload(payload) {
const statusCode = response.status;
|
||||
if (statusCode === 202) {
|
||||
core.info("Successfully uploaded results");
|
||||
return true;
|
||||
return;
|
||||
}
|
||||
const requestID = response.headers["x-github-request-id"];
|
||||
// On any other status code that's not 5xx mark the upload as failed
|
||||
if (!statusCode || statusCode < 500 || statusCode >= 600) {
|
||||
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
return false;
|
||||
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
}
|
||||
// On a 5xx status code we may retry the request
|
||||
if (attempt < backoffPeriods.length) {
@@ -88,11 +87,12 @@ async function uploadPayload(payload) {
// If the upload fails with 5xx then we assume it is a temporary problem
|
||||
// and not an error that the user has caused or can fix.
|
||||
// We avoid marking the job as failed to avoid breaking CI workflows.
|
||||
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
return false;
|
||||
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
|
||||
}
|
||||
}
|
||||
return false;
|
||||
// This case shouldn't ever happen as the final iteration of the loop
|
||||
// will always throw an error instead of exiting to here.
|
||||
throw new Error('Upload failed');
|
||||
}
|
||||
// Uploads a single sarif file or a directory of sarif files
|
||||
// depending on what the path happens to refer to.
@@ -103,8 +103,7 @@ async function upload(input) {
.filter(f => f.endsWith(".sarif"))
|
||||
.map(f => path.resolve(input, f));
|
||||
if (sarifFiles.length === 0) {
|
||||
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
|
||||
return false;
|
||||
throw new Error("No SARIF files found to upload in \"" + input + "\".");
|
||||
}
|
||||
return await uploadFiles(sarifFiles);
|
||||
}
@@ -123,27 +122,22 @@ function countResultsInSarif(sarif) {
}
|
||||
exports.countResultsInSarif = countResultsInSarif;
|
||||
// Validates that the given file path refers to a valid SARIF file.
|
||||
// Returns a non-empty list of error message if the file is invalid,
|
||||
// otherwise returns the empty list if the file is valid.
|
||||
// Throws an error if the file is invalid.
|
||||
function validateSarifFileSchema(sarifFilePath) {
|
||||
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
|
||||
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));
|
||||
const result = new jsonschema.Validator().validate(sarif, schema);
|
||||
if (result.valid) {
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
// Set the failure message to the stacks of all the errors.
|
||||
// This should be of a manageable size and may even give enough to fix the error.
|
||||
const errorMessages = result.errors.map(e => "- " + e.stack);
|
||||
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));
|
||||
// Also output the more verbose error messages in groups as these may be very large.
|
||||
if (!result.valid) {
|
||||
// Output the more verbose error messages in groups as these may be very large.
|
||||
for (const error of result.errors) {
|
||||
core.startGroup("Error details: " + error.stack);
|
||||
core.info(JSON.stringify(error, null, 2));
|
||||
core.endGroup();
|
||||
}
|
||||
return false;
|
||||
// Set the main error message to the stacks of all the errors.
|
||||
// This should be of a manageable size and may even give enough to fix the error.
|
||||
const sarifErrors = result.errors.map(e => "- " + e.stack);
|
||||
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
|
||||
}
|
||||
}
|
||||
exports.validateSarifFileSchema = validateSarifFileSchema;
@@ -154,22 +148,19 @@ async function uploadFiles(sarifFiles) {
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));
|
||||
const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
|
||||
if (process.env[sentinelEnvVar]) {
|
||||
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
|
||||
return false;
|
||||
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
|
||||
}
|
||||
core.exportVariable(sentinelEnvVar, sentinelEnvVar);
|
||||
// Validate that the files we were asked to upload are all valid SARIF files
|
||||
for (const file of sarifFiles) {
|
||||
if (!validateSarifFileSchema(file)) {
|
||||
return false;
|
||||
}
|
||||
validateSarifFileSchema(file);
|
||||
}
|
||||
const commitOid = await util.getCommitOid();
|
||||
const workflowRunIDStr = util.getRequiredEnvParam('GITHUB_RUN_ID');
|
||||
const ref = util.getRef();
|
||||
const analysisKey = await util.getAnalysisKey();
|
||||
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
|
||||
const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
|
||||
let sarifPayload = combineSarifFiles(sarifFiles);
|
||||
sarifPayload = fingerprints.addFingerprints(sarifPayload);
|
||||
const zipped_sarif = zlib_1.default.gzipSync(sarifPayload).toString('base64');
@@ -177,8 +168,7 @@ async function uploadFiles(sarifFiles) {
let checkoutURI = file_url_1.default(checkoutPath);
|
||||
const workflowRunID = parseInt(workflowRunIDStr, 10);
|
||||
if (Number.isNaN(workflowRunID)) {
|
||||
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
|
||||
return false;
|
||||
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
|
||||
}
|
||||
let matrix = core.getInput('matrix');
|
||||
if (matrix === "null" || matrix === "") {
@@ -198,12 +188,19 @@ async function uploadFiles(sarifFiles) {
"tool_names": toolNames,
|
||||
});
|
||||
// Log some useful debug info about the info
|
||||
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
|
||||
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
|
||||
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
|
||||
const rawUploadSizeBytes = sarifPayload.length;
|
||||
core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
|
||||
const zippedUploadSizeBytes = zipped_sarif.length;
|
||||
core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
|
||||
const numResultInSarif = countResultsInSarif(sarifPayload);
|
||||
core.debug("Number of results in upload: " + numResultInSarif);
|
||||
// Make the upload
|
||||
const succeeded = await uploadPayload(payload);
|
||||
await uploadPayload(payload);
|
||||
core.endGroup();
|
||||
return succeeded;
|
||||
return {
|
||||
raw_upload_size_bytes: rawUploadSizeBytes,
|
||||
zipped_upload_size_bytes: zippedUploadSizeBytes,
|
||||
num_results_in_sarif: numResultInSarif,
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=upload-lib.js.map
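For orientation, a hedged usage sketch of the reshaped upload-lib API shown above: upload() now resolves with upload statistics rather than a boolean and signals failure by throwing. The wrapper function and variable names here are illustrative, not part of the commit:

```typescript
import * as core from '@actions/core';
import * as upload_lib from './upload-lib';

// Illustrative wrapper: on success, upload() resolves with statistics
// (raw_upload_size_bytes, zipped_upload_size_bytes, num_results_in_sarif);
// on failure it now throws instead of returning false.
async function uploadAndLog(sarifFolder: string) {
  const stats = await upload_lib.upload(sarifFolder);
  core.debug('Number of results in upload: ' + stats.num_results_in_sarif);
  return stats;
}
```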
File diff suppressed because one or more lines are too long
6  lib/upload-lib.test.js  generated
@@ -16,12 +16,10 @@ const uploadLib = __importStar(require("./upload-lib"));
testing_utils_1.setupTests(ava_1.default);
|
||||
ava_1.default('validateSarifFileSchema - valid', t => {
|
||||
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
|
||||
t.true(uploadLib.validateSarifFileSchema(inputFile));
|
||||
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile));
|
||||
});
|
||||
ava_1.default('validateSarifFileSchema - invalid', t => {
|
||||
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
|
||||
t.false(uploadLib.validateSarifFileSchema(inputFile));
|
||||
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
|
||||
process.exitCode = 0;
|
||||
t.throws(() => uploadLib.validateSarifFileSchema(inputFile));
|
||||
});
|
||||
//# sourceMappingURL=upload-lib.test.js.map
@@ -1 +1 @@
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACvD,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;IACtD,iFAAiF;IACjF,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"upload-lib.test.js","sourceRoot":"","sources":["../src/upload-lib.test.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,8CAAuB;AAEvB,mDAA2C;AAC3C,wDAA0C;AAE1C,0BAAU,CAAC,aAAI,CAAC,CAAC;AAEjB,aAAI,CAAC,iCAAiC,EAAE,CAAC,CAAC,EAAE;IAC1C,MAAM,SAAS,GAAG,SAAS,GAAG,oCAAoC,CAAC;IACnE,CAAC,CAAC,SAAS,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAClE,CAAC,CAAC,CAAC;AAEH,aAAI,CAAC,mCAAmC,EAAE,CAAC,CAAC,EAAE;IAC5C,MAAM,SAAS,GAAG,SAAS,GAAG,sCAAsC,CAAC;IACrE,CAAC,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AAC/D,CAAC,CAAC,CAAC"}
22  lib/upload-sarif.js  generated
@@ -10,21 +10,27 @@ Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
|
||||
const upload_lib = __importStar(require("./upload-lib"));
|
||||
const util = __importStar(require("./util"));
|
||||
async function sendSuccessStatusReport(startedAt, uploadStats) {
|
||||
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
|
||||
const statusReport = {
|
||||
...statusReportBase,
|
||||
...uploadStats,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
async function run() {
|
||||
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
|
||||
const startedAt = new Date();
|
||||
if (util.should_abort('upload-sarif', false) ||
|
||||
!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (await upload_lib.upload(core.getInput('sarif_file'))) {
|
||||
await util.reportActionSucceeded('upload-sarif');
|
||||
}
|
||||
else {
|
||||
await util.reportActionFailed('upload-sarif', 'upload');
|
||||
}
|
||||
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'));
|
||||
await sendSuccessStatusReport(startedAt, uploadStats);
|
||||
}
|
||||
catch (error) {
|
||||
core.setFailed(error.message);
|
||||
await util.reportActionFailed('upload-sarif', error.message, error.stack);
|
||||
await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'failure', startedAt, error.message, error.stack));
|
||||
return;
|
||||
}
|
||||
}
@@ -1 +1 @@
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAE/B,KAAK,UAAU,GAAG;IAChB,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,EAAE;QAChG,OAAO;KACR;IAED,IAAI;QACF,IAAI,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE;YACxD,MAAM,IAAI,CAAC,qBAAqB,CAAC,cAAc,CAAC,CAAC;SAClD;aAAM;YACL,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;SACzD;KACF;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,kBAAkB,CAAC,cAAc,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,KAAK,CAAC,CAAC;QAC1E,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
|
||||
{"version":3,"file":"upload-sarif.js","sourceRoot":"","sources":["../src/upload-sarif.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAsC;AAEtC,yDAA2C;AAC3C,6CAA+B;AAI/B,KAAK,UAAU,uBAAuB,CAAC,SAAe,EAAE,WAA0C;IAChG,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;IACjG,MAAM,YAAY,GAA4B;QAC5C,GAAG,gBAAgB;QACnB,GAAI,WAAW;KAChB,CAAC;IACF,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,CAAC;AAC5C,CAAC;AAED,KAAK,UAAU,GAAG;IAChB,MAAM,SAAS,GAAG,IAAI,IAAI,EAAE,CAAC;IAC7B,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,KAAK,CAAC;QACxC,CAAC,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAAC,cAAc,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,IAAI,CAAC,EAAE;QAChH,OAAO;KACR;IAED,IAAI;QACF,MAAM,WAAW,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC,CAAC;QACzE,MAAM,uBAAuB,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;KAEvD;IAAC,OAAO,KAAK,EAAE;QACd,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QAC9B,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,IAAI,CAAC,sBAAsB,CAC3D,cAAc,EACd,SAAS,EACT,SAAS,EACT,KAAK,CAAC,OAAO,EACb,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC;QAChB,OAAO;KACR;AACH,CAAC;AAED,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACd,IAAI,CAAC,SAAS,CAAC,qCAAqC,GAAG,CAAC,CAAC,CAAC;IAC1D,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AACjB,CAAC,CAAC,CAAC"}
109  lib/util.js  generated
@@ -13,7 +13,6 @@ const fs = __importStar(require("fs"));
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const api = __importStar(require("./api-client"));
|
||||
const configUtils = __importStar(require("./config-utils"));
|
||||
const sharedEnv = __importStar(require("./shared-environment"));
|
||||
/**
|
||||
* Should the current action be aborted?
@@ -139,22 +138,11 @@ exports.getRef = getRef;
*
|
||||
* @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
|
||||
* @param status The status. Must be 'success', 'failure', or 'starting'
|
||||
* @param startedAt The time this action started executing.
|
||||
* @param cause Cause of failure (only supply if status is 'failure')
|
||||
* @param exception Exception (only supply if status is 'failure')
|
||||
*/
|
||||
async function createStatusReport(actionName, status, cause, exception) {
|
||||
var _a, _b;
|
||||
// If this is not the init action starting up or aborting then try to load the config.
|
||||
// If it fails then carry because it's important to still send the status report.
|
||||
let config = undefined;
|
||||
if (actionName !== 'init' || (status !== 'starting' && status !== 'aborted')) {
|
||||
try {
|
||||
config = await configUtils.getConfig();
|
||||
}
|
||||
catch (e) {
|
||||
core.error('Unable to load config: ' + e);
|
||||
}
|
||||
}
|
||||
async function createStatusReportBase(actionName, status, actionStartedAt, cause, exception) {
|
||||
const commitOid = process.env['GITHUB_SHA'] || '';
|
||||
const ref = getRef();
|
||||
const workflowRunIDStr = process.env['GITHUB_RUN_ID'];
@@ -165,20 +153,22 @@ async function createStatusReport(actionName, status, cause, exception) {
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
|
||||
const jobName = process.env['GITHUB_JOB'] || '';
|
||||
const analysis_key = await getAnalysisKey();
|
||||
const languages = ((_b = (_a = config) === null || _a === void 0 ? void 0 : _a.languages) === null || _b === void 0 ? void 0 : _b.join(',')) || "";
|
||||
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
|
||||
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
|
||||
if (workflowStartedAt === undefined) {
|
||||
workflowStartedAt = actionStartedAt.toISOString();
|
||||
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
|
||||
}
|
||||
let statusReport = {
|
||||
workflow_run_id: workflowRunID,
|
||||
workflow_name: workflowName,
|
||||
job_name: jobName,
|
||||
analysis_key: analysis_key,
|
||||
languages: languages,
|
||||
commit_oid: commitOid,
|
||||
ref: ref,
|
||||
action_name: actionName,
|
||||
action_oid: "unknown",
|
||||
started_at: startedAt,
|
||||
started_at: workflowStartedAt,
|
||||
action_started_at: actionStartedAt.toISOString(),
|
||||
status: status
|
||||
};
|
||||
// Add optional parameters
@@ -197,12 +187,17 @@ async function createStatusReport(actionName, status, cause, exception) {
}
|
||||
return statusReport;
|
||||
}
|
||||
exports.createStatusReportBase = createStatusReportBase;
|
||||
/**
|
||||
* Send a status report to the code_scanning/analysis/status endpoint.
|
||||
*
|
||||
* Returns the status code of the response to the status request.
|
||||
* Optionally checks the response from the API endpoint and sets the action
|
||||
* as failed if the status report failed. This is only expected to be used
|
||||
* when sending a 'starting' report.
|
||||
*
|
||||
* Returns whether sending the status report was successful of not.
|
||||
*/
|
||||
async function sendStatusReport(statusReport) {
|
||||
async function sendStatusReport(statusReport, ignoreFailures) {
|
||||
const statusReportJSON = JSON.stringify(statusReport);
|
||||
core.debug('Sending status report: ' + statusReportJSON);
|
||||
const nwo = getRequiredEnvParam("GITHUB_REPOSITORY");
@@ -212,65 +207,25 @@ async function sendStatusReport(statusReport) {
repo: repo,
|
||||
data: statusReportJSON,
|
||||
});
|
||||
return statusResponse.status;
|
||||
}
|
||||
/**
|
||||
* Send a status report that an action is starting.
|
||||
*
|
||||
* If the action is `init` then this also records the start time in the environment,
|
||||
* and ensures that the analysed languages are also recorded in the envirenment.
|
||||
*
|
||||
* Returns true unless a problem occurred and the action should abort.
|
||||
*/
|
||||
async function reportActionStarting(action) {
|
||||
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));
|
||||
// If the status report request fails with a 403 or a 404, then this is a deliberate
|
||||
// message from the endpoint that the SARIF upload can be expected to fail too,
|
||||
// so the action should fail to avoid wasting actions minutes.
|
||||
//
|
||||
// Other failure responses (or lack thereof) could be transitory and should not
|
||||
// cause the action to fail.
|
||||
if (statusCode === 403) {
|
||||
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
|
||||
return false;
|
||||
}
|
||||
if (statusCode === 404) {
|
||||
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
|
||||
return false;
|
||||
if (!ignoreFailures) {
|
||||
// If the status report request fails with a 403 or a 404, then this is a deliberate
|
||||
// message from the endpoint that the SARIF upload can be expected to fail too,
|
||||
// so the action should fail to avoid wasting actions minutes.
|
||||
//
|
||||
// Other failure responses (or lack thereof) could be transitory and should not
|
||||
// cause the action to fail.
|
||||
if (statusResponse.status === 403) {
|
||||
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
|
||||
return false;
|
||||
}
|
||||
if (statusResponse.status === 404) {
|
||||
core.setFailed('Not authorized to used the CodeQL code scanning feature on this repo.');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
exports.reportActionStarting = reportActionStarting;
|
||||
/**
|
||||
* Report that an action has failed.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
async function reportActionFailed(action, cause, exception) {
|
||||
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
|
||||
}
|
||||
exports.reportActionFailed = reportActionFailed;
|
||||
/**
|
||||
* Report that an action has succeeded.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
async function reportActionSucceeded(action) {
|
||||
await sendStatusReport(await createStatusReport(action, 'success'));
|
||||
}
|
||||
exports.reportActionSucceeded = reportActionSucceeded;
|
||||
/**
|
||||
* Report that an action has been aborted.
|
||||
*
|
||||
* Note that the started_at date is always that of the `init` action, since
|
||||
* this is likely to give a more useful duration when inspecting events.
|
||||
*/
|
||||
async function reportActionAborted(action, cause) {
|
||||
await sendStatusReport(await createStatusReport(action, 'aborted', cause));
|
||||
}
|
||||
exports.reportActionAborted = reportActionAborted;
|
||||
exports.sendStatusReport = sendStatusReport;
|
||||
/**
|
||||
* Get the array of all the tool names contained in the given sarif contents.
|
||||
*
File diff suppressed because one or more lines are too long
@@ -11,6 +11,7 @@ test("emptyPaths", async t => {
queries: {},
|
||||
pathsIgnore: [],
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], undefined);
@@ -24,6 +25,7 @@ test("nonEmptyPaths", async t => {
queries: {},
|
||||
paths: ['path1', 'path2', '**/path3'],
|
||||
pathsIgnore: ['path4', 'path5', 'path6/**'],
|
||||
originalUserInput: {},
|
||||
};
|
||||
analysisPaths.includeAndExcludeAnalysisPaths(config);
|
||||
t.is(process.env['LGTM_INDEX_INCLUDE'], 'path1\npath2');
@@ -4,9 +4,40 @@ import { getCodeQL } from './codeql';
import * as sharedEnv from './shared-environment';
|
||||
import * as util from './util';
|
||||
|
||||
interface AutobuildStatusReport extends util.StatusReportBase {
|
||||
// Comma-separated set of languages being autobuilt
|
||||
autobuild_languages: string;
|
||||
// Language that failed autobuilding (or undefined if all languages succeeded).
|
||||
autobuild_failure?: string;
|
||||
}
|
||||
|
||||
async function sendCompletedStatusReport(
|
||||
startedAt: Date,
|
||||
allLanguages: string[],
|
||||
failingLanguage?: string,
|
||||
cause?: Error) {
|
||||
|
||||
const status = failingLanguage !== undefined || cause !== undefined ? 'failure' : 'success';
|
||||
const statusReportBase = await util.createStatusReportBase(
|
||||
'autobuild',
|
||||
status,
|
||||
startedAt,
|
||||
cause?.message,
|
||||
cause?.stack);
|
||||
const statusReport: AutobuildStatusReport = {
|
||||
...statusReportBase,
|
||||
autobuild_languages: allLanguages.join(','),
|
||||
autobuild_failure: failingLanguage,
|
||||
};
|
||||
await util.sendStatusReport(statusReport);
|
||||
}
|
||||
|
||||
async function run() {
|
||||
const startedAt = new Date();
|
||||
let language;
|
||||
try {
|
||||
if (util.should_abort('autobuild', true) || !await util.reportActionStarting('autobuild')) {
|
||||
if (util.should_abort('autobuild', true) ||
|
||||
!await util.sendStatusReport(await util.createStatusReportBase('autobuild', 'starting', startedAt), true)) {
|
||||
return;
|
||||
}
@@ -15,7 +46,7 @@ async function run() {
// The languages are sorted in order specified by user or by lines of code if we got
|
||||
// them from the GitHub API, so try to build the first language on the list.
|
||||
const autobuildLanguages = process.env[sharedEnv.CODEQL_ACTION_TRACED_LANGUAGES]?.split(',') || [];
|
||||
const language = autobuildLanguages[0];
|
||||
language = autobuildLanguages[0];
|
||||
|
||||
if (!language) {
|
||||
core.info("None of the languages in this project require extra build steps");
@@ -36,11 +67,11 @@ async function run() {
|
||||
} catch (error) {
|
||||
core.setFailed("We were unable to automatically build your code. Please replace the call to the autobuild action with your custom build steps. " + error.message);
|
||||
await util.reportActionFailed('autobuild', error.message, error.stack);
|
||||
await sendCompletedStatusReport(startedAt, [language], language, error);
|
||||
return;
|
||||
}
|
||||
|
||||
await util.reportActionSucceeded('autobuild');
|
||||
await sendCompletedStatusReport(startedAt, [language]);
|
||||
}
|
||||
|
||||
run().catch(e => {
@@ -187,6 +187,13 @@ test("load non-empty input", async t => {
queries: {'javascript': ['/foo/a.ql', '/bar/b.ql']},
|
||||
pathsIgnore: ['a', 'b'],
|
||||
paths: ['c/d'],
|
||||
originalUserInput: {
|
||||
name: 'my config',
|
||||
'disable-default-queries': true,
|
||||
queries: [{ uses: './foo' }],
|
||||
'paths-ignore': ['a', 'b'],
|
||||
paths: ['c/d'],
|
||||
},
|
||||
};
|
||||
|
||||
fs.writeFileSync(path.join(tmpDir, 'input'), inputFileContents, 'utf8');
@@ -17,6 +17,20 @@ const QUERIES_USES_PROPERTY = 'uses';
const PATHS_IGNORE_PROPERTY = 'paths-ignore';
|
||||
const PATHS_PROPERTY = 'paths';
|
||||
|
||||
/**
|
||||
* Format of the config file supplied by the user.
|
||||
*/
|
||||
export interface UserConfig {
|
||||
name?: string;
|
||||
'disable-default-queries'?: boolean;
|
||||
queries?: {
|
||||
name?: string;
|
||||
uses: string;
|
||||
}[];
|
||||
'paths-ignore'?: string[];
|
||||
paths?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Format of the parsed config file.
|
||||
*/
@@ -39,6 +53,14 @@ export interface Config {
* List of paths to include in analysis.
|
||||
*/
|
||||
paths: string[];
|
||||
/**
|
||||
* A unaltered copy of the original user input.
|
||||
* Mainly intended to be used for status reporting.
|
||||
* If any field is useful for the actual processing
|
||||
* of the action then consider pulling it out to a
|
||||
* top-level field above.
|
||||
*/
|
||||
originalUserInput: UserConfig;
|
||||
}
|
||||
|
||||
/**
@@ -460,7 +482,8 @@ export async function getDefaultConfig(): Promise<Config> {
languages: languages,
|
||||
queries: queries,
|
||||
pathsIgnore: [],
|
||||
paths: []
|
||||
paths: [],
|
||||
originalUserInput: {},
|
||||
};
|
||||
}
@@ -468,7 +491,7 @@ export async function getDefaultConfig(): Promise<Config> {
* Load the config from the given file.
|
||||
*/
|
||||
async function loadConfig(configFile: string): Promise<Config> {
|
||||
let parsedYAML;
|
||||
let parsedYAML: UserConfig;
|
||||
|
||||
if (isLocal(configFile)) {
|
||||
// Treat the config file as relative to the workspace
@@ -486,7 +509,7 @@ async function loadConfig(configFile: string): Promise<Config> {
if (typeof parsedYAML[NAME_PROPERTY] !== "string") {
|
||||
throw new Error(getNameInvalid(configFile));
|
||||
}
|
||||
if (parsedYAML[NAME_PROPERTY].length === 0) {
|
||||
if (parsedYAML[NAME_PROPERTY]!.length === 0) {
|
||||
throw new Error(getNameInvalid(configFile));
|
||||
}
|
||||
}
@@ -507,7 +530,7 @@ async function loadConfig(configFile: string): Promise<Config> {
if (typeof parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY] !== "boolean") {
|
||||
throw new Error(getDisableDefaultQueriesInvalid(configFile));
|
||||
}
|
||||
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY];
|
||||
disableDefaultQueries = parsedYAML[DISABLE_DEFAULT_QUERIES_PROPERTY]!;
|
||||
}
|
||||
if (!disableDefaultQueries) {
|
||||
await addDefaultQueries(languages, queries);
@@ -517,7 +540,7 @@ async function loadConfig(configFile: string): Promise<Config> {
if (!(parsedYAML[QUERIES_PROPERTY] instanceof Array)) {
|
||||
throw new Error(getQueriesInvalid(configFile));
|
||||
}
|
||||
for (const query of parsedYAML[QUERIES_PROPERTY]) {
|
||||
for (const query of parsedYAML[QUERIES_PROPERTY]!) {
|
||||
if (!(QUERIES_USES_PROPERTY in query) || typeof query[QUERIES_USES_PROPERTY] !== "string") {
|
||||
throw new Error(getQueryUsesInvalid(configFile));
|
||||
}
@@ -529,7 +552,7 @@ async function loadConfig(configFile: string): Promise<Config> {
if (!(parsedYAML[PATHS_IGNORE_PROPERTY] instanceof Array)) {
|
||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||
}
|
||||
parsedYAML[PATHS_IGNORE_PROPERTY].forEach(path => {
|
||||
parsedYAML[PATHS_IGNORE_PROPERTY]!.forEach(path => {
|
||||
if (typeof path !== "string" || path === '') {
|
||||
throw new Error(getPathsIgnoreInvalid(configFile));
|
||||
}
|
||||
|
|
@ -541,7 +564,7 @@ async function loadConfig(configFile: string): Promise<Config> {
|
|||
if (!(parsedYAML[PATHS_PROPERTY] instanceof Array)) {
|
||||
throw new Error(getPathsInvalid(configFile));
|
||||
}
|
||||
parsedYAML[PATHS_PROPERTY].forEach(path => {
|
||||
parsedYAML[PATHS_PROPERTY]!.forEach(path => {
|
||||
if (typeof path !== "string" || path === '') {
|
||||
throw new Error(getPathsInvalid(configFile));
|
||||
}
|
||||
|
|
@ -549,7 +572,13 @@ async function loadConfig(configFile: string): Promise<Config> {
|
|||
});
|
||||
}
|
||||
|
||||
return {languages, queries, pathsIgnore, paths};
|
||||
return {
|
||||
languages,
|
||||
queries,
|
||||
pathsIgnore,
|
||||
paths,
|
||||
originalUserInput: parsedYAML
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@@ -584,7 +613,7 @@ function isLocal(configPath: string): boolean {
return (configPath.indexOf("@") === -1);
}

function getLocalConfig(configFile: string, workspacePath: string): any {
function getLocalConfig(configFile: string, workspacePath: string): UserConfig {
// Error if the config file is now outside of the workspace
if (!(configFile + path.sep).startsWith(workspacePath + path.sep)) {
throw new Error(getConfigFileOutsideWorkspaceErrorMessage(configFile));
@@ -598,7 +627,7 @@ function getLocalConfig(configFile: string, workspacePath: string): any {
return yaml.safeLoad(fs.readFileSync(configFile, 'utf8'));
}

async function getRemoteConfig(configFile: string): Promise<any> {
async function getRemoteConfig(configFile: string): Promise<UserConfig> {
// retrieve the various parts of the config location, and ensure they're present
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const pieces = format.exec(configFile);
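As a standalone sketch of the remote-config reference format handled above ("owner/repo/path@ref"), the same regular expression with named groups can be exercised like this; the example reference string is hypothetical.

// Splitting a remote config reference of the form "owner/repo/path@ref",
// mirroring the regular expression used in getRemoteConfig above.
const format = new RegExp('(?<owner>[^/]+)/(?<repo>[^/]+)/(?<path>[^@]+)@(?<ref>.*)');
const pieces = format.exec('octo-org/octo-repo/.github/codeql/config.yml@main');
if (pieces === null || pieces.groups === undefined) {
  throw new Error('Malformed config file reference');
}
// groups.owner === 'octo-org', groups.repo === 'octo-repo',
// groups.path === '.github/codeql/config.yml', groups.ref === 'main'
console.log(pieces.groups.owner, pieces.groups.repo, pieces.groups.path, pieces.groups.ref);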
@@ -9,6 +9,53 @@ import * as sharedEnv from './shared-environment';
import * as upload_lib from './upload-lib';
import * as util from './util';

interface QueriesStatusReport {
  // Time taken in ms to analyze builtin queries for cpp (or undefined if this language was not analyzed)
  analyze_builtin_queries_cpp_duration_ms?: number;
  // Time taken in ms to analyze builtin queries for csharp (or undefined if this language was not analyzed)
  analyze_builtin_queries_csharp_duration_ms?: number;
  // Time taken in ms to analyze builtin queries for go (or undefined if this language was not analyzed)
  analyze_builtin_queries_go_duration_ms?: number;
  // Time taken in ms to analyze builtin queries for java (or undefined if this language was not analyzed)
  analyze_builtin_queries_java_duration_ms?: number;
  // Time taken in ms to analyze builtin queries for javascript (or undefined if this language was not analyzed)
  analyze_builtin_queries_javascript_duration_ms?: number;
  // Time taken in ms to analyze builtin queries for python (or undefined if this language was not analyzed)
  analyze_builtin_queries_python_duration_ms?: number;
  // Time taken in ms to analyze custom queries for cpp (or undefined if this language was not analyzed)
  analyze_custom_queries_cpp_duration_ms?: number;
  // Time taken in ms to analyze custom queries for csharp (or undefined if this language was not analyzed)
  analyze_custom_queries_csharp_duration_ms?: number;
  // Time taken in ms to analyze custom queries for go (or undefined if this language was not analyzed)
  analyze_custom_queries_go_duration_ms?: number;
  // Time taken in ms to analyze custom queries for java (or undefined if this language was not analyzed)
  analyze_custom_queries_java_duration_ms?: number;
  // Time taken in ms to analyze custom queries for javascript (or undefined if this language was not analyzed)
  analyze_custom_queries_javascript_duration_ms?: number;
  // Time taken in ms to analyze custom queries for python (or undefined if this language was not analyzed)
  analyze_custom_queries_python_duration_ms?: number;
  // Name of language that errored during analysis (or undefined if no language failed)
  analyze_failure_language?: string;
}

interface FinishStatusReport extends util.StatusReportBase, upload_lib.UploadStatusReport, QueriesStatusReport {}

async function sendStatusReport(
  startedAt: Date,
  queriesStats: QueriesStatusReport | undefined,
  uploadStats: upload_lib.UploadStatusReport | undefined,
  error?: Error) {

  const status = queriesStats?.analyze_failure_language !== undefined || error !== undefined ? 'failure' : 'success';
  const statusReportBase = await util.createStatusReportBase('finish', status, startedAt, error?.message, error?.stack);
  const statusReport: FinishStatusReport = {
    ...statusReportBase,
    ...(queriesStats || {}),
    ...(uploadStats || {}),
  };
  await util.sendStatusReport(statusReport);
}

async function createdDBForScannedLanguages(databaseFolder: string) {
const scannedLanguages = process.env[sharedEnv.CODEQL_ACTION_SCANNED_LANGUAGES];
if (scannedLanguages) {
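A minimal, self-contained sketch of the composition pattern used by sendStatusReport above: a base report is merged with optional per-stage stats via object spread, so absent stats simply contribute no fields. The local interfaces here are trimmed stand-ins for the real ones, and the concrete values are illustrative.

// Trimmed local stand-ins for the interfaces above; values are made up.
interface StatusReportBase { action_name: string; status: string; started_at: string; }
interface QueriesStatusReport { analyze_builtin_queries_javascript_duration_ms?: number; analyze_failure_language?: string; }
interface UploadStatusReport { num_results_in_sarif?: number; }
interface FinishStatusReport extends StatusReportBase, QueriesStatusReport, UploadStatusReport {}

function buildFinishReport(
  base: StatusReportBase,
  queriesStats?: QueriesStatusReport,
  uploadStats?: UploadStatusReport): FinishStatusReport {
  // Undefined stats spread to nothing, so only available data is reported.
  return {
    ...base,
    ...(queriesStats || {}),
    ...(uploadStats || {}),
  };
}

const report = buildFinishReport(
  { action_name: 'finish', status: 'success', started_at: new Date().toISOString() },
  { analyze_builtin_queries_javascript_duration_ms: 1234 },
  { num_results_in_sarif: 7 });
console.log(JSON.stringify(report));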
@@ -33,35 +80,53 @@ async function finalizeDatabaseCreation(databaseFolder: string, config: configUtils.Config) {
}

// Runs queries and creates sarif files in the given folder
async function runQueries(databaseFolder: string, sarifFolder: string, config: configUtils.Config) {
const codeql = getCodeQL();
for (let database of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + database);
async function runQueries(
databaseFolder: string,
sarifFolder: string,
config: configUtils.Config): Promise<QueriesStatusReport> {

const queries = config.queries[database] || [];
const codeql = getCodeQL();
for (let language of fs.readdirSync(databaseFolder)) {
core.startGroup('Analyzing ' + language);

const queries = config.queries[language] || [];
if (queries.length === 0) {
throw new Error('Unable to analyse ' + database + ' as no queries were selected for this language');
throw new Error('Unable to analyse ' + language + ' as no queries were selected for this language');
}

// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, database + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + database + '...\n' + querySuiteContents);
try {
// Pass the queries to codeql using a file instead of using the command
// line to avoid command line length restrictions, particularly on windows.
const querySuite = path.join(databaseFolder, language + '-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);
core.debug('Query suite file for ' + language + '...\n' + querySuiteContents);

const sarifFile = path.join(sarifFolder, database + '.sarif');
const sarifFile = path.join(sarifFolder, language + '.sarif');

await codeql.databaseAnalyze(path.join(databaseFolder, database), sarifFile, querySuite);
await codeql.databaseAnalyze(path.join(databaseFolder, language), sarifFile, querySuite);

core.debug('SARIF results for database ' + database + ' created at "' + sarifFile + '"');
core.endGroup();
core.debug('SARIF results for database ' + language + ' created at "' + sarifFile + '"');
core.endGroup();

} catch (e) {
// For now the fields about query performance are not populated
return {
  analyze_failure_language: language,
};
}
}

return {};
}

async function run() {
const startedAt = new Date();
let queriesStats: QueriesStatusReport | undefined = undefined;
let uploadStats: upload_lib.UploadStatusReport | undefined = undefined;
try {
if (util.should_abort('finish', true) || !await util.reportActionStarting('finish')) {
if (util.should_abort('finish', true) ||
!await util.sendStatusReport(await util.createStatusReportBase('finish', 'starting', startedAt), true)) {
return;
}
const config = await configUtils.getConfig();
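A short sketch of the query-suite-file workaround described in the comments above: rather than passing every query on the command line (which can exceed length limits, particularly on Windows), the query list is written to a .qls file that is then handed to the CLI. The query names and folder here are hypothetical, and the actual codeql invocation is omitted.

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';

// Hypothetical query list and scratch folder, for illustration only.
const queries = ['./queries/foo.ql', './queries/bar.ql'];
const databaseFolder = fs.mkdtempSync(path.join(os.tmpdir(), 'codeql-db-'));

// One "- query: <path>" line per query, written to <language>-queries.qls.
const querySuite = path.join(databaseFolder, 'javascript-queries.qls');
const querySuiteContents = queries.map(q => '- query: ' + q).join('\n');
fs.writeFileSync(querySuite, querySuiteContents);

// The suite file, not the individual queries, is then passed to the analysis
// step, e.g. codeql.databaseAnalyze(databasePath, sarifFile, querySuite).
console.log('Wrote query suite:\n' + querySuiteContents);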
@@ -78,22 +143,19 @@ async function run() {
await finalizeDatabaseCreation(databaseFolder, config);

core.info('Analyzing database');
await runQueries(databaseFolder, sarifFolder, config);
queriesStats = await runQueries(databaseFolder, sarifFolder, config);

if ('true' === core.getInput('upload')) {
if (!await upload_lib.upload(sarifFolder)) {
await util.reportActionFailed('finish', 'upload');
return;
}
uploadStats = await upload_lib.upload(sarifFolder);
}

} catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('finish', error.message, error.stack);
await sendStatusReport(startedAt, queriesStats, uploadStats, error);
return;
}

await util.reportActionSucceeded('finish');
await sendStatusReport(startedAt, queriesStats, uploadStats);
}

run().catch(e => {
@@ -131,13 +131,54 @@ function concatTracerConfigs(configs: { [lang: string]: TracerConfig }): TracerConfig {
return { env, spec };
}

interface InitSuccessStatusReport extends util.StatusReportBase {
  // Comma-separated list of languages that analysis was run for
  // This may be from the workflow file or may be calculated from repository contents
  languages: string;
  // Comma-separated list of languages specified explicitly in the workflow file
  workflow_languages: string;
  // Comma-separated list of paths, from the 'paths' config field
  paths: string;
  // Comma-separated list of paths, from the 'paths-ignore' config field
  paths_ignore: string;
  // Comma-separated list of languages where the default queries are disabled
  disable_default_queries: string;
  // Comma-separated list of query sources, from the 'queries' config field
  queries: string;
}

async function sendSuccessStatusReport(startedAt: Date, config: configUtils.Config) {
const statusReportBase = await util.createStatusReportBase('init', 'success', startedAt);

const languages = config.languages.join(',');
const workflowLanguages = core.getInput('languages', { required: false });
const paths = (config.originalUserInput.paths || []).join(',');
const pathsIgnore = (config.originalUserInput['paths-ignore'] || []).join(',');
const disableDefaultQueries = config.originalUserInput['disable-default-queries'] ? languages : '';
const queries = (config.originalUserInput.queries || []).map(q => q.uses).join(',');

const statusReport: InitSuccessStatusReport = {
  ...statusReportBase,
  languages: languages,
  workflow_languages: workflowLanguages,
  paths: paths,
  paths_ignore: pathsIgnore,
  disable_default_queries: disableDefaultQueries,
  queries: queries,
};

await util.sendStatusReport(statusReport);
}

async function run() {

const startedAt = new Date();
let config: configUtils.Config;
let codeql: CodeQL;

try {
if (util.should_abort('init', false) || !await util.reportActionStarting('init')) {
if (util.should_abort('init', false) ||
!await util.sendStatusReport(await util.createStatusReportBase('init', 'starting', startedAt), true)) {
return;
}
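A self-contained sketch of how the init report flattens config values into the comma-separated strings seen in sendSuccessStatusReport above. The config literal and query reference are illustrative, not taken from the commit.

// Trimmed local copy of UserConfig; values below are made up.
interface UserConfig {
  'disable-default-queries'?: boolean;
  queries?: { uses: string }[];
  'paths-ignore'?: string[];
  paths?: string[];
}

const languages = ['javascript', 'python'];
const originalUserInput: UserConfig = {
  'disable-default-queries': true,
  queries: [{ uses: './foo' }, { uses: 'octo-org/queries/suite.qls@main' }],
  paths: ['src'],
  'paths-ignore': ['test', 'vendor'],
};

const report = {
  languages: languages.join(','),                                    // "javascript,python"
  paths: (originalUserInput.paths || []).join(','),                  // "src"
  paths_ignore: (originalUserInput['paths-ignore'] || []).join(','), // "test,vendor"
  // When default queries are disabled, record which languages that applies to.
  disable_default_queries: originalUserInput['disable-default-queries'] ? languages.join(',') : '',
  queries: (originalUserInput.queries || []).map(q => q.uses).join(','),
};
console.log(JSON.stringify(report, null, 2));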
@@ -153,7 +194,7 @@ async function run() {

} catch (e) {
core.setFailed(e.message);
await util.reportActionAborted('init', e.message);
await util.sendStatusReport(await util.createStatusReportBase('init', 'aborted', startedAt, e.message));
return;
}

@@ -226,10 +267,15 @@ async function run() {

} catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('init', error.message, error.stack);
await util.sendStatusReport(await util.createStatusReportBase(
  'init',
  'failure',
  startedAt,
  error.message,
  error.stack));
return;
}
await util.reportActionSucceeded('init');
await sendSuccessStatusReport(startedAt, config);
core.exportVariable(sharedEnv.CODEQL_ACTION_INIT_COMPLETED, 'true');
}
@@ -8,6 +8,6 @@ export const CODEQL_ACTION_TRACED_LANGUAGES = 'CODEQL_ACTION_TRACED_LANGUAGES';
// action (i.e. the upload action is being used by a third-party integrator)
// then this variable will be assigned the start time of the action invoked
// rather than the init action.
export const CODEQL_ACTION_STARTED_AT = 'CODEQL_ACTION_STARTED_AT';
export const CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';
// Populated when the init action completes successfully
export const CODEQL_ACTION_INIT_COMPLETED = 'CODEQL_ACTION_INIT_COMPLETED';
@@ -7,12 +7,10 @@ setupTests(test);

test('validateSarifFileSchema - valid', t => {
const inputFile = __dirname + '/../src/testdata/valid-sarif.sarif';
t.true(uploadLib.validateSarifFileSchema(inputFile));
t.notThrows(() => uploadLib.validateSarifFileSchema(inputFile));
});

test('validateSarifFileSchema - invalid', t => {
const inputFile = __dirname + '/../src/testdata/invalid-sarif.sarif';
t.false(uploadLib.validateSarifFileSchema(inputFile));
// validateSarifFileSchema calls core.setFailed which sets the exit code on error
process.exitCode = 0;
t.throws(() => uploadLib.validateSarifFileSchema(inputFile));
});
@@ -35,13 +35,13 @@ export function combineSarifFiles(sarifFiles: string[]): string {

// Upload the given payload.
// If the request fails then this will retry a small number of times.
async function uploadPayload(payload): Promise<boolean> {
async function uploadPayload(payload) {
core.info('Uploading results');

// If in test mode we don't want to upload the results
const testMode = process.env['TEST_MODE'] === 'true' || false;
if (testMode) {
return true;
return;
}

const [owner, repo] = util.getRequiredEnvParam("GITHUB_REPOSITORY").split("/");
@@ -64,15 +64,14 @@ async function uploadPayload(payload): Promise<boolean> {
const statusCode = response.status;
if (statusCode === 202) {
core.info("Successfully uploaded results");
return true;
return;
}

const requestID = response.headers["x-github-request-id"];

// On any other status code that's not 5xx mark the upload as failed
if (!statusCode || statusCode < 500 || statusCode >= 600) {
core.setFailed('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
return false;
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}

// On a 5xx status code we may retry the request
@@ -89,25 +88,34 @@ async function uploadPayload(payload): Promise<boolean> {
// If the upload fails with 5xx then we assume it is a temporary problem
// and not an error that the user has caused or can fix.
// We avoid marking the job as failed to avoid breaking CI workflows.
core.error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
return false;
throw new Error('Upload failed (' + requestID + '): (' + statusCode + ') ' + JSON.stringify(response.data));
}
}

return false;
// This case shouldn't ever happen as the final iteration of the loop
// will always throw an error instead of exiting to here.
throw new Error('Upload failed');
}

export interface UploadStatusReport {
  // Size in bytes of unzipped SARIF upload
  raw_upload_size_bytes?: number;
  // Size in bytes of actual SARIF upload
  zipped_upload_size_bytes?: number;
  // Number of results in the SARIF upload
  num_results_in_sarif?: number;
}

// Uploads a single sarif file or a directory of sarif files
// depending on what the path happens to refer to.
// Returns true iff the upload occurred and succeeded
export async function upload(input: string): Promise<boolean> {
export async function upload(input: string): Promise<UploadStatusReport> {
if (fs.lstatSync(input).isDirectory()) {
const sarifFiles = fs.readdirSync(input)
  .filter(f => f.endsWith(".sarif"))
  .map(f => path.resolve(input, f));
if (sarifFiles.length === 0) {
core.setFailed("No SARIF files found to upload in \"" + input + "\".");
return false;
throw new Error("No SARIF files found to upload in \"" + input + "\".");
}
return await uploadFiles(sarifFiles);
} else {
@@ -125,50 +133,42 @@ export function countResultsInSarif(sarif: string): number {
}

// Validates that the given file path refers to a valid SARIF file.
// Returns a non-empty list of error message if the file is invalid,
// otherwise returns the empty list if the file is valid.
export function validateSarifFileSchema(sarifFilePath: string): boolean {
// Throws an error if the file is invalid.
export function validateSarifFileSchema(sarifFilePath: string) {
const sarif = JSON.parse(fs.readFileSync(sarifFilePath, 'utf8'));
const schema = JSON.parse(fs.readFileSync(__dirname + '/../src/sarif_v2.1.0_schema.json', 'utf8'));

const result = new jsonschema.Validator().validate(sarif, schema);
if (result.valid) {
return true;
} else {
// Set the failure message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const errorMessages = result.errors.map(e => "- " + e.stack);
core.setFailed("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + errorMessages.join("\n"));

// Also output the more verbose error messages in groups as these may be very large.
if (!result.valid) {
// Output the more verbose error messages in groups as these may be very large.
for (const error of result.errors) {
core.startGroup("Error details: " + error.stack);
core.info(JSON.stringify(error, null, 2));
core.endGroup();
}

return false;
// Set the main error message to the stacks of all the errors.
// This should be of a manageable size and may even give enough to fix the error.
const sarifErrors = result.errors.map(e => "- " + e.stack);
throw new Error("Unable to upload \"" + sarifFilePath + "\" as it is not valid SARIF:\n" + sarifErrors.join("\n"));
}
}

// Uploads the given set of sarif files.
// Returns true iff the upload occurred and succeeded
async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
async function uploadFiles(sarifFiles: string[]): Promise<UploadStatusReport> {
core.startGroup("Uploading results");
core.info("Uploading sarif files: " + JSON.stringify(sarifFiles));

const sentinelEnvVar = "CODEQL_UPLOAD_SARIF";
if (process.env[sentinelEnvVar]) {
core.error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
return false;
throw new Error("Aborting upload: only one run of the codeql/analyze or codeql/upload-sarif actions is allowed per job");
}
core.exportVariable(sentinelEnvVar, sentinelEnvVar);

// Validate that the files we were asked to upload are all valid SARIF files
for (const file of sarifFiles) {
if (!validateSarifFileSchema(file)) {
return false;
}
validateSarifFileSchema(file);
}

const commitOid = await util.getCommitOid();
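A minimal sketch of the validate-or-throw pattern the new validateSarifFileSchema uses, built on the same jsonschema Validator API shown above. The file paths are placeholders; the real action loads the SARIF v2.1.0 schema bundled with it.

import * as fs from 'fs';
import * as jsonschema from 'jsonschema';

// Validate an arbitrary JSON file against an arbitrary JSON schema, throwing
// instead of returning a boolean, so callers handle failure via try/catch.
function validateAgainstSchema(instancePath: string, schemaPath: string) {
  const instance = JSON.parse(fs.readFileSync(instancePath, 'utf8'));
  const schema = JSON.parse(fs.readFileSync(schemaPath, 'utf8'));

  const result = new jsonschema.Validator().validate(instance, schema);
  if (!result.valid) {
    // Summarise every violation in the thrown error so callers can surface it.
    const errors = result.errors.map(e => "- " + e.stack);
    throw new Error("\"" + instancePath + "\" does not match the schema:\n" + errors.join("\n"));
  }
}

// Callers simply try/catch instead of checking a boolean return value.
try {
  validateAgainstSchema('results.sarif', 'sarif_v2.1.0_schema.json');
} catch (e) {
  console.error(e);
}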
@@ -176,7 +176,7 @@ async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
const ref = util.getRef();
const analysisKey = await util.getAnalysisKey();
const analysisName = util.getRequiredEnvParam('GITHUB_WORKFLOW');
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT];
const startedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];

let sarifPayload = combineSarifFiles(sarifFiles);
sarifPayload = fingerprints.addFingerprints(sarifPayload);
@@ -187,8 +187,7 @@ async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
const workflowRunID = parseInt(workflowRunIDStr, 10);

if (Number.isNaN(workflowRunID)) {
core.setFailed('GITHUB_RUN_ID must define a non NaN workflow run ID');
return false;
throw new Error('GITHUB_RUN_ID must define a non NaN workflow run ID');
}

let matrix: string | undefined = core.getInput('matrix');
@@ -212,14 +211,21 @@ async function uploadFiles(sarifFiles: string[]): Promise<boolean> {
});

// Log some useful debug info about the upload
core.debug("Raw upload size: " + sarifPayload.length + " bytes");
core.debug("Base64 zipped upload size: " + zipped_sarif.length + " bytes");
core.debug("Number of results in upload: " + countResultsInSarif(sarifPayload));
const rawUploadSizeBytes = sarifPayload.length;
core.debug("Raw upload size: " + rawUploadSizeBytes + " bytes");
const zippedUploadSizeBytes = zipped_sarif.length;
core.debug("Base64 zipped upload size: " + zippedUploadSizeBytes + " bytes");
const numResultInSarif = countResultsInSarif(sarifPayload);
core.debug("Number of results in upload: " + numResultInSarif);

// Make the upload
const succeeded = await uploadPayload(payload);
await uploadPayload(payload);

core.endGroup();

return succeeded;
return {
  raw_upload_size_bytes: rawUploadSizeBytes,
  zipped_upload_size_bytes: zippedUploadSizeBytes,
  num_results_in_sarif: numResultInSarif,
};
}
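A sketch of how the three UploadStatusReport fields can be derived from a SARIF payload. Gzip plus base64 encoding is assumed here only to mirror the "Base64 zipped upload size" debug line above, and countResults is a simplified stand-in for countResultsInSarif; the payload itself is a toy example.

import * as zlib from 'zlib';

// Simplified stand-in for countResultsInSarif: total results across all runs.
function countResults(sarif: string): number {
  const parsed = JSON.parse(sarif);
  return (parsed.runs || []).reduce(
    (total: number, run: any) => total + (run.results || []).length, 0);
}

const sarifPayload = JSON.stringify({ runs: [{ results: [{ ruleId: 'example/rule' }] }] });
const zippedSarif = zlib.gzipSync(sarifPayload).toString('base64');

const uploadStats = {
  raw_upload_size_bytes: sarifPayload.length,
  zipped_upload_size_bytes: zippedSarif.length,
  num_results_in_sarif: countResults(sarifPayload),
};
console.log(uploadStats);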
@@ -3,20 +3,36 @@ import * as core from '@actions/core';
import * as upload_lib from './upload-lib';
import * as util from './util';

interface UploadSarifStatusReport extends util.StatusReportBase, upload_lib.UploadStatusReport {}

async function sendSuccessStatusReport(startedAt: Date, uploadStats: upload_lib.UploadStatusReport) {
const statusReportBase = await util.createStatusReportBase('upload-sarif', 'success', startedAt);
const statusReport: UploadSarifStatusReport = {
  ...statusReportBase,
  ...uploadStats,
};
await util.sendStatusReport(statusReport);
}

async function run() {
if (util.should_abort('upload-sarif', false) || !await util.reportActionStarting('upload-sarif')) {
const startedAt = new Date();
if (util.should_abort('upload-sarif', false) ||
!await util.sendStatusReport(await util.createStatusReportBase('upload-sarif', 'starting', startedAt), true)) {
return;
}

try {
if (await upload_lib.upload(core.getInput('sarif_file'))) {
await util.reportActionSucceeded('upload-sarif');
} else {
await util.reportActionFailed('upload-sarif', 'upload');
}
const uploadStats = await upload_lib.upload(core.getInput('sarif_file'));
await sendSuccessStatusReport(startedAt, uploadStats);

} catch (error) {
core.setFailed(error.message);
await util.reportActionFailed('upload-sarif', error.message, error.stack);
await util.sendStatusReport(await util.createStatusReportBase(
  'upload-sarif',
  'failure',
  startedAt,
  error.message,
  error.stack));
return;
}
}
133 src/util.ts
@@ -5,7 +5,6 @@ import * as os from 'os';
import * as path from 'path';

import * as api from './api-client';
import * as configUtils from './config-utils';
import * as sharedEnv from './shared-environment';

/**
@@ -138,21 +137,36 @@ export function getRef(): string {
type ActionName = 'init' | 'autobuild' | 'finish' | 'upload-sarif';
type ActionStatus = 'starting' | 'aborted' | 'success' | 'failure';

interface StatusReport {
export interface StatusReportBase {
  // ID of the workflow run containing the action run
  "workflow_run_id": number;
  // Workflow name. Converted to analysis_name further down the pipeline.
  "workflow_name": string;
  // Job name from the workflow
  "job_name": string;
  // Analysis key, normally composed from the workflow path and job name
  "analysis_key": string;
  // Value of the matrix for this instantiation of the job
  "matrix_vars"?: string;
  "languages": string;
  // Commit oid that the workflow was triggered on
  "commit_oid": string;
  // Ref that the workflow was triggered on
  "ref": string;
  // Name of the action being executed
  "action_name": ActionName;
  // Version of the action being executed, as a commit oid
  "action_oid": string;
  // Time the first action started. Normally the init action
  "started_at": string;
  // Time this action started
  "action_started_at": string;
  // Time this action completed, or undefined if not yet completed
  "completed_at"?: string;
  // State this action is currently in
  "status": ActionStatus;
  // Cause of the failure (or undefined if status is not failure)
  "cause"?: string;
  // Stack trace of the failure (or undefined if status is not failure)
  "exception"?: string;
}
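For concreteness, a payload matching StatusReportBase might look like the object below. Every value here is made up and only illustrates the expected shapes and formats (ISO timestamps, comma-separated languages, a JSON-encoded matrix).

// Purely illustrative example payload; all values are invented.
const exampleStatusReport = {
  workflow_run_id: 123456789,
  workflow_name: "CodeQL",
  job_name: "analyze",
  analysis_key: ".github/workflows/codeql-analysis.yml:analyze",
  matrix_vars: JSON.stringify({ language: "javascript" }),
  languages: "javascript,python",
  commit_oid: "0000000000000000000000000000000000000000",
  ref: "refs/heads/main",
  action_name: "finish",
  action_oid: "unknown",
  started_at: "2020-01-01T00:00:00.000Z",
  action_started_at: "2020-01-01T00:05:00.000Z",
  status: "success",
};
console.log(JSON.stringify(exampleStatusReport, null, 2));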
@@ -161,26 +175,17 @@ interface StatusReport {
 *
 * @param actionName The name of the action, e.g. 'init', 'finish', 'upload-sarif'
 * @param status The status. Must be 'success', 'failure', or 'starting'
 * @param startedAt The time this action started executing.
 * @param cause Cause of failure (only supply if status is 'failure')
 * @param exception Exception (only supply if status is 'failure')
 */
async function createStatusReport(
export async function createStatusReportBase(
actionName: ActionName,
status: ActionStatus,
actionStartedAt: Date,
cause?: string,
exception?: string):
Promise<StatusReport> {

// If this is not the init action starting up or aborting then try to load the config.
// If it fails then carry on because it's important to still send the status report.
let config: configUtils.Config | undefined = undefined;
if (actionName !== 'init' || (status !== 'starting' && status !== 'aborted')) {
try {
config = await configUtils.getConfig();
} catch (e) {
core.error('Unable to load config: ' + e);
}
}
Promise<StatusReportBase> {

const commitOid = process.env['GITHUB_SHA'] || '';
const ref = getRef();
@@ -192,21 +197,23 @@ async function createStatusReport(
const workflowName = process.env['GITHUB_WORKFLOW'] || '';
const jobName = process.env['GITHUB_JOB'] || '';
const analysis_key = await getAnalysisKey();
const languages = config?.languages?.join(',') || "";
const startedAt = process.env[sharedEnv.CODEQL_ACTION_STARTED_AT] || new Date().toISOString();
core.exportVariable(sharedEnv.CODEQL_ACTION_STARTED_AT, startedAt);
let workflowStartedAt = process.env[sharedEnv.CODEQL_WORKFLOW_STARTED_AT];
if (workflowStartedAt === undefined) {
workflowStartedAt = actionStartedAt.toISOString();
core.exportVariable(sharedEnv.CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
}

let statusReport: StatusReport = {
let statusReport: StatusReportBase = {
  workflow_run_id: workflowRunID,
  workflow_name: workflowName,
  job_name: jobName,
  analysis_key: analysis_key,
  languages: languages,
  commit_oid: commitOid,
  ref: ref,
  action_name: actionName,
  action_oid: "unknown", // TODO decide if it's possible to fill this in
  started_at: startedAt,
  started_at: workflowStartedAt,
  action_started_at: actionStartedAt.toISOString(),
  status: status
};
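A small sketch of the "remember the workflow start time across actions" pattern used just above: the first action to run records its start time in an environment variable, and later actions in the same workflow reuse that value. The variable name is the one exported from shared-environment above.

import * as core from '@actions/core';

const CODEQL_WORKFLOW_STARTED_AT = 'CODEQL_WORKFLOW_STARTED_AT';

function getWorkflowStartedAt(actionStartedAt: Date): string {
  let workflowStartedAt = process.env[CODEQL_WORKFLOW_STARTED_AT];
  if (workflowStartedAt === undefined) {
    workflowStartedAt = actionStartedAt.toISOString();
    // exportVariable makes the value visible to subsequent steps in the job.
    core.exportVariable(CODEQL_WORKFLOW_STARTED_AT, workflowStartedAt);
  }
  return workflowStartedAt;
}

console.log(getWorkflowStartedAt(new Date()));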
@@ -231,9 +238,16 @@
/**
 * Send a status report to the code_scanning/analysis/status endpoint.
 *
 * Returns the status code of the response to the status request.
 * Optionally checks the response from the API endpoint and sets the action
 * as failed if the status report failed. This is only expected to be used
 * when sending a 'starting' report.
 *
 * Returns whether sending the status report was successful or not.
 */
async function sendStatusReport(statusReport: StatusReport): Promise<number> {
export async function sendStatusReport<S extends StatusReportBase>(
statusReport: S,
ignoreFailures?: boolean): Promise<boolean> {

const statusReportJSON = JSON.stringify(statusReport);

core.debug('Sending status report: ' + statusReportJSON);
@@ -245,68 +259,27 @@ async function sendStatusReport(statusReport: StatusReport): Promise<number> {
repo: repo,
data: statusReportJSON,
});
return statusResponse.status;
}

/**
 * Send a status report that an action is starting.
 *
 * If the action is `init` then this also records the start time in the environment,
 * and ensures that the analysed languages are also recorded in the environment.
 *
 * Returns true unless a problem occurred and the action should abort.
 */
export async function reportActionStarting(action: ActionName): Promise<boolean> {
const statusCode = await sendStatusReport(await createStatusReport(action, 'starting'));

// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusCode === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusCode === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
return false;
if (!ignoreFailures) {
// If the status report request fails with a 403 or a 404, then this is a deliberate
// message from the endpoint that the SARIF upload can be expected to fail too,
// so the action should fail to avoid wasting actions minutes.
//
// Other failure responses (or lack thereof) could be transitory and should not
// cause the action to fail.
if (statusResponse.status === 403) {
core.setFailed('The repo on which this action is running is not opted-in to CodeQL code scanning.');
return false;
}
if (statusResponse.status === 404) {
core.setFailed('Not authorized to use the CodeQL code scanning feature on this repo.');
return false;
}
}

return true;
}

/**
 * Report that an action has failed.
 *
 * Note that the started_at date is always that of the `init` action, since
 * this is likely to give a more useful duration when inspecting events.
 */
export async function reportActionFailed(action: ActionName, cause?: string, exception?: string) {
await sendStatusReport(await createStatusReport(action, 'failure', cause, exception));
}

/**
 * Report that an action has succeeded.
 *
 * Note that the started_at date is always that of the `init` action, since
 * this is likely to give a more useful duration when inspecting events.
 */
export async function reportActionSucceeded(action: ActionName) {
await sendStatusReport(await createStatusReport(action, 'success'));
}

/**
 * Report that an action has been aborted.
 *
 * Note that the started_at date is always that of the `init` action, since
 * this is likely to give a more useful duration when inspecting events.
 */
export async function reportActionAborted(action: ActionName, cause?: string) {
await sendStatusReport(await createStatusReport(action, 'aborted', cause));
}

/**
 * Get the array of all the tool names contained in the given sarif contents.
 *
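Finally, a sketch of the gating pattern the new run() functions use at startup: send a 'starting' status report first and bail out of the action early if that report indicates the run should not proceed. The sendStartingReport stub here stands in for util.createStatusReportBase plus util.sendStatusReport; it is not the commit's implementation.

// Stub standing in for createStatusReportBase + sendStatusReport; in the real
// action this builds a StatusReportBase and POSTs it to the
// code_scanning/analysis/status endpoint.
async function sendStartingReport(actionName: string, startedAt: Date): Promise<boolean> {
  console.log(`starting report for ${actionName} at ${startedAt.toISOString()}`);
  return true;
}

async function run() {
  const startedAt = new Date();
  if (!await sendStartingReport('upload-sarif', startedAt)) {
    // A falsy result means the run should not proceed (e.g. the repository is
    // not opted in to code scanning), so stop before doing any work.
    return;
  }
  // ... the real action's work would happen here ...
}

run().catch(e => console.error(e));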